Code Example #1
def betweenness_centrality_subset(G,
                                  sources,
                                  targets,
                                  normalized=False,
                                  weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
    and $\sigma(s, t|v)$ is the number of those paths
    passing through some node $v$ other than $s, t$.
    If $s = t$, $\sigma(s, t) = 1$,
    and if $v \in \{s, t\}$, $\sigma(s, t|v) = 0$ [2]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by $2/((n-1)(n-2))$
      for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in betweenness_centrality() and is designed to make
    betweenness_centrality(G) be the same as
    betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).


    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s, weight)
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
    return b
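
The function above relies on module-internal helpers (shortest_path, dijkstra, _accumulate_subset, _rescale) defined elsewhere in the same NetworkX module, so the snippet is not runnable on its own. A minimal usage sketch through the public API, assuming a NetworkX release that exposes betweenness_centrality_subset at the top level (the graph and node choices below are illustrative only):

import networkx as nx

# A 5-node path: 0 - 1 - 2 - 3 - 4
G = nx.path_graph(5)

# Betweenness restricted to shortest paths from node 0 to node 4.
# Only the interior nodes 1, 2 and 3 lie on that single path, so they
# are the only nodes that receive a nonzero score.
bc = nx.betweenness_centrality_subset(G, sources=[0], targets=[4])
print(bc)
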
Code Example #2
def edge_betweenness_centrality_subset(G,
                                       sources,
                                       targets,
                                       normalized=False,
                                       weight=None):
    r"""Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(e) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where `S` is the set of sources, `T` is the set of targets,
    :math:`\sigma(s, t)` is the number of shortest `(s, t)`-paths,
    and :math:`\sigma(s, t|e)` is the number of those paths
    passing through edge `e` [2]_.

    Parameters
    ----------
    G : graph
      A networkx graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by `2/(n(n-1))`
      for graphs, and `1/(n(n-1))` for directed graphs where `n`
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in edge_betweenness_centrality() and is designed to make
    edge_betweenness_centrality(G) be the same as
    edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    b.update(dict.fromkeys(G.edges(), 0.0))  # b[e] for e in G.edges()
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s, weight)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for n in G:  # remove nodes to only return edges
        del b[n]
    b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
    return b
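
As above, _accumulate_edges_subset and _rescale_e are internal helpers, so this snippet is an excerpt rather than a standalone script. A short usage sketch through the public API, again assuming edge_betweenness_centrality_subset is exposed at the NetworkX top level:

import networkx as nx

G = nx.path_graph(4)  # edges (0, 1), (1, 2), (2, 3)

# Edge betweenness restricted to shortest paths from node 0 to node 3;
# every edge of the path lies on that single shortest path.
ebc = nx.edge_betweenness_centrality_subset(G, sources=[0], targets=[3])
print(ebc)  # keys are edge tuples such as (0, 1)
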
Code Example #3
File: betweenness_subset.py Project: jg-you/networkx
def betweenness_centrality_subset(G, sources, targets, normalized=False,
                                  weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
    and $\sigma(s, t|v)$ is the number of those paths
    passing through some node $v$ other than $s, t$.
    If $s = t$, $\sigma(s, t) = 1$,
    and if $v \in \{s, t\}$, $\sigma(s, t|v) = 0$ [2]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by $2/((n-1)(n-2))$
      for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in betweenness_centrality() and is designed to make
    betweenness_centrality(G) be the same as
    betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).


    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s, weight)
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
    return b
Code Example #4
def edge_betweenness_centrality_subset(G, sources, targets,
                                       normalized=False,
                                       weight=None):
    """Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(e) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where `S` is the set of sources, `T` is the set of targets,
    `\sigma(s, t)` is the number of shortest `(s, t)`-paths, 
    and `\sigma(s, t|e)` is the number of those paths
    passing through edge `e` [2]_.

    Parameters
    ----------
    G : graph
      A networkx graph 

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by `2/(n(n-1))` 
      for graphs, and `1/(n(n-1))` for directed graphs where `n` 
      is the number of nodes in G.
       
    weight : None or string, optional  
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.
        
    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length 
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in edge_betweenness_centrality() and is designed to make
    edge_betweenness_centrality(G) be the same as
    edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness 
       Centrality and their Generic Computation. 
       Social Networks 30(2):136-145, 2008.
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf

    """

    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    b.update(dict.fromkeys(G.edges(), 0.0))  # b[e] for e in G.edges()
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s, weight)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for n in G:  # remove nodes to only return edges
        del b[n]
    b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
    return b
Code Example #5
File: percolation.py Project: xuys50/networkx
def percolation_centrality(G,
                           attribute="percolation",
                           states=None,
                           weight=None):
    r"""Compute the percolation centrality for nodes.

    Percolation centrality of a node $v$, at a given time, is defined
    as the proportion of ‘percolated paths’ that go through that node.

    This measure quantifies relative impact of nodes based on their
    topological connectivity, as well as their percolation states.

    Percolation states of nodes are used to depict network percolation
    scenarios (such as during infection transmission in a social network
    of individuals, spreading of computer viruses on computer networks, or
    transmission of disease over a network of towns) over time. In this
    measure usually the percolation state is expressed as a decimal
    between 0.0 and 1.0.

    When all nodes are in the same percolated state this measure is
    equivalent to betweenness centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    attribute : None or string, optional (default='percolation')
      Name of the node attribute to use for percolation state, used
      if `states` is None.

    states : None or dict, optional (default=None)
      Specify percolation states for the nodes, nodes as keys states
      as values.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with percolation centrality as the value.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
    Liaquat Hossain [1]_.
    Pair dependencies are calculated and accumulated using [2]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
       Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
       during Percolation in Networks
       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
    .. [2] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    """
    percolation = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G

    nodes = G

    if states is None:
        states = nx.get_node_attributes(nodes, attribute)

    # sum of all percolation states
    p_sigma_x_t = 0.0
    for v in states.values():
        p_sigma_x_t += v

    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = dijkstra(G, s, weight)
        # accumulation
        percolation = _accumulate_percolation(percolation, G, S, P, sigma, s,
                                              states, p_sigma_x_t)

    n = len(G)

    for v in percolation:
        percolation[v] *= 1 / (n - 2)

    return percolation
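
A usage sketch for percolation centrality through the public API, assuming percolation_centrality is available in the installed NetworkX version. The percolation states below are made-up values for illustration; per the docstring they could instead be stored on the nodes under the attribute named by `attribute`:

import networkx as nx

G = nx.path_graph(5)

# Hypothetical percolation states, e.g. node 0 is the most "percolated".
states = {0: 0.6, 1: 0.3, 2: 0.1, 3: 0.1, 4: 0.1}

pc = nx.percolation_centrality(G, states=states)
print(pc)  # interior nodes get nonzero scores; endpoints 0 and 4 get 0.0
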
Code Example #6
def betweenness_centrality_subset(G, sources, targets,
                                  normalized=False,
                                  weighted_edges=False):
    """Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\\sum_{s\\in S, t \\in T} \\frac{\\sigma(s, t|v)}{\\sigma(s, t)}

    where :math:`S` is the set of sources, 
    :math:`T` is the set of targets,
    :math:`\\sigma(s, t)` is the number of shortest :math:`(s, t)`-paths, 
    and :math:`\\sigma(s, t|v)` is the number of those paths
    passing through some node :math:`v` other than :math:`s, t`.
    If :math:`s = t`, :math:`\\sigma(s, t) = 1`,
    and if :math:`v \\in \\{s, t\\}`, :math:`\\sigma(s, t|v) = 0` [2]_.


    Parameters
    ----------
    G : graph
      A networkx graph 

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by
      :math:`1/((n-1)(n-2))` where :math:`n` is the number of nodes in G.
       
    weighted_edges : bool, optional
      Consider the edge weights in determining the shortest paths.
      The edge weights must be greater than zero.
      If False, all edge weights are considered equal.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The basic algorithm is from Ulrik Brandes [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length 
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in betweenness_centrality() and is designed to make
    betweenness_centrality(G) be the same as
    betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    
    References
    ----------
    .. [1]  A Faster Algorithm for Betweenness Centrality.
       Ulrik Brandes, 
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness 
       Centrality and their Generic Computation. 
       Social Networks 30(2):136-145, 2008.
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    for s in sources:
        # single source shortest paths
        if weighted_edges:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s)
        else:  # use BFS
            S, P, sigma = shortest_path(G, s)
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    b = _rescale(b, normalized=normalized, directed=G.is_directed())
    return b
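
Code Example #6 shows an older signature that took a weighted_edges boolean; current NetworkX releases use the weight parameter seen in Code Examples #1, #3 and #7. A small sketch of the difference an edge-weight attribute can make, using the current public API (the graph and its "length" weights are made up for illustration):

import networkx as nx

G = nx.Graph()
# Two routes from 0 to 3: a short hop-count route through node 1 with a
# heavy edge, and a longer hop-count route through nodes 2 and 4 with
# light edges.
G.add_edge(0, 1, length=1.0)
G.add_edge(1, 3, length=10.0)
G.add_edge(0, 2, length=1.0)
G.add_edge(2, 4, length=1.0)
G.add_edge(4, 3, length=1.0)

unweighted = nx.betweenness_centrality_subset(G, sources=[0], targets=[3])
weighted = nx.betweenness_centrality_subset(
    G, sources=[0], targets=[3], weight="length"
)
print(unweighted)  # node 1 carries the shortest hop-count path
print(weighted)    # nodes 2 and 4 carry the shortest weighted path
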
Code Example #7
def betweenness_centrality_subset(G,
                                  sources,
                                  targets,
                                  normalized=False,
                                  weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
    and $\sigma(s, t|v)$ is the number of those paths
    passing through some node $v$ other than $s, t$.
    If $s = t$, $\sigma(s, t) = 1$,
    and if $v \in \{s, t\}$, $\sigma(s, t|v) = 0$ [2]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by $2/((n-1)(n-2))$
      for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The basic algorithm is from [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The normalization might seem a little strange but it is
    designed to make betweenness_centrality(G) be the same as
    betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    are easy to count. Undirected paths are tricky: should a path
    from "u" to "v" count as 1 undirected path or as 2 directed paths?

    For betweenness_centrality we report the number of undirected
    paths when G is undirected.

    For betweenness_centrality_subset the reporting is different.
    If the source and target subsets are the same, then we want
    to count undirected paths. But if the source and target subsets
    differ -- for example, if sources is {0} and targets is {1},
    then we are only counting the paths in one direction. They are
    undirected paths but we are counting them in a directed way.
    To count them as undirected paths, each should count as half a path.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    for s in sources:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = dijkstra(G, s, weight)
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
    return b
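
The Notes in this version state that the normalization is designed so that betweenness_centrality(G) and betweenness_centrality_subset(G, sources=G.nodes(), targets=G.nodes()) agree. A quick, hedged check of that claim (exact agreement depends on the installed NetworkX version):

import networkx as nx

G = nx.path_graph(5)

full = nx.betweenness_centrality(G, normalized=True)
subset = nx.betweenness_centrality_subset(
    G, sources=list(G), targets=list(G), normalized=True
)
print(full)
print(subset)  # per the Notes above, intended to match `full`
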
Code Example #8
def edge_betweenness_centrality_subset(G, sources, targets,
                                       normalized=False,
                                       weighted_edges=False):
    """Compute betweenness centrality for edges.

    Betweenness centrality of an edge is the fraction of all shortest 
    paths that pass through that edge.

    Parameters
    ----------
    G : graph
      A networkx graph 

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by
      1/((n-1)(n-2)) where n is the number of nodes in G.
       
    weighted_edges : bool, optional
      Consider the edge weights in determining the shortest paths.
      The edge weights must be greater than zero.
      If False, all edge weights are considered equal.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.
        
    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The basic algorithm is from Ulrik Brandes [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length 
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in edge_betweenness_centrality() and is designed to make
    edge_betweenness_centrality(G) be the same as
    edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    References
    ----------
    .. [1]  A Faster Algorithm for Betweenness Centrality.
       Ulrik Brandes, 
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    """

    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    b.update(dict.fromkeys(G.edges(), 0.0))  # b[e] for e in G.edges()
    for s in sources:
        # single source shortest paths
        if weighted_edges:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s)
        else:  # use BFS
            S, P, sigma = shortest_path(G, s)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for n in G:  # remove nodes to only return edges
        del b[n]
    b = _rescale(b, normalized=normalized, directed=G.is_directed())
    return b
Code Example #9
def edge_betweenness_centrality_subset(G,
                                       sources,
                                       targets,
                                       normalized=False,
                                       weighted_edges=False):
    """Compute betweenness centrality for edges.

    Betweenness centrality of an edge is the fraction of all shortest 
    paths that pass through that edge.

    Parameters
    ----------
    G : graph
      A networkx graph 

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by
      1/((n-1)(n-2)) where n is the number of nodes in G.
       
    weighted_edges : bool, optional
      Consider the edge weights in determining the shortest paths.
      The edge weights must be greater than zero.
      If False, all edge weights are considered equal.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.
        
    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The basic algorithm is from Ulrik Brandes [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length 
    paths between pairs of nodes.

    The normalization might seem a little strange but it is the same
    as in edge_betweenness_centrality() and is designed to make
    edge_betweenness_centrality(G) be the same as
    edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).

    References
    ----------
    .. [1]  A Faster Algorithm for Betweenness Centrality.
       Ulrik Brandes, 
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    """

    b = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    b.update(dict.fromkeys(G.edges(), 0.0))  # b[e] for e in G.edges()
    for s in sources:
        # single source shortest paths
        if weighted_edges:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s)
        else:  # use BFS
            S, P, sigma = shortest_path(G, s)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for n in G:  # remove nodes to only return edges
        del b[n]
    b = _rescale(b, normalized=normalized, directed=G.is_directed())
    return b
Code Example #10
File: percolation.py Project: boothby/networkx
def percolation_centrality(G, attribute='percolation',
                           states=None, weight=None):
    r"""Compute the percolation centrality for nodes.

    Percolation centrality of a node $v$, at a given time, is defined
    as the proportion of ‘percolated paths’ that go through that node.

    This measure quantifies relative impact of nodes based on their
    topological connectivity, as well as their percolation states.

    Percolation states of nodes are used to depict network percolation
    scenarios (such as during infection transmission in a social network
    of individuals, spreading of computer viruses on computer networks, or
    transmission of disease over a network of towns) over time. In this
    measure usually the percolation state is expressed as a decimal
    between 0.0 and 1.0.

    When all nodes are in the same percolated state this measure is
    equivalent to betweenness centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    attribute : None or string, optional (default='percolation')
      Name of the node attribute to use for percolation state, used
      if `states` is None.

    states : None or dict, optional (default=None)
      Specify percolation states for the nodes, nodes as keys states
      as values.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with percolation centrality as the value.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
    Liaquat Hossain [1]_.
    Pair dependencies are calculated and accumulated using [2]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
       Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
       during Percolation in Networks
       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
    .. [2] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
    """
    percolation = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G

    nodes = G

    if states is None:
        states = nx.get_node_attributes(nodes, attribute)

    # sum of all percolation states
    p_sigma_x_t = 0.0
    for v in states.values():
        p_sigma_x_t += v

    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = dijkstra(G, s, weight)
        # accumulation
        percolation = _accumulate_percolation(percolation, G, S, P, sigma, s,
                                              states, p_sigma_x_t)

    n = len(G)

    for v in percolation:
        percolation[v] *= 1 / (n - 2)

    return percolation