def _local_gefura(G, groups, weight=None, normalized=True):
    """Compute the local gefura measure of every node in ``G``.

    Works for weighted and unweighted networks. ``groups`` is an iterable
    of disjoint node sets; ``weight`` is the edge-data key of the weight
    (``None`` for unweighted). The raw scores are rescaled by
    ``rescale_local`` before being returned as a node -> score dict.
    """
    scores = dict.fromkeys(G, 0)
    # node -> containing group; assumes the groups are disjoint
    membership = {node: grp for grp in groups for node in grp}
    for source in G:
        # Single-source shortest paths: BFS when unweighted, Dijkstra otherwise.
        if weight is None:
            order, preds, npaths = _single_source_shortest_path_basic(G, source)
        else:
            order, preds, npaths = _single_source_dijkstra_path_basic(
                G, source, weight)
        # Dependency accumulation in reverse order of discovery.
        dependency = dict.fromkeys(G, 0)
        while order:
            w = order.pop()
            crosses = membership[source] != membership[w]
            if crosses:
                factor = (1 + dependency[w]) / npaths[w]
            else:
                factor = dependency[w] / npaths[w]
            for pred in preds[w]:
                dependency[pred] += npaths[pred] * factor
            # Local variant: only intra-group endpoints contribute.
            if w != source and not crosses:
                scores[w] += dependency[w]
    return rescale_local(scores, G, membership, normalized)
def global_gefura(G, groups, weight=None, normalized=True):
    """Determine the global gefura measure of each node.

    Handles weighted and unweighted, directed and undirected, connected
    and unconnected networks.

    Arguments
    ---------
    G : a networkx.Graph
        the network
    groups : a list or iterable of sets
        Each set represents a group and contains 1 to N nodes
    weight : None or a string
        If None, the network is treated as unweighted. If a string, this is
        the edge data key corresponding to the edge weight
    normalized : True|False
        Whether or not to normalize the output to [0, 1].

    Examples
    --------
    >>> import networkx as nx
    >>> G = nx.path_graph(5)
    >>> groups = [{0, 2}, {1}, {3, 4}]
    >>> global_gefura(G, groups)
    {0: 0.0, 1: 0.5, 2: 0.8, 3: 0.6, 4: 0.0}

    """
    scores = dict.fromkeys(G, 0)
    # node -> containing group; assumes the groups are disjoint
    membership = {node: grp for grp in groups for node in grp}
    for source in G:
        # Single-source shortest paths: BFS when unweighted, Dijkstra otherwise.
        if weight is None:
            order, preds, npaths = _single_source_shortest_path_basic(G, source)
        else:
            order, preds, npaths = _single_source_dijkstra_path_basic(
                G, source, weight)
        # Dependency accumulation in reverse order of discovery.
        dependency = dict.fromkeys(G, 0)
        while order:
            w = order.pop()
            if membership[source] != membership[w]:
                factor = (1 + dependency[w]) / npaths[w]
            else:
                factor = dependency[w] / npaths[w]
            for pred in preds[w]:
                dependency[pred] += npaths[pred] * factor
            if w != source:
                scores[w] += dependency[w]
    return rescale_global(scores, G, groups, normalized)
def stress_centrality(G, k=None, normalized=True, weight=None,
                      endpoints=False, seed=None):
    """
    Compute stress centrality.

    We use the same BFS algorithm as for betweenness centrality
    used in networkx, but we change the accumulating phase
    in order to get only the number of shortest paths.

    See algorithm 12 in
    http://algo.uni-konstanz.de/publications/b-vspbc-08.pdf
    """
    stress = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    if k is None:
        nodes = G
    else:
        random.seed(seed)
        # random.sample() requires a sequence (passing a set-like view was
        # deprecated in Python 3.9 and raises TypeError since 3.11), so
        # materialize the NodeView into a list before sampling.
        nodes = random.sample(list(G.nodes()), k)
    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma = _single_source_dijkstra_path_basic(G, s, weight)
        # accumulation
        if endpoints:
            stress = _accumulate_stress_endpoints(stress, S, P, sigma, s)
        else:
            stress = _accumulate_stress_basic(stress, S, P, sigma, s)
    # rescaling
    stress = _rescale(stress, len(G), normalized=normalized,
                      directed=G.is_directed(), k=k)
    return stress
def _group_preprocessing(G, set_v, weight):
    """Precompute the quantities needed for group betweenness centrality.

    For every source node ``s`` this runs a single-source shortest-path
    pass (BFS when ``weight`` is None, Dijkstra otherwise) and records:

    - ``sigma[s]``: number of shortest paths from ``s`` to each node,
    - ``delta[s]``: endpoint-including dependency of ``s`` on each node,
    - ``D[s]``: shortest-path distance from ``s`` to each node.

    It then builds ``PB``, a path-betweenness matrix restricted to the
    nodes in ``set_v`` (the group), and returns ``(PB, sigma, D)``.
    """
    sigma = {}
    delta = {}
    D = {}
    betweenness = dict.fromkeys(G, 0)
    for s in G:
        if weight is None:  # use BFS
            S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
        betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
        for i in delta[s].keys():  # add the paths from s to i and rescale sigma
            if s != i:
                # count the trivial path that ends at i itself
                delta[s][i] += 1
            if weight is not None:
                # halve weighted path counts; presumably compensates for
                # double counting in the weighted helper — TODO confirm
                # against _single_source_dijkstra_path_basic
                sigma[s][i] = sigma[s][i] / 2
    # building the path betweenness matrix only for nodes that appear in the group
    PB = dict.fromkeys(G)
    for group_node1 in set_v:
        PB[group_node1] = dict.fromkeys(G, 0.0)
        for group_node2 in set_v:
            # skip unreachable pairs within the group
            if group_node2 not in D[group_node1]:
                continue
            for node in G:
                # only consider nodes from which both group nodes are reachable
                if group_node2 in D[node] and group_node1 in D[node]:
                    # group_node1 lies on a shortest path from node to
                    # group_node2 iff the distances are additive
                    if (
                        D[node][group_node2]
                        == D[node][group_node1] + D[group_node1][group_node2]
                    ):
                        # fraction of node->group_node2 shortest paths that
                        # pass through group_node1, weighted by the
                        # endpoint-including dependency of node on group_node2
                        PB[group_node1][group_node2] += (
                            delta[node][group_node2]
                            * sigma[node][group_node1]
                            * sigma[group_node1][group_node2]
                            / sigma[node][group_node2]
                        )
    return PB, sigma, D