def augmentNodes(g):
    r1 = nx.eigenvector_centrality_numpy(g)
    r2 = nx.degree_centrality(g)
    r3 = nx.betweenness_centrality(g)
    r5 = nx.load_centrality(g, weight='weight')  # M. E. J. Newman, "Scientific collaboration networks. II. Shortest paths, weighted networks, and centrality", Phys. Rev. E 64, 016132 (2001).
    r6 = nx.pagerank(g, alpha=0.85, personalization=None, max_iter=100, tol=1e-08, nstart=None, weight='weight')
    
    if nx.is_directed(g):
        r8 = nx.in_degree_centrality(g)
        r9 = nx.out_degree_centrality(g)
#        r10 = nx.hits(g, max_iter=100, tol=1e-08, nstart=None)
    else:
        r4 = nx.communicability_centrality(g)
        r7 = nx.clustering(g, weight='weight')
        
    for x in g.nodes():
        g.node[x]['eigenvector_centrality_numpy'] = r1[x]
        g.node[x]['degree_centrality'] = r2[x]  
        g.node[x]['betweenness_centrality'] = r3[x]
        g.node[x]['load_centrality'] = r5[x]  
        g.node[x]['pagerank'] = r6[x]

        if nx.is_directed(g):
            g.node[x]['in_degree_centrality'] = r8[x]
            g.node[x]['out_degree_centrality'] = r9[x]
#            g.node[x]['hits'] = r10[x]
        else:
            g.node[x]['communicability_centrality'] = r4[x]
            g.node[x]['clustering'] = r7[x]
    return g        
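A minimal usage sketch (hypothetical, not part of the original example), assuming a NetworkX 1.x environment where g.node[x] is a writable attribute dict, as the function above requires:

# Hypothetical usage sketch; assumes NetworkX 1.x (g.node[x] access).
import networkx as nx

g = nx.karate_club_graph()          # small undirected test graph
g = augmentNodes(g)                 # attach the centrality scores to every node
print(g.node[0]['pagerank'], g.node[0]['clustering'])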
Example #2
def _create_flow_graph(G, H, infcapFlows):
    """Creates the flow graph on G corresponding to the auxiliary
    digraph H and infinite capacity edges flows infcapFlows.
    """
    if nx.is_directed(G):
        flowGraph = nx.DiGraph(G)
    else:
        flowGraph = nx.Graph(G)

    for (u, v) in flowGraph.edges():
        if H.has_edge(u, v):
            try:
                flowGraph[u][v]['flow'] = abs(G[u][v]['capacity']
                                              - H[u][v]['capacity'])
            except KeyError: # (u, v) has infinite capacity
                try:
                    flowGraph[u][v]['flow'] = H[v][u]['capacity']
                except KeyError:
                    # Infinite capacity digon in the original graph.
                    if nx.is_directed(G):
                        flowGraph[u][v]['flow'] = max(infcapFlows[(u, v)]
                                                    - infcapFlows[(v, u)], 0)
                    else:
                        flowGraph[u][v]['flow'] = abs(infcapFlows[(u, v)]
                                                    - infcapFlows[(v, u)])
        else:
            flowGraph[u][v]['flow'] = G[u][v]['capacity']

    return flowGraph
Example #3
def calculate_network_measures(net, analyser):
    deg=nx.degree_centrality(net)
    clust=[]

    if(net.is_multigraph()):
        net = analyser.flatGraph(net)

    # clustering and shortest paths are computed on the undirected view of the graph
    if nx.is_directed(net):
        net = net.to_undirected()

    clust = nx.clustering(net)
    paths = nx.shortest_path(net, source=None, target=None, weight=None)

    # path lengths in nodes, skipping each source's trivial path to itself
    lengths = [[len(p) for p in list(targets.values())[1:]] for targets in paths.values()]
    all_lengths=[]
    for a in lengths:
        all_lengths.extend(a)
    max_value=max(all_lengths)
    #all_lengths = [x / float(max_value) for x in all_lengths]

    return deg.values(),clust.values(),all_lengths
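A usage sketch under assumptions: `analyser` is only consulted for multigraphs, so None can be passed for a plain undirected graph, and nx.shortest_path with no source/target is assumed to return a dict of dicts (NetworkX 2.x behaviour):

# Hypothetical usage sketch; `analyser` is only used for multigraphs, so None works here.
import networkx as nx

net = nx.karate_club_graph()
degrees, clustering, path_lengths = calculate_network_measures(net, analyser=None)
print(max(path_lengths))            # longest shortest path, counted in nodes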
Example #4
def _create_auxiliary_digraph(G):
    """Initialize an auxiliary digraph and dict of infinite capacity
    edges for a given graph G.
    Ignore edges with capacity <= 0.
    """
    auxiliary = nx.DiGraph()
    infcapFlows = {}

    if nx.is_directed(G):
        for edge in G.edges(data = True):
            if 'capacity' in edge[2]:
                if edge[2]['capacity'] > 0:
                    auxiliary.add_edge(*edge)
            else:
                auxiliary.add_edge(*edge)
                infcapFlows[(edge[0], edge[1])] = 0
    else:
        for edge in G.edges(data = True):
            if 'capacity' in edge[2]:
                if edge[2]['capacity'] > 0:
                    auxiliary.add_edge(*edge)
                    auxiliary.add_edge(edge[1], edge[0], edge[2])
            else:
                auxiliary.add_edge(*edge)
                auxiliary.add_edge(edge[1], edge[0], edge[2])
                infcapFlows[(edge[0], edge[1])] = 0
                infcapFlows[(edge[1], edge[0])] = 0

    return auxiliary, infcapFlows
Example #5
def _create_flow_dict(G, H, infcapFlows, capacity="capacity"):
    """Creates the flow dict of dicts on G corresponding to the
    auxiliary digraph H and infinite capacity edges flows infcapFlows.
    """
    flowDict = {}

    for u in G.nodes_iter():
        if not u in flowDict:
            flowDict[u] = {}
        for v in G.neighbors(u):
            if H.has_edge(u, v):
                try:
                    flowDict[u][v] = abs(G[u][v][capacity] - H[u][v][capacity])
                except KeyError:  # (u, v) has infinite capacity
                    try:
                        flowDict[u][v] = H[v][u][capacity]
                    except KeyError:
                        try:  # Infinite capacity digon in the original graph.
                            if nx.is_directed(G):
                                flowDict[u][v] = max(infcapFlows[(u, v)] - infcapFlows[(v, u)], 0)
                            else:
                                flowDict[u][v] = abs(infcapFlows[(u, v)] - infcapFlows[(v, u)])
                        except KeyError:  # Zero flow
                            flowDict[u][v] = 0
            else:
                flowDict[u][v] = G[u][v][capacity]
    return flowDict
def attack_based_max_closeness(G):
    """ Recalcuat closeness attack
    """
    n = G.number_of_nodes()
    tot_ND = [0] * (n+1)
    tot_T = [0] * (n+1)

    ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = ND
    tot_T[0] = 0

    # repeatedly remove the edges of the highest-closeness node until no nodes remain
    for i in range(1, n+1):
        all_closeness = nx.closeness_centrality(G)
        # get node with max closeness
        node = max(all_closeness, key=all_closeness.get)

        # remove all the edges adjacent to node
        if not nx.is_directed(G):   # undirected graph
            for key in list(G[node].keys()):
                G.remove_edge(node, key)
        else:   # directed graph
            for x in [v for u, v in G.out_edges_iter(node)]:
                G.remove_edge(node, x)
            for x in [u for u, v in G.in_edges_iter(node)]:
                G.remove_edge(x, node)
        # calculate driver node number ND
        ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
        tot_ND[i] = ND
        tot_T[i]  = i
    return (tot_ND, tot_T)
def random_rewiring(network):
    """
    Rewires a pair of edges such that the degree sequence is preserved.

    Arguments:
        network => The input network.

    Returns:
        A network with one pair of edges randomly rewired.
    """

    # Don't terminate until the rewiring is performed.
    while True:

        # Snapshot the edge list of the network to avoid repeated computation.
        network_edges = list(network.edges())

        # If there isn't at least 1 edge, break out and return.
        if len(network_edges) == 0:
            break

        # Randomly select a link from the network.
        link1 = (source1, target1) = random.choice(network_edges)

        # Find all the edges that share no nodes with link1.
        disjoint_links = [link for link in network_edges if not any(node in link for node in link1)]

        # If there are no disjoint links, it would be impossible to randomize the network while
        # still preserving the degree sequence, so break out and return.
        if len(disjoint_links) == 0:
            break

        # Randomly select a DIFFERENT link from the network (no sharing of nodes allowed).
        link2 = (source2, target2) = random.choice(disjoint_links)

        # If the graph is directed, there is only one option.
        # If the graph is undirected, there are two options, each with a 50-50 chance.
        if not nx.is_directed(network) and random.random() < 0.5:

            # Rewire links A-B and C-D to A-C and B-D.
            new_link1 = (source1, source2)
            new_link2 = (target1, target2)

        else:

            # Rewire links A-B and C-D to A-D and C-B.
            new_link1 = (source1, target2)
            new_link2 = (source2, target1)

        # If the new links aren't in the network already, replace the old links with the new links.
        if not network.has_edge(*new_link1) and not network.has_edge(*new_link2):

            # Remove the old links.
            network.remove_edges_from([link1, link2])

            # Add the new links.
            network.add_edges_from([new_link1, new_link2])

            # Return the slightly altered network.
            return network
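A usage sketch (hypothetical, assuming a plain undirected nx.Graph) showing that repeated rewiring preserves the degree sequence:

# Hypothetical usage sketch: repeated rewiring keeps the degree sequence intact.
import random
import networkx as nx

random.seed(0)
g = nx.barabasi_albert_graph(50, 2, seed=1)
before = sorted(d for _, d in g.degree())
for _ in range(100):
    random_rewiring(g)              # mutates g in place
after = sorted(d for _, d in g.degree())
assert before == after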
Example #8
def estimate_joint_dist(graph, nsteps):
    assert(not nx.is_directed(graph))
    assert('labeled' in graph.graph and graph.graph['labeled'])

    n = nsteps  #total num seen
    n_iod = {}  #total seen with indeg i, outdeg o, deg d

    # random initial node; don't include in estimator
    node = random.choice(graph.nodes())
    
    # rw
    for i in range(nsteps):
        node = random.choice(list(nx.all_neighbors(graph, node)))
        iod_tuple = (graph.node[node]['in-degree'],
                     graph.node[node]['out-degree'],
                     graph.node[node]['degree'])
        n_iod[iod_tuple] = n_iod.get(iod_tuple,0) + 1

    # degree distribution parameters
    max_indeg  = max([graph.node[k]['in-degree'] for k in graph.node.keys()])
    max_outdeg = max([graph.node[k]['out-degree'] for k in graph.node.keys()])
    deg_par = np.zeros((max_indeg + 1, max_outdeg + 1))

    for (indeg, outdeg, deg) in n_iod.keys():
        val = n_iod[(indeg, outdeg, deg)]
        deg_par[indeg, outdeg] += float(val) / float(n * deg)
        #deg_par[indeg, outdeg] += float(val) / float(deg)

    # normalize
    deg_par /= deg_par.sum()

    np.savetxt("deg_par.csv", deg_par, delimiter=",")

    return deg_par
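A hypothetical setup sketch, assuming the example's module imports numpy as np and a NetworkX 1.x-style graph (graph.node access, nodes() returning a list); the per-node attributes the estimator expects are filled in from the undirected degree:

# Hypothetical setup sketch; assumes NetworkX 1.x and numpy imported as np.
import random
import networkx as nx

random.seed(0)
g = nx.barabasi_albert_graph(200, 3, seed=1)        # connected, undirected
g.graph['labeled'] = True
for v in g.nodes():
    d = g.degree(v)
    g.node[v].update({'in-degree': d, 'out-degree': d, 'degree': d})
deg_par = estimate_joint_dist(g, nsteps=1000)       # also writes deg_par.csv as a side effect
print(deg_par.shape)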
Example #9
    def prepare_visjs_data(g):
        nodes = []
        for node in g.nodes_iter():
            new = {'id': str(node),
                   'label': str(g.node[node]['label']) if 'label' in g.node[node] else str(node),
                   'shape': 'box'
            }
            nodes.append(new)

        edges = []
        for fromnode, tonode, etype in g.edges_iter(keys=True):
            if 'label' in g[fromnode][tonode][etype]:
                label = str(g[fromnode][tonode][etype]['label'])
            elif 'weight' in g[fromnode][tonode][etype]:
                label = str(g[fromnode][tonode][etype]['weight'])
            else:
                #label = str(etype)
                label = ''
            new = {'from': str(fromnode),
                   'to': str(tonode),
                   'label': label,
                   'color': {'color': 'black', 'highlight': 'blue', 'hover': 'blue'},
            }
            if nx.is_directed(g):
                new['arrows'] = 'to'
            edges.append(new)

        return {'nodes': nodes,
                'edges': edges}
Example #10
def DFS(G, s):
    cor = {}
    pred = {}
    d = {}
    f = {}

    tempo = 0

    for v in G.nodes():
        cor[v] = "branco"  # cores possíveis: branco cinza e preto
        pred[v] = None

    for v in G.nodes():
        if cor[v] == "branco":
            tempo = visit(G, v, cor, pred, d, f, tempo)

    H = nx.create_empty_copy(G)

    for v1, v2, data in G.edges(data=True):
        if (pred[v2] is v1) or (pred[v1] is v2 and not nx.is_directed(H)):
            H.add_edge(v1, v2, data)
            H.node[v1]["begin_time"] = d[v1]
            H.node[v2]["begin_time"] = d[v2]
            H.node[v1]["finish_time"] = f[v1]
            H.node[v2]["finish_time"] = f[v2]

    return H
Example #11
def Prim(G = nx.Graph(), R = None):
    # Q is the list of vertices that are not yet in the tree
    Q    = {}
    # pred will store the predecessor of each vertex
    pred = {}

    # Initialize Q with every vertex set to infinity, since at this point
    # there is no connection between any vertices yet. Likewise, no vertex
    # has a predecessor, so we use the value 'null'.
    for v,data in G.nodes(data=True):
        Q[v]    = n.inf
        pred[v] = 'null'

    # If an edge has no weight defined, assign it the value 1.0.
    # This is an alternative to the approach used in Kruskal, where a
    # variable tracks whether weights are being taken into account.
    for e,x in G.edges():
        if ('weight' not in G[e][x]):
            G[e][x]['weight'] = 1.0

    # Initialize the root of the tree with value 0, and create a tree called
    # MST containing only the vertices of G.
    Q[R] = 0.0
    MST  = nx.create_empty_copy(G)

    while Q:
        # u := index of the smallest element of Q,
        # since we want the vertex of minimum weight
        u = min(Q,key=Q.get)

        # remove it from Q, since it will be added to the tree
        del Q[u]

        # store the minimum weight of each neighbor of u in Q, whenever it is
        # smaller than the one already stored
        for vizinho in G[u]:
            if vizinho in Q:
                if G[u][vizinho]['weight'] < Q[vizinho]:
                    pred[vizinho] = u
                    Q[vizinho]    = G[u][vizinho]['weight']

        # If u has a predecessor, add the edge connecting u to its predecessor
        if pred[u] != 'null':
            for v1,v2,data in G.edges(data=True):
                # To preserve the edge data, this loop scans every edge of the
                # graph looking for the edge (pred(u), u). Since an undirected
                # graph in the library does not duplicate its edges in the edge
                # set (i.e. if (u, v) is present, (v, u) is not), for undirected
                # graphs we also have to check for the edge (u, pred(u))
                # instead of (pred(u), u).
                if ( v1 == pred[u] and v2 == u ):
                    MST.add_edge(pred[u],u,data)
                elif (  ( v1 == u and v2 == pred[u] ) and
                        ( not nx.is_directed(G) )  ):
                    MST.add_edge(pred[u],u,data)

    return MST
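A usage sketch under assumptions: the example's module imports networkx as nx and numpy as n, and a NetworkX 1.x install (the add_edge(u, v, data) call above passes the attribute dict positionally):

# Hypothetical usage sketch; assumes NetworkX 1.x and numpy imported as n.
import networkx as nx

g = nx.Graph()
g.add_weighted_edges_from([('a', 'b', 1.0), ('b', 'c', 2.0), ('a', 'c', 4.0)])
mst = Prim(g, R='a')
print(mst.edges())                  # expected: the two lightest edges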
Example #12
def _create_auxiliary_digraph(G, capacity="capacity"):
    """Initialize an auxiliary digraph and dict of infinite capacity
    edges for a given nxgraph G.
    Ignore edges with capacity <= 0.
    """
    auxiliary = nx.DiGraph()
    auxiliary.add_nodes_from(G)
    inf_capacity_flows = {}
    if nx.is_directed(G):
        for edge in G.edges(data=True):
            if capacity in edge[2]:
                if edge[2][capacity] > 0:
                    auxiliary.add_edge(*edge)
            else:
                auxiliary.add_edge(*edge)
                inf_capacity_flows[(edge[0], edge[1])] = 0
    else:
        for edge in G.edges(data=True):
            if capacity in edge[2]:
                if edge[2][capacity] > 0:
                    auxiliary.add_edge(*edge)
                    auxiliary.add_edge(edge[1], edge[0], edge[2])
            else:
                auxiliary.add_edge(*edge)
                auxiliary.add_edge(edge[1], edge[0], edge[2])
                inf_capacity_flows[(edge[0], edge[1])] = 0
                inf_capacity_flows[(edge[1], edge[0])] = 0

    return auxiliary, inf_capacity_flows
Example #13
def output_graph_sgf(G):
    print("# Simple Graph Format")
    print("# name:", G.name)

    if nx.is_directed(G):
        d = "d"
    else:
        d = "u"
    print(d, G.number_of_nodes(), G.number_of_edges())
    cnt = 0
    for node in G.nodes_iter(data=True):
        node_id = node[0]
        if node_id != cnt:
            raise ("non-consecutive node exception")
        # if 'weight' in node[1]:
        cnt += 1
        # print(G.out_edges([node[0]]))
        print(node_id, end="|")
        edges = []
        # edges = [str(edge[1]) for edge in nx.edges_iter(G, [node[0]])]
        for edge in G.edges_iter([node[0]], data="weight"):
            # print('edge', edge)
            src = edge[0]
            dst = edge[1]
            weight = edge[2]
            if src != node_id:
                raise ("invalid link")
            if weight != None:
                edges.append(str(dst) + ":" + str(weight))
            else:
                edges.append(str(dst))

        print(",".join(edges))
    if cnt != G.number_of_nodes():
        raise ("non-consecutive node exception")
Example #14
def scaling_mincostflow(G, s, t, capacity='capacity', weight='weight',
                        demand='demand', refine_scaling_constant=2):
    """Find a minimum cost flow solution using the push-relabel
    algorithm/successive approximation algorithm
    """
    if G.is_multigraph():
        raise nx.NetworkXError(
            'MultiGraph and MultiDiGraph not supported (yet).')
    if not nx.is_directed(G):
        raise nx.NetworkXError(
            'Undirected graphs are not supported (yet).')

    getcontext().prec = 28
    G_copy = nx.DiGraph(G)
    max_cost = max([G_copy[u][v][weight] for u,v in G_copy.edges_iter()])

    #initialization
    price='price'
    for v in G_copy:
        G_copy.node[v][price] = 0
    epsilon = Decimal(max_cost)
    
    _get_maxflow(G_copy, s, t, capacity)
    
    len_G = len(G_copy)
    tol = Decimal(1/Decimal(len_G))

    while epsilon >= tol:
        epsilon = _refine(epsilon, G_copy, capacity, weight, refine_scaling_constant, s, t)
    
    return _create_flow_dict(G, G_copy, t)
Example #15
    def __init__(
        self,
        graph,
        weight='weight',
        cap='capacity',
        ):
        """
        Constructor
        """

        self.wt = weight
        self.cap = cap
        self.g = graph
        self.pathHeap = []  # Use the heapq module functions heappush(pathHeap, item) and heappop(pathHeap, item)
        self.pathList = []  # Contains WeightedPath objects
        self.deletedEdges = set()
        self.deletedNodes = set()
        self.kPath = None

        # Make a copy of the graph tempG that we can manipulate

        if isinstance(graph, nx.Graph):

            # self.tempG = graph.copy()

            if nx.is_directed(graph):
                self.tempG = nx.DiGraph(graph)
            else:
                self.tempG = nx.Graph(graph)
        else:
            self.tempG = None
Example #16
def WC_model(G, a):                 # a: the set of initial active nodes
                                    # each edge from node u to v is assigned probability 1/in-degree(v) of activating v
    A = set(a)                      # A: the set of active nodes, initially a
    B = set(a)                      # B: the set of nodes activated in the last completed iteration
    converged = False
 
    if nx.is_directed(G):
        my_degree_function = G.in_degree
    else:
        my_degree_function = G.degree

    while not converged:
        nextB = set()
        for n in B:
            for m in set(G.neighbors(n)) - A:
                prob = random.random()	# in the range [0.0, 1.0)
                p = 1.0/my_degree_function(m)
                if prob <= p:
                    nextB.add(m)
        B = set(nextB)
        if not B:
            converged = True
        A |= B

    return len(A)
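A usage sketch: seed the weighted-cascade model with the two highest-degree nodes of a hypothetical test graph and average the spread over a few stochastic runs:

# Hypothetical usage sketch: average spread of the weighted-cascade model.
import random
import networkx as nx

random.seed(0)
g = nx.barabasi_albert_graph(200, 3, seed=7)
seeds = sorted(g.nodes(), key=g.degree, reverse=True)[:2]
spreads = [WC_model(g, seeds) for _ in range(10)]
print(sum(spreads) / len(spreads))  # expected spread, in nodes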
def attack_based_max_eigenvector(G):
    """ Recalculate eigenvector centrality attack
    """
    n = G.number_of_nodes()
    tot_ND = [0] * (n+1)
    tot_T = [0] * (n+1)

    ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = ND
    tot_T[0] = 0

    for i in range(1, n+1):
        # calculate all nodes' eigenvector centrality
        allEigenvectorCentrality = nx.eigenvector_centrality(G, max_iter=1000, weight=None)
        # get node with max eigenvector centrality       
        node = max(allEigenvectorCentrality, key=allEigenvectorCentrality.get)
        # remove all the edges adjacent to node
        if not nx.is_directed(G):   # undirected graph
            for key in list(G[node].keys()):
                G.remove_edge(node, key)
        else:   # directed graph
            for x in [v for u, v in G.out_edges_iter(node)]:
                G.remove_edge(node, x)
            for x in [u for u, v in G.in_edges_iter(node)]:
                G.remove_edge(x, node)
        ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
        tot_ND[i] = ND
        tot_T[i]  = i
    return (tot_ND, tot_T)
def single_discount_high_degree_nodes_gen(k, G):

    if nx.is_directed(G):
        my_degree_function = G.out_degree
    else:
        my_degree_function = G.degree

    D = []

    for i in range(k):
        # find the node of max out_degree, discounting any out-edge
        # to a node already in D
        maxoutdeg_i = -1
        v_i = -1
        for v in list(set(G.nodes()) - set(D)):
            outdeg = my_degree_function(v)
            for u in D:
                if G.has_edge(v, u):
                    outdeg -= 1
            if outdeg > maxoutdeg_i:
                maxoutdeg_i = outdeg
                v_i = v

        D.append(v_i)
        yield D
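A usage sketch (hypothetical test graph): iterate the generator to grow the seed set one node at a time and keep the final set:

# Hypothetical usage sketch: collect the final 5-node seed set.
import networkx as nx

g = nx.barabasi_albert_graph(100, 2, seed=3)
for seeds in single_discount_high_degree_nodes_gen(5, g):
    pass
print(seeds)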
Example #19
def add_graph(canvas: CanvasSelection,
              graph,
              weight: Union[str, None] = 'weight') -> CanvasSelection:
    """
    Adds all nodes and edges from a NetworkX graph to the given canvas.
    Edges will automatically set the :meth:`~graphics.EdgeSelection.directed`
    attribute and/or add a weight :meth:`~graphics.EdgeSelection.label`
    depending on the provided graph.

    :param canvas: The CanvasSelection onto which the graph should be added.
    :type canvas: :class:`~graphics.CanvasSelection`

    :param graph: The NetworkX graph
    :type graph: Any type of NetworkX graph

    :param weight: The name of the attribute which describes edge weight in the
        NetworkX graph. Edges without the attribute will not display a weight,
        and a value of ``None`` will prevent any weight from being displayed.
        Defaults to "weight".
    :type weight: Union[str, None]

    :return: The provided CanvasSelection with animations disabled, allowing
        initial attributes to be configured.
    :rtype: :class:`~graphics.CanvasSelection`
    """
    weighted_edges = []
    unweighted_edges = []
    for e in graph.edges:
        if weight in graph.edges[e]:
            weighted_edges.append(e)
        else:
            unweighted_edges.append(e)

    canvas.nodes(graph.nodes).add()

    if len(unweighted_edges) > 0:
        init_edges = canvas.edges(unweighted_edges).add()
        if is_directed(graph):
            init_edges.directed(True)

    if len(weighted_edges) > 0:
        init_edges = canvas.edges(weighted_edges).add()
        if is_directed(graph):
            init_edges.directed(True)
        init_edges.label().add().text(lambda e: graph.edges[e][weight])

    return canvas.duration(0)
Example #20
    def matchingVertices(self, graph, trie_node, nodes_used, states):
        candidates = []
        
        if not trie_node.areConditionsRespectedWeak(nodes_used):
            return candidates
        
        min_value = trie_node.getMinLabelForCurrentPos(nodes_used)

        if nodes_used == []:
            candidates = [x for x in graph.nodes() if x >= min_value]
        else:
            cand_graph = graph.to_undirected() if networkx.is_directed(graph) else graph
            connections = [set(cand_graph.neighbors(x)) for x in nodes_used]
            if trie_node.getGraph().degree(trie_node.getGraph().nodes()[len(nodes_used)]) == 0:
                connections.append(set([x for x, y in graph.degree_iter() if y == 0]))
            connections = list(set.union(*connections))
            connections = [x for x in connections if x >= min_value]
            candidates = [x for x in connections if x not in nodes_used]
            
            #Testing the space reduction
            #candidates.sort(key=lambda x: len(graph.neighbors(x)))
            #candidates = [x for x in candidates if len(graph.neighbors(x)) == len(graph.neighbors(candidates[0]))]
            #candidates = [x for x in candidates if x not in nodes_used]
            #candidates = []
            #if len(connections) > 0:
                #candidates = [x for x in graph.neighbors(connections[0]) if x not in nodes_used]
            
        vertices = []
        for node in candidates:
            cand_test = []
            test_nodes = copy.deepcopy(nodes_used)
            test_nodes.append(node)
            if states:
                if graph.node[node] == trie_node.getNodeStates()[len(nodes_used)]:
                    for i in range(0, len(trie_node.getInLinks())):
                        if ((trie_node.getInLinks()[i] == 1 and node in graph.edge[test_nodes[i]] and 
                             trie_node.getInLinkStates()[i] == graph.edge[test_nodes[i]][node]) or 
                            (trie_node.getInLinks()[i] == 0 and node not in graph.edge[test_nodes[i]])) and \
                            ((trie_node.getOutLinks()[i] == 1 and test_nodes[i] in graph.edge[node] and 
                              trie_node.getOutLinkStates()[i] == graph.edge[node][test_nodes[i]]) or
                            (trie_node.getOutLinks()[i] == 0 and test_nodes[i] not in graph.edge[node])):
                            cand_test.append(True)                    
                        else:
                            cand_test.append(False)
                    if False not in cand_test:
                        vertices.append(node)
            else:
                for i in range(0, len(trie_node.getInLinks())):
                    if ((trie_node.getInLinks()[i] == 1 and node in graph.edge[test_nodes[i]]) or 
                        (trie_node.getInLinks()[i] == 0 and node not in graph.edge[test_nodes[i]])) and \
                        ((trie_node.getOutLinks()[i] == 1 and test_nodes[i] in graph.edge[node]) or
                        (trie_node.getOutLinks()[i] == 0 and test_nodes[i] not in graph.edge[node])):
                        cand_test.append(True)                    
                    else:
                        cand_test.append(False)
                if False not in cand_test:
                    vertices.append(node)                
                     
        return vertices
Example #21
def evaluate(input_graph,
             lst,
             teleportation_vector,
             r,
             alpha=0.85,
             element='edge',
             max_iter=100,
             tol=1e-3):
    """
    evaluation
    :param input_graph: a networkx graph
    :param lst: list of selected edges/nodes, or the list of nodes in induced-subgraph
    :param teleportation_vector: teleportation vector
    :param r: ranking vector before elements removal
    :param alpha: damping factor
    :param element: edge, node, or subgraph
    :param max_iter: maximum number of iterations
    :param tol: tolerance
    :return: value for evaluation metric
    """
    graph = deepcopy(input_graph)
    directed = nx.is_directed(graph)
    r_change = list()
    if element == 'edge':
        graph.remove_edges_from(lst)
        r_change = power_method_left(graph,
                                     teleportation_vector,
                                     alpha=alpha,
                                     max_iter=max_iter,
                                     tol=tol)
    if element == 'node':
        if directed:
            edge_vertices = list(graph.out_edges(
                lst, data='weight', default=1)) + list(
                    graph.in_edges(lst, data='weight', default=1))
        else:
            edge_vertices = list(graph.edges(lst, data='weight', default=1))
        graph.remove_edges_from(edge_vertices)
        r_change = power_method_left(graph,
                                     teleportation_vector,
                                     alpha=alpha,
                                     max_iter=max_iter,
                                     tol=tol)
    if element == 'subgraph':
        subgraph = nx.subgraph(graph, lst)
        if directed:
            edge_subgraph = list(subgraph.out_edges(
                data='weight', default=1)) + list(
                    subgraph.in_edges(data='weight', default=1))
        else:
            edge_subgraph = list(subgraph.edges(data='weight', default=1))
        graph.remove_edges_from(edge_subgraph)
        r_change = power_method_left(graph,
                                     teleportation_vector,
                                     alpha=alpha,
                                     max_iter=max_iter,
                                     tol=tol)
    change = abs(norm(r) - norm(r_change))
    return change
Example #23
0
def adjacency_embedding(G, max_dim=2, elb=1, get_lcc=True, weightcol='weight', svd_seed=None):

    """
    Inputs
        G - A networkx graph
    Outputs
        eig_vectors - The scaled (or unscaled) eigenvectors
    """
    # if get_lcc==True:
    #     #print("extracting largest_connected_component")
    #     G_lcc = lcc_BNU.extract_lcc(G)
    # else:
    #     G_lcc = G.copy()

    # weightcolumn = weightcol
    # print("pass_to_ranks")

    # G_ptr = ptr.pass_to_ranks(G_lcc, weightcol=weightcolumn)
    
    # print ("diagonoal augmentation")
    # G_aug_ptr= cvec.diag_aug(G_ptr, weightcol=weightcolumn)

    sorted_vertex = sorted(G.nodes())
    A = nx.to_scipy_sparse_matrix(G, nodelist=sorted_vertex)
    
    row, col = A.shape
    n = min(row, col)

    #print ("spectral embedding into %d dimensions" %max_dim)
    
    U, Sigma, VT = randomized_svd(A, 
                              n_components=min(max_dim, n - 1),
                              n_iter=50,
                              random_state=svd_seed)

    #print ("dimension reduction (elbow selection)")
    rank_graph =  getElbows_BNU.getElbows(Sigma, n_elbows=elb)
    
    reduced_dim = rank_graph[(elb-1)]
    #print ("elbow is %d" %reduced_dim)
    s_sqrt = np.sqrt(Sigma) #[np.newaxis] Zeinab commented this out

    
    s_sqrt_dim_reduced = s_sqrt[:reduced_dim]
    U_dim_reduced = U[:, :reduced_dim ]
    VT_dim_reduced =VT[:reduced_dim, :]

    Xhat1 = np.multiply( s_sqrt_dim_reduced, U_dim_reduced)
          
    if not nx.is_directed(G):
        Xhat2 = np.array([]).reshape(Xhat1.shape[0],0)
    else:
        Xhat2 = np.multiply( np.transpose(VT_dim_reduced), s_sqrt_dim_reduced)
    Xhat = np.concatenate((Xhat1, Xhat2), axis=1)
    
    embedded = collections.namedtuple('embedded', 'X vertex_labels')
    result = embedded(X = Xhat, vertex_labels = sorted_vertex)

    return result
Example #24
    def train(self, G):
        self.G = G
        node2id = dict([(node, vid) for vid, node in enumerate(G.nodes())])
        self.is_directed = nx.is_directed(self.G)
        self.num_node = self.G.number_of_nodes()
        self.num_edge = G.number_of_edges()
        self.edges = [[node2id[e[0]], node2id[e[1]]] for e in self.G.edges()]

        id2node = dict(zip(node2id.values(), node2id.keys()))

        self.num_neigh = np.asarray([len(list(self.G.neighbors(id2node[i]))) for i in range(self.num_node)])
        self.neighbors = [[node2id[v] for v in self.G.neighbors(id2node[i])]
                          for i in range(self.num_node)]
        s = time.time()
        self.alias_nodes = {}
        self.node_weight = {}
        for i in range(self.num_node):
            unnormalized_probs = [G[id2node[i]][nbr].get("weight", 1.0) for nbr in G.neighbors(id2node[i])]
            norm_const = sum(unnormalized_probs)
            normalized_probs =  [float(u_prob)/norm_const for u_prob in unnormalized_probs]
            self.alias_nodes[i] = alias_setup(normalized_probs)
            self.node_weight[i] = dict(zip([node2id[nbr] for nbr in G.neighbors(id2node[i])],unnormalized_probs))

        t = time.time()
        print('alias_nodes', t-s)

        # run netsmf algorithm with multiprocessing and apply randomized svd
        print("number of sample edges ", self.num_round * self.num_edge * self.window_size)
        print("random walk start...")
        t0 = time.time()
        results = []
        pool = Pool(processes=self.worker)
        for i in range(self.worker):
            results.append(pool.apply_async(func=self._random_walk_matrix, args=(i,)))
        pool.close()
        pool.join()
        print('random walk time', time.time() - t0)

        matrix = sp.lil_matrix((self.num_node, self.num_node))
        A = sp.csr_matrix(nx.adjacency_matrix(self.G))
        degree = sp.diags(np.array(A.sum(axis=0))[0], format="csr")
        degree_inv = degree.power(-1)

        t1 = time.time()
        for res in results:
            matrix += res.get()
        print('number of nzz', matrix.nnz)
        t2 = time.time()
        print('construct random walk matrix time', time.time() - t1)

        L = sp.csgraph.laplacian(matrix, normed=False, return_diag=False)
        M = degree_inv.dot(degree - L).dot(degree_inv)
        M = M * A.sum() / self.negative
        M.data[M.data <= 1] = 1
        M.data = np.log(M.data)
        print('construct matrix sparsifier time', time.time() - t2)

        embedding = self._get_embedding_rand(M)
        return embedding
Example #25
 def test_graph_extract(self):
     """
     Make sure that extract graph does not error
     """
     G = self.reader.extract_graph()
     self.assertEqual(nx.number_of_nodes(G), 7)
     self.assertEqual(nx.number_of_edges(G), 6)
     self.assertFalse(nx.is_directed(G))
Example #26
def is_directed_graph(graph):
    """
    Get whether the graph has directed edges.

    :param graph: The graph.
    :return: True if the graph has directed edges.
    """
    return nx.is_directed(graph)
Example #27
 def extract_ipc(self, dependency_graph):
     if nx.is_directed(dependency_graph):
         self.log_handler.info("Extracting IPC...")
         lng_path = nx.dag_longest_path(dependency_graph)
         if self.log_output:
             self.log_handler.info("Longest path:%s\n" % str(lng_path))
         self.longest_path = str(lng_path)
         self.IPC = float(dependency_graph.order()) / (len(lng_path))
 def jaccard_coefficient(graph, author_osn_id, labeled_author_osn_id):
     if not nx.is_directed(graph):
         pair = [(author_osn_id, labeled_author_osn_id)]
         jaccard_coefficient_iterator = nx.jaccard_coefficient(graph, pair)
         jaccard_coefficient_score = LinkPredictionStaticFunctions.get_score_from_iterator(
             jaccard_coefficient_iterator)
         return jaccard_coefficient_score
     return 0
Example #29
 def __init__(self, p, q, num_runs, g=nx.gnm_random_graph(10000, 30000)):
     if not nx.is_directed(g):
         self.g = g.to_directed()
     else:
         self.g = g
     self.p = p
     self.q = q
     self.num_runs = num_runs
 def adamic_adar_index(graph, author_osn_id, labeled_author_osn_id):
     if not nx.is_directed(graph):
         pair = [(author_osn_id, labeled_author_osn_id)]
         adamic_adar_iterator = nx.adamic_adar_index(graph, pair)
         adamic_adar_score = LinkPredictionStaticFunctions.get_score_from_iterator(
             adamic_adar_iterator)
         return adamic_adar_score
     return 0
Example #31
    def dist(self, G1, G2):
        """A scalable approach to network similarity.

        A network similarity measure based on node signature distributions.

        Params
        ------

        G1, G2 (nx.Graph): two undirected networkx graphs to be compared.

        Returns
        -------

        dist (float): the distance between G1 and G2.

        """

        # NOTE: the measure only works for undirected
        # graphs. For now we will silently convert a
        # directed graph to be undirected.
        directed_flag = False
        if nx.is_directed(G1):
            G1 = nx.to_undirected(G1)
            directed_flag = True
        if nx.is_directed(G2):
            G2 = nx.to_undirected(G2)
            directed_flag = True

        if directed_flag:
            warnings.warn("Coercing directed graph to undirected.", RuntimeWarning)


        # find the graph node feature matrices
        G1_node_features = feature_extraction(G1)
        G2_node_features = feature_extraction(G2)

        # get the graph signature vectors
        G1_signature = graph_signature(G1_node_features)
        G2_signature = graph_signature(G2_node_features)

        # the final distance is the absolute canberra distance
        dist = abs(canberra(G1_signature, G2_signature))

        self.results['dist'] = dist

        return dist
 def _check_directedness(self, graph):
     """Checking the undirected nature of a single graph."""
     try:
         directed = nx.is_directed(graph)
         if directed:
             raise ValueError("Graph is directed. Please see requirements.")
     except:
         exit("Graph is directed. Please see requirements.")
Example #33
def disparity_filter(G, weight='weight'):
    from scipy import integrate
    '''
    Compute significance scores (alpha) for weighted edges in G as defined in Serrano et al. 2009
        Args
            G: Weighted NetworkX graph
        Returns
            Weighted graph with a significance score (alpha) assigned to each edge
        References
            M. A. Serrano et al. (2009) Extracting the Multiscale backbone of complex weighted networks. PNAS, 106:16, 
            pp. 6483-6488.
    '''

    if nx.is_directed(G):  # directed case
        N = nx.DiGraph()
        for u in G:

            k_out = G.out_degree(u)
            k_in = G.in_degree(u)

            if k_out > 1:
                sum_w_out = sum(np.absolute(G[u][v][weight]) for v in G.successors(u))
                for v in G.successors(u):
                    w = G[u][v][weight]
                    p_ij_out = float(np.absolute(w)) / sum_w_out
                    alpha_ij_out = 1 - (k_out - 1) * integrate.quad(lambda x: (1 - x) ** (k_out - 2), 0, p_ij_out)[0]
                    N.add_edge(u, v, weight=w, alpha_out=float('%.4f' % alpha_ij_out))

            elif k_out == 1 and G.in_degree(G.successors(u)[0]) == 1:
                # we need to keep the connection as it is the only way to maintain the connectivity of the network
                v = G.successors(u)[0]
                w = G[u][v][weight]
                N.add_edge(u, v, weight=w, alpha_out=0., alpha_in=0.)
                # there is no need to do the same for the k_in, since the link is built already from the tail

            if k_in > 1:
                sum_w_in = sum(np.absolute(G[v][u][weight]) for v in G.predecessors(u))
                for v in G.predecessors(u):
                    w = G[v][u][weight]
                    p_ij_in = float(np.absolute(w)) / sum_w_in
                    alpha_ij_in = 1 - (k_in - 1) * integrate.quad(lambda x: (1 - x) ** (k_in - 2), 0, p_ij_in)[0]
                    N.add_edge(v, u, weight=w, alpha_in=float('%.4f' % alpha_ij_in))
        return N

    else:  # undirected case
        B = nx.Graph()
        for u in G:
            k = len(G[u])
            if k > 1:
                sum_w = sum(np.absolute(G[u][v][weight]) for v in G[u])
                for v in G[u]:
                    w = G[u][v][weight]
                    p_ij = float(np.absolute(w)) / sum_w
                    alpha_ij = 1 - (k - 1) * integrate.quad(lambda x: (1 - x) ** (k - 2), 0, p_ij)[0]
                    B.add_edge(u, v, weight=w, alpha=float('%.4f' % alpha_ij))
            else:
                B.add_node(u)
        return B
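A usage sketch under assumptions: the example's module imports numpy as np and scipy is installed; edges whose alpha falls below a chosen threshold form the backbone:

# Hypothetical usage sketch; assumes numpy imported as np and scipy available.
import networkx as nx

g = nx.Graph()
g.add_weighted_edges_from([(0, 1, 5.0), (0, 2, 1.0), (0, 3, 1.0), (1, 2, 3.0)])
scored = disparity_filter(g)
backbone = nx.Graph()
backbone.add_edges_from((u, v, d) for u, v, d in scored.edges(data=True)
                        if d['alpha'] < 0.4)
print(backbone.edges(data=True))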
Example #34
def main():

    args = parse_args()
    args.directed = True

    seed = args.seed
    training_edgelist_dir = os.path.join(args.output,
                                         "seed={:03d}".format(seed),
                                         "training_edges")
    removed_edges_dir = os.path.join(args.output, "seed={:03d}".format(seed),
                                     "removed_edges")

    if not os.path.exists(training_edgelist_dir):
        os.makedirs(training_edgelist_dir, exist_ok=True)
    if not os.path.exists(removed_edges_dir):
        os.makedirs(removed_edges_dir, exist_ok=True)

    training_edgelist_fn = os.path.join(training_edgelist_dir, "edgelist.tsv")
    val_edgelist_fn = os.path.join(removed_edges_dir, "val_edges.tsv")
    val_non_edgelist_fn = os.path.join(removed_edges_dir, "val_non_edges.tsv")
    test_edgelist_fn = os.path.join(removed_edges_dir, "test_edges.tsv")
    test_non_edgelist_fn = os.path.join(removed_edges_dir,
                                        "test_non_edges.tsv")

    graph, _ = load_data(args)
    print("loaded dataset")
    assert nx.is_directed(graph)

    edges = list(graph.edges())
    print("enumerated edges")

    (training_edges, (val_edges, val_non_edges),
     (test_edges, test_non_edges)) = split_edges(graph,
                                                 edges,
                                                 seed,
                                                 val_split=0)

    print("number of val edges", len(val_edges), "number of val non edges",
          len(val_edges))
    print("number of test edges", len(test_edges), "number of test non edges",
          len(test_edges))

    N = len(graph)
    graph.remove_edges_from(val_edges +
                            test_edges)  # remove val and test edges
    assert len(nx.DiGraph(training_edges)) == N
    print("removed edges")

    nx.write_edgelist(graph,
                      training_edgelist_fn,
                      delimiter="\t",
                      data=["weight"])
    write_edgelist_to_file(val_edges, val_edgelist_fn)
    write_edgelist_to_file(val_non_edges, val_non_edgelist_fn)
    write_edgelist_to_file(test_edges, test_edgelist_fn)
    write_edgelist_to_file(test_non_edges, test_non_edgelist_fn)

    print("done")
Example #35
def from_nx(graph, any_hashable=True):
    """Create a graph from a NetworkX graph.

    :param graph: a graph
    :type graph: nx graph
    :param any_hashable: if true the returned graph uses the same objects as the nx graph,
        otherwise integers are used. In the latter case a renumbering is performed
        independently of any possible ordering in the original graph.
    :type any_hashable: boolean
    :returns: a new graph
    :type: jgrapht graph
    """
    try:
        import networkx as nx
    except ImportError:
        raise ImportError("NetworkX required")

    is_directed = nx.is_directed(graph)
    is_weighted = any("weight" in data for u, v, data in graph.edges(data=True))
    allowing_multiple_edges = isinstance(graph, (nx.MultiGraph, nx.MultiDiGraph))

    result = _create_graph(
        directed=is_directed,
        weighted=is_weighted,
        allowing_self_loops=True,
        allowing_multiple_edges=allowing_multiple_edges,
        any_hashable=any_hashable,
    )

    if any_hashable:
        # copy graph topology and attributes
        result.graph_attrs.update(**graph.graph)

        for v in graph.nodes:
            result.add_vertex(vertex=v)
            result.vertex_attrs[v].update(**graph.nodes[v])

        try: 
            for u, v, k in graph.edges(keys=True):
                e = result.add_edge(u, v)
                result.edge_attrs[e].update(**graph.edges[u,v,k])
        except TypeError:
            for u, v in graph.edges():
                e = result.add_edge(u, v)
                result.edge_attrs[e].update(**graph.edges[u,v])
    else: 
        # copy graph topology only
        vmap = {}
        for v in graph.nodes:
            vmap[v] = result.add_vertex()

        for u, v, d in graph.edges(data=True):
            if 'weight' in d: 
                result.add_edge(vmap[u], vmap[v], weight=d['weight'])    
            else: 
                result.add_edge(vmap[u], vmap[v])

    return result
Example #36
def print_node_edge_data(DG):
    print('#Source----Destination---weight#')
    if nx.is_directed(DG):
        for node in DG.nodes_iter():
            for item in DG.adjacency_iter():
                if node in item[1]:
                    print(str(item[0]) + '--->' + str(node) + ' : ' + str(item[1][node]))
    else:
        print('Not a Directed Graph')
 def preferential_attachment(graph, author_osn_id, labeled_author_osn_id):
     if not nx.is_directed(graph):
         pair = [(author_osn_id, labeled_author_osn_id)]
         preferential_attachment_iterator = nx.preferential_attachment(
             graph, pair)
         preferential_attachment_score = LinkPredictionStaticFunctions.get_score_from_iterator(
             preferential_attachment_iterator)
         return preferential_attachment_score
     return 0
 def common_neighbors(graph, author_osn_id, labeled_author_osn_id):
     if not nx.is_directed(graph):
         pair = [(author_osn_id, labeled_author_osn_id)]
         common_neighbors_iterator = nx.cn_soundarajan_hopcroft(
             graph, pair)
         common_neighbors_score = LinkPredictionStaticFunctions.get_score_from_iterator(
             common_neighbors_iterator)
         return common_neighbors_score
     return 0
Example #39
def scIndex(g, alpha=-1./2., _isWeightedCalc=True):
    res = 0.
    #res = np.zeros(4)
    weights = nx.get_edge_attributes(g, 'weight')

    for pair in weights.keys():
        if _isWeightedCalc:
            if nx.is_directed(g):
                res += weights[pair] * np.power(g.out_degree(pair[0]) + g.in_degree(pair[1]), alpha)
            else:
                res += weights[pair] * np.power(g.degree(pair[0]) + g.degree(pair[1]), alpha)
        else:
            if nx.is_directed(g):
                res += np.power(g.out_degree(pair[0]) + g.in_degree(pair[1]), alpha)
            else:
                res += np.power(g.degree(pair[0]) + g.degree(pair[1]), alpha)
    return res
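A usage sketch (assumes numpy imported as np in the example's module) on a small weighted graph, in both the weighted and unweighted variants:

# Hypothetical usage sketch; assumes numpy imported as np.
import networkx as nx

g = nx.Graph()
g.add_weighted_edges_from([(0, 1, 2.0), (1, 2, 1.0), (0, 2, 3.0)])
print(scIndex(g))                           # weighted s-index
print(scIndex(g, _isWeightedCalc=False))    # unweighted s-index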
Example #40
    def display_node_attributes(node):
        """Accepts a node, calculates various attributes, and formats it into a string"""

        text = G[node]["name"] if hasattr(G[node], "name") else "Node: " + str(node)

        if nx.is_directed(G):
            text += ", In Degree: " + str(G.in_degree[node])
            text += ", Out Degree: " + str(G.out_degree[node])
        else:
            text += "Degree: " + str(G.degree[node])

        eccentricity = max(nx.single_source_shortest_path_length(G, node).values())
        text += ", Eccentricity: " + str(eccentricity)

        if nx.is_directed(G):
            text += ", Reciprocity: {:.2f}".format(reciprocities[node])

        return text
Example #41
def makeAllDirected( *graphs ):
    """If any one of the input graphs is directed, make directed versions of all
    of the inputs and return them.  Otherwise, return the input unchanged."""
    someDirected = any(map(nx.is_directed, graphs))
    if someDirected:
        return [ ( g.to_directed() if (not nx.is_directed( g )) else g )
                 for g in graphs ]
    else:
        return graphs
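A usage sketch (hypothetical example): mixing one undirected and one directed input returns directed versions of both:

# Hypothetical usage sketch: both outputs come back directed.
import networkx as nx

g1 = nx.path_graph(3)                 # undirected
g2 = nx.DiGraph([(0, 1), (1, 2)])     # directed
d1, d2 = makeAllDirected(g1, g2)
print(nx.is_directed(d1), nx.is_directed(d2))   # True True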
Example #42
def draw_top_size_cliques(graph, top=10, layout="spring"):
    if nx.is_directed(graph):
        print("Err, only graph undirected")
        return
    print("Showing the top " + str(top) + " for size")
    top_n_size_cliques = []
    all_cliques = list(nx.algorithms.clique.enumerate_all_cliques(graph))
    cliques = []
    for i in range(len(all_cliques)):
        k = sorted(all_cliques[i], key=lambda j: j)
        if k not in cliques:
            cliques.append(k)

    s = []
    for i in cliques:
        s.append((i, len(i)))

    s = sorted(s, key=lambda i: i[1])
    s.reverse()
    max_size = int(s[0][1])
    print("Max Size: ", str(max_size))
    clique_for_size = dict()
    for i in range(1, max_size + 1):
        clique_for_size[i] = []
    for i in s:
        clique_for_size[i[1]] += i[0]

    if top > max_size:
        top = max_size
    a = []
    for i in range(max_size, max_size - top, -1):
        a += clique_for_size[i]

    for n in graph:
        if n in a and n not in top_n_size_cliques:
            top_n_size_cliques.append(n)
    graph = graph.subgraph(top_n_size_cliques)

    coords = layout_dealer(graph, layout)

    cliques = [
        clique for clique in nx.find_cliques(graph)
        if len(clique) >= max_size - top
    ]

    print("Number of Cliques: " + str(len(cliques)))

    for clique in cliques:
        if len(clique) > max_size - top:
            plt.figure()
            nx.draw(graph, pos=coords, with_labels=graph.nodes().values())
            print("Clique to appear: ", clique, " length: ", str(len(clique)))
            nx.draw_networkx_nodes(graph,
                                   pos=coords,
                                   nodelist=clique,
                                   node_color=next(colors))
            plt.show()
Example #43
 def __check_type(self):
     if isinstance(self.graph, nx.Graph):
         self.directed = nx.is_directed(self.graph)
         self.tp = 0
     elif ig is not None and isinstance(self.graph, ig.Graph):
         self.directed = self.graph.is_directed()
         self.tp = 1
     else:
         raise ValueError("Graph model not supported")
Example #44
def independent_numbers(G, nodes=None, seed=None):
    if not G:
        raise Exception("You must provice path graph value")
    if nx.is_directed(G):
        C = G.to_undirected()

    else:
        C = G
    return nx.maximal_independent_set(C, nodes=nodes, seed=seed)
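A usage sketch on a small cycle (hypothetical example; any maximal independent set is a valid result):

# Hypothetical usage sketch: a maximal independent set of a 6-cycle.
import networkx as nx

g = nx.cycle_graph(6)
print(independent_numbers(g))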
Example #45
def check_graph(G, **kwargs):
    out = True
    if 'node_number' in kwargs:
        if len(G) != kwargs['node_number']:
            out = False
    if 'directed' in kwargs:
        if nx.is_directed(G) != kwargs['directed']:
            out = False
    return out
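A usage sketch (hypothetical example) checking node count and directedness in one call:

# Hypothetical usage sketch for check_graph.
import networkx as nx

g = nx.path_graph(5)
print(check_graph(g, node_number=5, directed=False))   # True
print(check_graph(g, directed=True))                   # False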
def calculate_shortest_paths_lengths(graph: Union[nx.Graph, nx.DiGraph],
                                     shortest_paths_path: Union[str, Path]):
    if nx.is_directed(graph):
        graph = nx.DiGraph(graph)
    else:
        graph = nx.Graph(graph)

    serializer.save(dict(nx_sp.shortest_path_length(graph)),
                    shortest_paths_path)
Example #47
def signed_degrees(G, n):
    in_pos, in_neg = signed_in_degrees(G, n)
    pos = in_pos
    neg = in_neg
    if nx.is_directed(G):
        out_pos, out_neg = signed_out_degrees(G, n)
        pos += out_pos
        neg += out_neg
    return pos, neg
def deg_of_net(G):
    print('Is the karate club network graph directed ? : ', nx.is_directed(G))
    all_degrees = []  # list containing the degree of each individual node
    for i in nx.degree(G):
        all_degrees.append(i[1])
    sum_degrees = 0  # sum of the degrees of all nodes
    for i in all_degrees:
        sum_degrees = sum_degrees + i
    print('Average degree of the network is : ', sum_degrees / nx.number_of_nodes(G))
Example #49
def Dijkstra(G, s):
    Lambda = {}             # "weight" values for each vertex
    pred = {}               # predecessors
    Q = list(G.nodes())     # vertices that are not yet in the
                            # shortest-path tree

    # initialize every lambda with infinity
    for v in G.nodes():
        Lambda[v] = n.inf

    # If an edge has no weight defined, assign it the value 1
    for v1,v2 in G.edges():
        if ('weight' not in G[v1][v2]):
            G[v1][v2]['weight'] = 1

    Lambda[s] = 0
    pred[s] = None

    while Q:

        # find the smallest Lambda value among the vertices in Q
        menor = n.inf
        u     = Q[0]
        if sys.version_info[0] < 3:
            for k,v in Lambda.iteritems():
                if (v < menor) and (k in Q):
                    menor = v
                    u = k
        else:
            for k,v in Lambda.items():
                if (v < menor) and (k in Q):
                    menor = v
                    u = k

        # remove the item from Q, since it is being inserted into the tree
        u_index = Q.index(u)
        del Q[u_index]

        # walk through the neighborhood of u looking for smaller weights
        for v in G[u]:
            if (v in Q) and (Lambda[v] > Lambda[u] + G[u][v]['weight']):
                Lambda[v] = Lambda[u] + G[u][v]['weight']
                pred[v] = u

    # Create a new empty graph of the same type as G
    H = nx.create_empty_copy(G)

    # Add the edges according to the data from G
    for v1,v2,data in G.edges(data=True):
        if (pred[v2] == v1) or (pred[v1] == v2 and not nx.is_directed(H)):
            H.add_edge( v1, v2, data )
            H.node[v1]['lambda'] = Lambda[v1]
            H.node[v2]['lambda'] = Lambda[v2]

    # Return the predecessor tree annotated with Lambda[v] on each vertex
    return H
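A usage sketch under assumptions: the example's module imports networkx as nx, numpy as n and sys, and a NetworkX 1.x install (the add_edge(v1, v2, data) and H.node calls above are 1.x-style):

# Hypothetical usage sketch; assumes NetworkX 1.x and numpy imported as n.
import networkx as nx

g = nx.Graph()
g.add_weighted_edges_from([('s', 'a', 1), ('a', 'b', 2), ('s', 'b', 5)])
tree = Dijkstra(g, 's')
print(tree.edges())                 # shortest-path tree rooted at 's'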
    def iteration(self, node_status=True):
        """
        Execute a single model iteration

        :return: Iteration_id, Incremental node status (dictionary node->status)
        """
        self.clean_initial_status(self.available_statuses.values())
        actual_status = {node: nstatus for node, nstatus in future.utils.iteritems(self.status)}

        if self.actual_iteration == 0:
            self.actual_iteration += 1
            delta, node_count, status_delta = self.status_delta(actual_status)
            if node_status:
                return {"iteration": 0, "status": actual_status.copy(),
                        "node_count": node_count.copy(), "status_delta": status_delta.copy()}
            else:
                return {"iteration": 0, "status": {},
                        "node_count": node_count.copy(), "status_delta": status_delta.copy()}

        for u in self.graph.nodes():
            if self.status[u] != 1:
                continue

            neighbors = list(self.graph.neighbors(u))  # neighbors and successors (in DiGraph) produce the same result

            # Standard threshold
            if len(neighbors) > 0:
                threshold = 1.0/len(neighbors)

                for v in neighbors:
                    if actual_status[v] == 0:
                        key = (u, v)

                        # Individual specified thresholds
                        if 'threshold' in self.params['edges']:
                            if key in self.params['edges']['threshold']:
                                threshold = self.params['edges']['threshold'][key]
                            elif (v, u) in self.params['edges']['threshold'] and not nx.is_directed(self.graph):
                                threshold = self.params['edges']['threshold'][(v, u)]

                        flip = np.random.random_sample()
                        if flip <= threshold:
                            actual_status[v] = 1

            actual_status[u] = 2

        delta, node_count, status_delta = self.status_delta(actual_status)
        self.status = actual_status
        self.actual_iteration += 1

        if node_status:
            return {"iteration": self.actual_iteration - 1, "status": delta.copy(),
                    "node_count": node_count.copy(), "status_delta": status_delta.copy()}
        else:
            return {"iteration": self.actual_iteration - 1, "status": {},
                    "node_count": node_count.copy(), "status_delta": status_delta.copy()}
Example #51
0
 def _decompress(self, focus_frame, change_frame):
     """ Decompression function that takes the compressed graph: changeFrame, and "unpacks" it
     into the focusFrame.
     """
     # Loop over the nodes in the compressed frame
     for nodes in change_frame.nodes_iter():
         change = change_frame.node[nodes].pop(compressState.tag)
         # If the change state is 'Added', add node
         if change == compressState.added:
             add_node = nodes
             if nodes in focus_frame.nodes_iter():
                 add_node = max(focus_frame.nodes())+1
                 while add_node in change_frame.node:
                     add_node += 1
                 update_map = {nodes: add_node}
                 focus_frame.add_node(add_node, change_frame.node[nodes])
                 change_frame = nx.relabel_nodes(change_frame, update_map)
             else:
                 focus_frame.add_node(add_node, change_frame.node[nodes])
         # If the change state is 'Deleted', delete node
         elif change == compressState.deleted:
             focus_frame.remove_node(nodes)
         # If the change state is 'StateChanged', update with changed state
         elif change == compressState.stateChange:
             state_name = change_frame.node[nodes][compressState.stateChangedName]
             assert (focus_frame.node[nodes][state_name] == change_frame.node[nodes][compressState.stateChangedFrom])
             focus_frame.node[nodes][state_name] = change_frame.node[nodes][compressState.stateChangedTo]
     
     processed = []
     # Loop over the edges in the compressed frame
     for edges in change_frame.edges_iter():
         # Extract start and end nodes for the edge
         start = edges[0]
         end = edges[1]
         change = change_frame.edge[start][end].pop(compressState.tag)
         
         #skip undirected edges that have already been changed
         if not nx.is_directed(focus_frame) and (end, start) in processed:
             continue
         
         # Was this edge added
         if change == compressState.added:
             focus_frame.add_edge(start, end, change_frame.edge[start][end])
         # Or was it deleted
         elif change == compressState.deleted:
             # If it was deleted check to make sure the deletion of the nodes didn't already
             # clean the edges
             if start in focus_frame.edge and end in focus_frame.edge[start]:
                 focus_frame.remove_edge(start, end)
          # If the change state is 'StateChanged', update with changed state
         elif change == compressState.stateChange:
             state_name = change_frame.edge[start][end][compressState.stateChangedName]
             assert (focus_frame.edge[start][end][state_name] == change_frame.edge[start][end][compressState.stateChangedFrom])
             focus_frame.edge[start][end][state_name] = change_frame.edge[start][end][compressState.stateChangedTo]
         processed.append((start, end))
def disparity_filter(G, weight='weight'):
    ''' Compute significance scores (alpha) for weighted edges in G as defined in Serrano et al. 2009
        Args
            G: Weighted NetworkX graph
        Returns
            Weighted graph with a significance score (alpha) assigned to each edge
        References
            M. A. Serrano et al. (2009) Extracting the Multiscale backbone of complex weighted networks. PNAS, 106:16, pp. 6483-6488.
    '''
    
    if nx.is_directed(G): #directed case    
        N = nx.DiGraph()
        for u in G:
            
            k_out = G.out_degree(u)
            k_in = G.in_degree(u)
            
            if k_out > 1:
                sum_w_out = sum(np.absolute(G[u][v][weight]) for v in G.successors(u))
                for v in G.successors(u):
                    w = G[u][v][weight]
                    p_ij_out = float(np.absolute(w))/sum_w_out
                    alpha_ij_out = 1 - (k_out-1) * integrate.quad(lambda x: (1-x)**(k_out-2), 0, p_ij_out)[0]
                    N.add_edge(u, v, weight = w, alpha_out=float('%.4f' % alpha_ij_out))
                    
            elif k_out == 1 and G.in_degree(G.successors(u)[0]) == 1:
                #we need to keep the connection as it is the only way to maintain the connectivity of the network
                v = G.successors(u)[0]
                w = G[u][v][weight]
                N.add_edge(u, v, weight = w, alpha_out=0., alpha_in=0.)
                #there is no need to do the same for the k_in, since the link is built already from the tail
            
            if k_in > 1:
                sum_w_in = sum(np.absolute(G[v][u][weight]) for v in G.predecessors(u))
                for v in G.predecessors(u):
                    w = G[v][u][weight]
                    p_ij_in = float(np.absolute(w))/sum_w_in
                    alpha_ij_in = 1 - (k_in-1) * integrate.quad(lambda x: (1-x)**(k_in-2), 0, p_ij_in)[0]
                    N.add_edge(v, u, weight = w, alpha_in=float('%.4f' % alpha_ij_in))
        return N
    
    else: #undirected case
        B = nx.Graph()
        for u in G:
            k = len(G[u])
            if k > 1:
                sum_w = sum(np.absolute(G[u][v][weight]) for v in G[u])
                for v in G[u]:
                    w = G[u][v][weight]
                    p_ij = float(np.absolute(w))/sum_w
                    alpha_ij = 1 - (k-1) * integrate.quad(lambda x: (1-x)**(k-2), 0, p_ij)[0]
                    B.add_edge(u, v, weight = w, alpha=float('%.4f' % alpha_ij))
        return B
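
# Minimal usage sketch (an assumption, not part of the original snippet): score
# the edges of a small weighted graph, then keep only edges whose alpha falls
# below a cutoff, the usual way the disparity filter extracts a backbone. The
# 0.3 cutoff is purely illustrative; the imports are the ones the function
# above relies on.
import networkx as nx
import numpy as np
from scipy import integrate

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 10.0), (0, 2, 1.0), (0, 3, 1.0),
                           (1, 2, 1.0), (2, 3, 5.0), (1, 3, 1.0)])
scored = disparity_filter(G, weight='weight')
backbone = nx.Graph()
backbone.add_edges_from((u, v, d) for u, v, d in scored.edges(data=True)
                        if d['alpha'] < 0.3)
print(G.number_of_edges(), '->', backbone.number_of_edges(), 'edges kept')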
def high_degree_nodes_gen(k, G):

    if nx.is_directed(G):
        my_degree_function = G.out_degree
    else:
        my_degree_function = G.degree

    V = [(my_degree_function(i), i) for i in G.nodes()]
    V.sort(reverse=True)
    N = [t[1] for t in V]

    for i in range(1, min(k, len(N)) + 1):
        yield N[:i]
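
# Minimal usage sketch (an assumption, not part of the original snippet): the
# generator yields growing prefixes of the degree-ranked node list, i.e. the
# top-1, top-2, ..., top-k nodes by (out-)degree.
import networkx as nx
G = nx.karate_club_graph()
for top_nodes in high_degree_nodes_gen(3, G):
    print(top_nodes)   # [33], then [33, 0], then [33, 0, 32] for the karate club graph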
Example #54
0
def choose_most_inter_used_nodes(G, I, node_cnt, role, secondary_sort, seed=None):
    # select nodes with the specified role from graph G and create a list of tuples with their in-degree in graph I
    rank_node_pairs = list()
    for node in G.nodes():
        node_role = G.node[node]['role']
        if node_role == role:
            if nx.is_directed(I):
                rank = I.in_degree(node)
            else:
                rank = I.degree(node)
            rank_node_pairs.append((rank, node))

    return choose_nodes_by_rank(rank_node_pairs, node_cnt, secondary_sort, seed)
Example #55
0
def _create_auxiliary_anti_parallel_digraph(G, capacity, preflow='preflow'):
    """
    Converts the input graph to an anti-parallel digraph which is
    amenable to simple implementations of the push-relabel
    algorithm variants.
    """
    if not nx.is_directed(G):
        G_copy = nx.DiGraph()
        #Transform graph to anti-parallel digraph
        for u, v in G.edges_iter():
            auxiliary_node = str(u)+'-'+str(v)
            if capacity in G.get_edge_data(u,v):
                edge_cap = G[u][v][capacity]
                G_copy.add_edge(u,v)
                G_copy.add_edge(v,auxiliary_node)
                G_copy.add_edge(auxiliary_node,u)
                G_copy[u][v][capacity] = edge_cap
                G_copy[v][auxiliary_node][capacity] = edge_cap
                G_copy[auxiliary_node][u][capacity] = edge_cap
            else:
                G_copy.add_edge(u,v)
                G_copy.add_edge(v,auxiliary_node)
                G_copy.add_edge(auxiliary_node,u)
            G_copy[u][v][preflow] = 0
            G_copy[v][auxiliary_node][preflow] = 0
            G_copy[auxiliary_node][u][preflow] = 0
    else:
        G_copy = nx.DiGraph()
        for u, v in G.edges_iter():
            if G_copy.has_edge(v,u):
                auxiliary_node = str(u)+'-'+str(v)
                if capacity in G.get_edge_data(u,v):
                    edge_cap = G[u][v][capacity]
                    G_copy.add_edge(u,auxiliary_node)
                    G_copy.add_edge(auxiliary_node,v)
                    G_copy[u][auxiliary_node][capacity] = edge_cap
                    G_copy[auxiliary_node][v][capacity] = edge_cap
                else:
                    G_copy.add_edge(u,auxiliary_node)
                    G_copy.add_edge(auxiliary_node,v)
                G_copy[u][auxiliary_node][preflow] = 0
                G_copy[auxiliary_node][v][preflow] = 0
            else:
                if capacity in G.get_edge_data(u,v):
                    G_copy.add_edge(u, v)
                    G_copy[u][v][capacity] = G[u][v][capacity]
                else:
                    G_copy.add_edge(u, v)
                G_copy[u][v][preflow] = 0
    return G_copy
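
# Minimal usage sketch (an assumption, not part of the original snippet; it
# relies on the NetworkX 1.x edges_iter API used above): a single undirected
# capacitated edge becomes the three directed edges of the anti-parallel gadget.
import networkx as nx
G = nx.Graph()
G.add_edge('s', 't', capacity=4)
D = _create_auxiliary_anti_parallel_digraph(G, capacity='capacity')
print(sorted(D.edges()))   # [('s', 't'), ('s-t', 's'), ('t', 's-t')]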
def compute_normalized_triad_motif_z_scores(network, num_rand_instances=10, num_rewirings=None):
    """
    Computes the normalized triad motif z-score for each connected non-isomorphic triadic subgraph
    in the input network.

    Arguments:
        network => The input network (can be directed or undirected).
        num_rand_instances => The number of randomly-rewired network instances used when computing
        z-score values.
        num_rewirings => The number of edge rewirings performed when randomizing the network.

    Returns:
        A fixed-size numpy array where each index corresponds to a predefined unique triad motif
        and where the value at each index represents the normalized z-score for the average
        over- or underexpression of a triad motif in the network.
    """

    # Determine if the network is directed or not (store to avoid recalculation).
    directed = nx.is_directed(network)

    # Count the number of occurrences of each triad motif.
    original_motif_counts = count_triad_motifs(network, directed=directed)

    # Initialize an array for storing motif counts in randomized instances.
    rand_motif_counts = []

    # Iterate through random instances.
    for _ in range(num_rand_instances):

        # Randomize the network.
        rand_network = randomize(network, num_rewirings=num_rewirings)

        # Store the number of occurrences of each motif in the randomized instance.
        rand_motif_counts.append(count_triad_motifs(rand_network, directed=directed))

    # Stack the counts as an array.
    rand_motif_counts = np.vstack(rand_motif_counts)

    # Divide the random motif counts by the number of instances to make them into average counts.
    avg_rand_motif_counts = np.mean(rand_motif_counts, axis=0)

    # Compute the random motif standard deviation.
    rand_motif_std_dev = np.std(rand_motif_counts, axis=0)

    # Compute the z-scores (ignoring division-by-0 warnings and replacing the resulting NaNs/infs with 0s).
    with np.errstate(divide="ignore", invalid="ignore"):
        motif_z_scores = (original_motif_counts - avg_rand_motif_counts) / rand_motif_std_dev
        motif_z_scores[motif_z_scores == np.inf] = 0
        motif_z_scores = np.nan_to_num(motif_z_scores)

    return motif_z_scores
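
# Minimal usage sketch (an assumption, not part of the original snippet): the
# helpers count_triad_motifs() and randomize() referenced above must be
# available in the same module for this to run.
import networkx as nx
net = nx.gnp_random_graph(100, 0.05, seed=1, directed=True)
z = compute_normalized_triad_motif_z_scores(net, num_rand_instances=5)
print(z)   # one z-score per predefined triad motif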
def attack_based_max_betweenness(G):
    """ Recalculate betweenness attack

    Basic Idea:
    ----------
    Each step, remove the node with the maximum betweenness centrality, then
    recalculate the betweenness of all nodes. Note that "removing" a node here
    only means removing all the edges adjacent to it.

    Parameters:
    ----------
    G: graph (directed or undirected)

    Returns:
    -------
    tot_ND:     the number of driver nodes after every node removed
    tot_T:      the number of removed nodes
    Max_Betweenness_Zero_T: the number of removed nodes after which
                            every node's betweenness centrality is zero
    """
    n = G.number_of_nodes()
    tot_ND = [0] * (n+1)
    tot_T = [0] * (n+1)

    ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = ND
    tot_T[0] = 0

    # remember when all the betweenness have been zero for all nodes
    Max_Betweenness_Zero_T = -1
    for i in range(1, n+1):
        all_betweenness = nx.betweenness_centrality(G)
        # get node with max betweenness       
        node = max(all_betweenness, key=all_betweenness.get)
        if Max_Betweenness_Zero_T == -1 and abs(all_betweenness[node] - 0.0) < 1E-8:
            Max_Betweenness_Zero_T = i
        
        # remove all the edges adjacent to node
        if not nx.is_directed(G):   # undirected graph
            for key in list(G[node].keys()):  # copy the keys: we mutate G[node] while looping
                G.remove_edge(node, key)
        else:   # directed graph
            for x in [v for u, v in G.out_edges_iter(node)]:
                G.remove_edge(node, x)
            for x in [u for u, v in G.in_edges_iter(node)]:
                G.remove_edge(x, node)
        # calculate driver node number ND
        ND, ND_lambda = ECT.get_number_of_driver_nodes(G)
        tot_ND[i] = ND
        tot_T[i]  = i
    return (tot_ND, tot_T, Max_Betweenness_Zero_T)
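
# Minimal usage sketch (an assumption, not part of the original snippet; it
# relies on the ECT helper module and the NetworkX 1.x *_edges_iter API used
# above). The function strips edges from G in place, so a copy is passed in.
import networkx as nx
G = nx.gnp_random_graph(30, 0.15, seed=7, directed=True)
tot_ND, tot_T, zero_T = attack_based_max_betweenness(G.copy())
print(zero_T)   # step at which the maximum betweenness first drops to zero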
Example #58
0
def addNode_degree(G):
    """
    Add strength, general and specific degree of nodes
    :param G: directed or undirected Graph
    :return: G
    """
    # add strength degree
    for n in G.nodes():
        G.node[n]['N'] = G.degree(n, weight='weight')
        G.node[n]['n'] = float(G.degree(n))
        if G.node[n]['N'] <= 0.0:
            raise TypeError('found an isolated node or a non-positive weight: {}-{}'.format(n, G.node[n]['label']))
    #--------------
    if nx.is_directed(G):
        G_nei_iter = PF.genChain(G.successors_iter, G.predecessors_iter)
    else:
        G_nei_iter = G.neighbors_iter
    def getMaxMinStrength():
        node = G.nodes_iter().next()
        max_S = G.node[node]['N']
        min_S = G.node[node]['N']
        for n in G.nodes_iter():
            s = G.node[n]['N']
            if s > max_S:
                max_S = s
            if s < min_S:
                min_S = s
        return max_S,min_S
    def getNeiStrength(x):
        s=0
        for n in G_nei_iter(x):
            s = s + G.node[n]['N']
        return s

    maxS,minS = getMaxMinStrength()
    arrayForpercentile = [ G.node[n]['N'] for n in G.nodes() ]+[maxS]
    percdict = PF.listtopercentiles( arrayForpercentile )

    # calculate general and specific degree
    for n in G.nodes():
        strength = G.node[n]['N']
        #general degree
        G.node[n]['G_r'] = strength / ( getNeiStrength(n) + 0.1 )
        G.node[n]['G_n'] = PF.scaling( maxS + 0.1, minS - 0.1, strength)
        G.node[n]['G_p'] = percdict[strength]
        #specific degree
        G.node[n]['SP_r'] = 1.0 - G.node[n]['G_r']
        G.node[n]['SP_n'] = 1.0 - G.node[n]['G_n']
        G.node[n]['SP_p'] = 1.0 - G.node[n]['G_p']
    return G
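
# Minimal usage sketch (an assumption, not part of the original snippet; it
# relies on the PF helper module and the Python 2 / NetworkX 1.x APIs used
# above). Every node needs a 'label' attribute and positive edge weights.
import networkx as nx
G = nx.Graph()
G.add_edge('a', 'b', weight=2.0)
G.add_edge('b', 'c', weight=1.0)
for node in G.nodes():
    G.node[node]['label'] = node
G = addNode_degree(G)
print(G.node['b']['G_r'], G.node['b']['SP_r'])   # general vs. specific degree share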