示例#1
0
def betweenComDetect(G1):
    """Girvan-Newman community detection.

    Repeatedly removes the highest-betweenness edge from a copy of
    ``G1``, keeping the connected-component split with the best
    modularity as scored by the module-level ``__modularity__`` helper.

    Parameters
    ----------
    G1 : networkx.Graph
        Input graph; it is copied and never mutated.

    Returns
    -------
    dict
        Mapping of node -> index of its community in the best partition.
    """
    G = G1.copy()
    best_mod = -1000000
    # Fall back to the trivial partition so an edgeless input does not
    # raise NameError (the original left `com2` unbound in that case).
    best_partition = list(nx.connected_components(G))
    while G.number_of_edges() > 0:
        cur_bet = nx.edge_betweenness(G)
        # The edge with the highest betweenness is the best cut candidate;
        # max(key=...) replaces the Counter(...).most_common() detour.
        edge = max(cur_bet, key=cur_bet.get)
        G.remove_edge(*edge)
        partition = list(nx.connected_components(G))
        mod = __modularity__(G1, partition)
        if mod > best_mod:
            best_mod = mod
            best_partition = partition
    return {x: i for i in range(len(best_partition)) for x in best_partition[i]}
def dhc(Gc):
    """Divisive hierarchical clustering by betweenness-based edge removal.

    Starting from a copy of ``Gc``, repeatedly cuts the edge with the
    highest betweenness until every node is its own component, recording
    the node partition (and a graph snapshot) each time a cut actually
    splits a component.

    Returns (level -> graph snapshot, level -> list of node lists).
    """
    work = Gc.copy()
    level_partitions = {}
    level_graphs = {}
    level = 1
    # Level 1 is the untouched graph: one node list per component.
    components = nx.connected_component_subgraphs(work)
    level_partitions[level] = [sg.nodes() for sg in components]
    while nx.number_connected_components(work) != work.number_of_nodes():
        # Edge betweenness picks the best edge to remove next.
        betweenness = nx.edge_betweenness(work)
        (u, v), _score = max(betweenness.items(), key=operator.itemgetter(1))
        before = nx.number_connected_components(work)
        work.remove_edge(u, v)
        # Only record a new level when the removal split a component.
        if nx.number_connected_components(work) == before:
            continue
        level += 1
        components = nx.connected_component_subgraphs(work)
        level_partitions[level] = [sg.nodes() for sg in components]
        level_graphs[level] = work.copy()

    return level_graphs, level_partitions
def HeadTailCommunityDetection(G, finaledgelist):
    """Head/tail-breaks community detection.

    For each connected component of ``G``, keeps the "tail" edges (those
    with at-most-mean betweenness) in ``finaledgelist`` when the tail is
    small enough, otherwise recurses into the tail subgraph.

    Parameters
    ----------
    G : networkx.Graph
    finaledgelist : list
        Accumulator of accepted edges; mutated in place.
    """
    for subgraph in nx.connected_component_subgraphs(G):
        result = nx.edge_betweenness(subgraph, False, None)
        mean = getMean(list(result.values()))
        edgetemp = subgraph.edges()
        if len(edgetemp) <= 2:
            # Too small to split further: keep every edge as-is.
            for edge in edgetemp:
                finaledgelist.append(edge)
        else:
            # "Tail" = edges with no more than the mean betweenness.
            # Iterating items() replaces the original positional indexing
            # into .keys()/.values(), which fails on Python 3 dict views.
            edgelist = [edge for edge, value in result.items() if value <= mean]
            # change the head/tail division rule here: 0.6 is the tail
            # percentage, i.e. a 40/60 head/tail split.
            if float(len(edgelist)) / float(len(result)) <= 0.6:
                for edge in edgelist:
                    finaledgelist.append(edge)
            else:
                Gsub = nx.Graph()
                for edge in edgelist:
                    Gsub.add_edge(edge[0], edge[1])
                HeadTailCommunityDetection(Gsub, finaledgelist)
示例#4
0
    def execute(self):
        """Girvan-Newman loop: repeatedly delete the max-betweenness edge
        from self._G, tracking the partition with the best modularity Q
        and printing progress after every round."""
        round_no = 1
        while len(self._G.edges()) != 0:
            # Locate and remove the edge with the largest betweenness.
            betweenness = nx.edge_betweenness(self._G)
            edge = max(betweenness.items(), key=lambda item: item[1])[0]
            self._G.remove_edge(edge[0], edge[1])
            components = [list(c) for c in nx.connected_components(self._G)]
            cur_Q = 0
            if len(components) != len(self._partition):
                cur_Q = cal_Q(components, self._G_cloned)
                # Remember the best-scoring split seen so far.
                if cur_Q > self._max_Q:
                    self._max_Q = cur_Q
                    self._partition = components
                    iterRound = round_no
                    writeClu(self._G, self._partition)
            print("该轮结束的划分结果:" + str(self._partition))
            print("该轮结束时的模块度Q:" + str(cur_Q))
            print("################第" + str(round_no) + "轮结束##################")
            round_no += 1

        print(self._max_Q)
        print(self._partition)
示例#5
0
    def test_networkx_roundtrip(self):
        """Round-trip a random small-world graph through
        util.from_networkx / util.to_networkx and assert that node and
        edge counts, edge endpoints and node attributes all survive.

        NOTE(review): relies on the pre-2.0 NetworkX API throughout
        (nx.edge_betweenness, set_node_attributes(G, name, values),
        g.node[...] access) -- confirm the pinned networkx version.
        """
        print("\n---------- NetworkX Data Roundtrip Test Start -----------\n")

        g = nx.newman_watts_strogatz_graph(100, 3, 0.5)
        nodes = g.nodes()
        edges = g.edges()

        # Add some attributes
        g.graph["name"] = "original"
        g.graph["density"] = nx.density(g)

        nx.set_node_attributes(g, "betweenness", nx.betweenness_centrality(g))
        nx.set_node_attributes(g, "degree", nx.degree(g))
        nx.set_node_attributes(g, "closeness", nx.closeness_centrality(g))

        nx.set_edge_attributes(g, "eb", nx.edge_betweenness(g))

        cyjs1 = util.from_networkx(g)
        g2 = util.to_networkx(cyjs1)

        # Node and edge counts must survive the round trip.
        self.assertEqual(len(g2.nodes()), len(nodes))
        self.assertEqual(len(g2.edges()), len(edges))

        # Endpoints come back as strings; coerce to int before comparing.
        edge_set = set(list(map(lambda x: (int(x[0]), int(x[1])), g2.edges())))
        self.assertEqual(0, len(edge_set.difference(set(edges))))

        # Node keys are stringified by the conversion ("1" vs 1).
        node_original = g.node[1]
        node_generated = g2.node["1"]

        print(node_original)
        print(node_generated)

        self.assertEqual(node_original["degree"], node_generated["degree"])
        self.assertEqual(node_original["betweenness"], node_generated["betweenness"])
        self.assertEqual(node_original["closeness"], node_generated["closeness"])
示例#6
0
def build_graph():
    """Build a directed Twitter follower graph for the top-100 pairs,
    annotating each node with profile stats and centrality measures and
    each edge with its betweenness.

    Returns the annotated nx.DiGraph.

    NOTE(review): DG.node[...] attribute access is the NetworkX 1.x
    API -- confirm the pinned networkx version.
    """
    pair_list = TwitterUser.get_top_100_pair()
    DG = nx.DiGraph()
    # Edge direction: follower -> followed account.
    DG.add_edges_from([(foer, twitter_user) for twitter_user, foer in
        pair_list])
    betweenness = nx.betweenness_centrality(DG)
    closeness = nx.closeness_centrality(DG)
    edge_betweenness = nx.edge_betweenness(DG)
    # Clustering is computed on an undirected copy of the digraph.
    clustering_co = nx.clustering(nx.Graph(DG))
    page_rank = nx.pagerank(DG)
    for twitter_id in DG.nodes():
        t = TwitterUser.get_by_id(twitter_id)
        node = DG.node[twitter_id]
        node['user_id'] = t.user_id
        node['label'] = t.scrn_name
        node['follower_count'] = t.foer_cnt
        node['friend_count'] = t.friend_cnt
        node['status_count'] = t.status_cnt
        node['location'] = t.location
        node['verified'] = t.verified
        # Account age in days since creation.
        node['twitter_age'] = (date.today() - t.created_at).days
        node['daily_tweet'] = t.status_cnt*1.0/node['twitter_age']
        # Degrees are recomputed from pair_list rather than the graph.
        # NOTE(review): 'indegree' counts pairs where twitter_id is the
        # followed side -- confirm the intended in/out orientation.
        node['indegree'] = len([(id, foer) for id, foer
            in pair_list if id == twitter_id])
        node['outdegree'] = len([(id, foer) for id, foer
            in pair_list if foer == twitter_id])
        node['cluster'] = clustering_co[twitter_id]
        node['betweenness'] = betweenness[twitter_id]
        node['closeness'] = closeness[twitter_id]
        node['page_rank'] = page_rank[twitter_id]
    for out_n, in_n in DG.edges():
        DG[out_n][in_n]['edge_betweenness'] = edge_betweenness[(out_n,in_n)]

    return DG
示例#7
0
    def test_networkx_roundtrip(self):
        '''Round-trip a random small-world graph through
        util.from_networkx / util.to_networkx and assert that node and
        edge counts, edge endpoints and node attributes all survive.

        NOTE(review): relies on the pre-2.0 NetworkX API throughout
        (nx.edge_betweenness, set_node_attributes(G, name, values),
        g.node[...] access) -- confirm the pinned networkx version.
        '''
        print('\n---------- NetworkX Data Roundtrip Test Start -----------\n')

        g = nx.newman_watts_strogatz_graph(100, 3, 0.5)
        nodes = g.nodes()
        edges = g.edges()

        # Add some attributes
        g.graph['name'] = 'original'
        g.graph['density'] = nx.density(g)

        nx.set_node_attributes(g, 'betweenness', nx.betweenness_centrality(g))
        nx.set_node_attributes(g, 'degree', nx.degree(g))
        nx.set_node_attributes(g, 'closeness', nx.closeness_centrality(g))

        nx.set_edge_attributes(g, 'eb', nx.edge_betweenness(g))

        cyjs1 = util.from_networkx(g)
        g2 = util.to_networkx(cyjs1)

        # Node and edge counts must survive the round trip.
        self.assertEqual(len(g2.nodes()), len(nodes))
        self.assertEqual(len(g2.edges()), len(edges))

        # Endpoints come back as strings; coerce to int before comparing.
        edge_set = set(list(map(lambda x: (int(x[0]), int(x[1])), g2.edges())))
        self.assertEqual(0, len(edge_set.difference(set(edges))))

        # Node keys are stringified by the conversion ('1' vs 1).
        node_original = g.node[1]
        node_generated = g2.node['1']

        print(node_original)
        print(node_generated)

        self.assertEqual(node_original['degree'], node_generated['degree'])
        self.assertEqual(node_original['betweenness'], node_generated['betweenness'])
        self.assertEqual(node_original['closeness'], node_generated['closeness'])
示例#8
0
def GN_Algorithm(G, G_original):
    """Girvan-Newman: strip max-betweenness edges from ``G`` (mutated in
    place) and return the component partition with the best modularity Q
    scored against ``G_original``."""
    partition = [[n for n in G.nodes()]]
    max_q = 0.0

    while len(G.edges()) != 0:
        # Cut the edge carrying the most shortest-path traffic.
        betweenness_dict = nx.edge_betweenness(G)
        u, v = max(betweenness_dict.items(), key=lambda item: item[1])[0]
        G.remove_edge(u, v)

        components = [list(c) for c in nx.connected_components(G)]
        if len(components) != len(partition):
            q = calculate_Q(components, G_original)
            # Keep whichever split maximizes modularity.
            if q > max_q:
                max_q = q
                partition = components

    return partition
示例#9
0
def girvan_newman_partition(G, partition_count, protected_edges=None):
    """Remove max-betweenness edges (skipping protected ones) until ``G``
    has at least ``partition_count`` connected components.

    Mutates ``G`` in place and returns the list of removed edges.

    Parameters
    ----------
    G : networkx.Graph
    partition_count : int
        Target number of connected components.
    protected_edges : iterable or None
        Edges that must not be removed.
    """
    ranked_betweenness_edges = nx.edge_betweenness(G)
    inverse_map = make_inverse_dict(ranked_betweenness_edges)

    # Pick uniformly among the edges tied for the highest betweenness.
    max_edge_key = max(ranked_betweenness_edges.values())
    max_edge = random.choice(inverse_map[max_edge_key])

    if protected_edges:
        # Betweenness values, highest first.  (The original called
        # .sort()/.reverse() directly on dict.values(), which fails on
        # Python 3 where values() is a view without those methods.)
        ranks = sorted(ranked_betweenness_edges.values(), reverse=True)
        if len(ranked_betweenness_edges) > len(protected_edges):
            # imperfect, but faster than computing set differences
            max_edge = find_unprotected_edge(ranks, copy.copy(inverse_map),
                                             max_edge, protected_edges)
        else:
            candidates = list(
                set(ranked_betweenness_edges.keys()).difference(
                    set(protected_edges)))
            if len(candidates) > 0:
                while max_edge not in candidates:
                    max_edge = find_unprotected_edge(ranks,
                                                     copy.copy(inverse_map),
                                                     max_edge, protected_edges)

    G.remove_edge(max_edge[0], max_edge[1])

    if nx.number_connected_components(G) >= partition_count:
        return [max_edge]
    return [max_edge] + girvan_newman_partition(G, partition_count,
                                                protected_edges)
def edge_centrality(net):
    """Collect per-edge centrality measures for ``net`` and write the
    pairwise correlations between them to bl.csv.

    Returns {edge: [betweenness, flow betweenness, load, communicability]}.
    """
    values = {}

    bet = nx.edge_betweenness(net, normalized=True)
    flow = nx.edge_current_flow_betweenness_centrality(net, normalized=True)
    load = nx.edge_load(net)
    com = nx.communicability(net)
    bet_list = []
    flow_list = []
    load_list = []
    com_list = []
    # .items() replaces the Python-2-only .iteritems().
    for edge, value in bet.items():
        origin, end = edge
        # Flow betweenness may be keyed on either edge orientation.
        value_flow = max(flow.get(edge), flow.get((end, origin)))
        values[edge] = [value, value_flow, load.get(edge),
                        com.get(origin).get(end)]
        bet_list.append(value)
        flow_list.append(value_flow)
        load_list.append(load.get(edge))
        com_list.append(com.get(origin).get(end))
    # Context manager guarantees the CSV handle is closed (the original
    # also used the Python-2-only bare `print` statement).
    with open("bl.csv", 'w') as file3:
        for xt in [bet_list, load_list, flow_list, com_list]:
            for yt in [bet_list, load_list, flow_list, com_list]:
                corr(xt, yt, file3)
            print()
            file3.write("\n")
    return values
def edge_centrality(net):
    """Collect per-edge centrality measures for ``net`` and write the
    pairwise correlations between them to bl.csv.

    Returns {edge: [betweenness, flow betweenness, load, communicability]}.
    """
    values = {}

    bet = nx.edge_betweenness(net, normalized=True)
    flow = nx.edge_current_flow_betweenness_centrality(net, normalized=True)
    load = nx.edge_load(net)
    com = nx.communicability(net)
    bet_list = []
    flow_list = []
    load_list = []
    com_list = []
    # .items() replaces the Python-2-only .iteritems().
    for edge, value in bet.items():
        origin, end = edge
        # Flow betweenness may be keyed on either edge orientation.
        value_flow = max(flow.get(edge), flow.get((end, origin)))
        values[edge] = [
            value, value_flow,
            load.get(edge),
            com.get(origin).get(end)
        ]
        bet_list.append(value)
        flow_list.append(value_flow)
        load_list.append(load.get(edge))
        com_list.append(com.get(origin).get(end))
    # Context manager guarantees the CSV handle is closed (the original
    # also used the Python-2-only bare `print` statement).
    with open("bl.csv", 'w') as file3:
        for xt in [bet_list, load_list, flow_list, com_list]:
            for yt in [bet_list, load_list, flow_list, com_list]:
                corr(xt, yt, file3)
            print()
            file3.write("\n")
    return values
示例#12
0
    def test_networkx_roundtrip(self):
        '''Round-trip a random small-world graph through
        util.from_networkx / util.to_networkx and assert that node and
        edge counts, edge endpoints and node attributes all survive.

        NOTE(review): relies on the pre-2.0 NetworkX API throughout
        (nx.edge_betweenness, set_node_attributes(G, name, values),
        g.node[...] access) -- confirm the pinned networkx version.
        '''
        print('\n---------- NetworkX Data Roundtrip Test Start -----------\n')

        g = nx.newman_watts_strogatz_graph(100, 3, 0.5)
        nodes = g.nodes()
        edges = g.edges()

        # Add some attributes
        g.graph['name'] = 'original'
        g.graph['density'] = nx.density(g)

        nx.set_node_attributes(g, 'betweenness', nx.betweenness_centrality(g))
        nx.set_node_attributes(g, 'degree', nx.degree(g))
        nx.set_node_attributes(g, 'closeness', nx.closeness_centrality(g))

        nx.set_edge_attributes(g, 'eb', nx.edge_betweenness(g))

        cyjs1 = util.from_networkx(g)
        g2 = util.to_networkx(cyjs1)

        # Node and edge counts must survive the round trip.
        self.assertEqual(len(g2.nodes()), len(nodes))
        self.assertEqual(len(g2.edges()), len(edges))

        # Endpoints come back as strings; coerce to int before comparing.
        edge_set = set(list(map(lambda x: (int(x[0]), int(x[1])), g2.edges())))
        self.assertEqual(0, len(edge_set.difference(set(edges))))

        # Node keys are stringified by the conversion ('1' vs 1).
        node_original = g.node[1]
        node_generated = g2.node['1']

        print(node_original)
        print(node_generated)

        self.assertEqual(node_original['degree'], node_generated['degree'])
        self.assertEqual(node_original['betweenness'], node_generated['betweenness'])
        self.assertEqual(node_original['closeness'], node_generated['closeness'])
def compute_best_community(original_g):
    """Girvan-Newman sweep: remove max-betweenness edges one by one and
    keep the component split with the highest modularity.

    Returns (formatted communities, max_modularity).

    NOTE: ``g`` is an alias of ``original_g``, so the caller's graph is
    emptied of edges as a side effect -- TODO confirm callers expect that.
    """
    max_modularity = -1
    total_nodes = nx.number_of_nodes(original_g)
    community_count = 1
    g = original_g
    communities = []

    # Generate all the communities: from the whole graph as one community
    # down to every node being its own community.
    while community_count < total_nodes:
        betweenness = nx.edge_betweenness(g)
        # .items() replaces the Python-2-only .iteritems().
        max_betweenness = max(betweenness.items(), key=operator.itemgetter(1))[0]
        g.remove_edge(max_betweenness[0], max_betweenness[1])
        connected_subgraphs = nx.connected_components(g)

        connected_subgraphs_list = convert_generator_list(connected_subgraphs)

        community_dict = categorize_nodes(connected_subgraphs_list)

        modularity = community.modularity(community_dict, original_g)

        if modularity > max_modularity:
            max_modularity = modularity
            communities = list(connected_subgraphs_list)
        community_count += 1

    communities = format_list(communities)

    return communities, max_modularity
示例#14
0
    def execute(self):
        """Girvan-Newman loop: delete the max-betweenness edge each round,
        remember the partition with the best modularity Q, and print the
        round in which it was found."""
        iterTime = 1
        # Fix: without this initialization, iterRound is unbound at the
        # final print when no split ever improves on self._max_Q.
        iterRound = 0
        while len(self._G.edges()) != 0:
            # Find the edge with the highest betweenness and remove it.
            edgeDic = nx.edge_betweenness(self._G)
            removeE = max(edgeDic, key=edgeDic.get)
            self._G.remove_edge(removeE[0], removeE[1])
            components = [
                list(c) for c in nx.connected_components(self._G)
            ]
            cur_Q = 0
            if len(components) != len(self._partition):
                cur_Q = cal_Q(components, self._G_cloned)
                if cur_Q > self._max_Q:
                    self._max_Q = cur_Q
                    self._partition = components
                    iterRound = iterTime
                    writeClu(self._G, self._partition)
            print("该轮结束的划分结果:" + str(self._partition))
            print("该轮结束时的模块度Q:" + str(cur_Q))
            print("################第" + str(iterTime) +
                  "轮结束##################")

            iterTime += 1

        print("最大Q值出现在:第" + str(iterRound) + "轮。最大Q值为:" + str(self._max_Q))
        for clu in self._partition:
            print(sorted(clu))
def compute_best_community(original_g):
    """Girvan-Newman sweep: remove max-betweenness edges one by one and
    keep the component split with the highest modularity.

    Returns (formatted communities, max_modularity).

    NOTE: ``g`` is an alias of ``original_g``, so the caller's graph is
    emptied of edges as a side effect -- TODO confirm callers expect that.
    """
    max_modularity = -1
    total_nodes = nx.number_of_nodes(original_g)
    community_count = 1
    g = original_g
    communities = []

    # Generate all the communities: from the whole graph as one community
    # down to every node being its own community.
    while community_count < total_nodes:
        betweenness = nx.edge_betweenness(g)
        # .items() replaces the Python-2-only .iteritems().
        max_betweenness = max(betweenness.items(), key=operator.itemgetter(1))[0]
        g.remove_edge(max_betweenness[0], max_betweenness[1])
        connected_subgraphs = nx.connected_components(g)

        connected_subgraphs_list = convert_generator_list(connected_subgraphs)

        community_dict = categorize_nodes(connected_subgraphs_list)

        modularity = community.modularity(community_dict, original_g)

        if modularity > max_modularity:
            max_modularity = modularity
            communities = list(connected_subgraphs_list)
        community_count += 1

    communities = format_list(communities)

    return communities, max_modularity
示例#16
0
 def getEdgeBetweennessList(self):
     """Return [(u, v, betweenness), ...] for every edge, sorted by
     betweenness centrality in descending order (highest first)."""
     edgeBetwness = NX.edge_betweenness(self)
     edgelist = [(edge[0], edge[1], betwcen)
                 for edge, betwcen in list(edgeBetwness.items())]
     # Fix: actually perform the descending sort the original comment
     # promised but never executed (it also had a dead `edgelist = []`).
     edgelist.sort(key=lambda item: item[2], reverse=True)
     return (edgelist)
示例#17
0
 def getEdgeBetweennessList(self):
     """Return [(u, v, betweenness), ...] for every edge, sorted by
     betweenness centrality in descending order (highest first)."""
     edgeBetwness = NX.edge_betweenness(self)
     edgelist = [(edge[0], edge[1], betwcen)
                 for edge, betwcen in edgeBetwness.items()]
     # Fix: actually perform the descending sort the original comment
     # promised but never executed (it also had a dead `edgelist = []`).
     edgelist.sort(key=lambda item: item[2], reverse=True)
     return (edgelist)
示例#18
0
def getresults(graph):
    """Print degree, clustering, average-path-length and betweenness
    summaries for ``graph`` (path length per connected component)."""
    print("Analisando grafo...")
    print(f"Média do grau dos nodos: {nx.average_degree_connectivity(graph)}")
    print(f"Coeficiente de clusterização: {nx.average_clustering(graph)}")
    # Average shortest path is only defined per connected component.
    for component in nx.connected_component_subgraphs(graph):
        print(f"Distância média dos nós: {nx.average_shortest_path_length(component)}")
    print(f"Betweenness das arestas: {nx.edge_betweenness(graph)}")
    print(f"Betweenness dos nodos: {nx.betweenness_centrality(graph)}")
def betweeness_calculation(graph):
    """Remove the single highest-betweenness edge from ``graph`` in place
    (one Girvan-Newman step) and return the graph."""
    betweeness = nx.edge_betweenness(graph,
                                     k=None,
                                     normalized=False,
                                     weight=None,
                                     seed=None)
    # .items() replaces the Python-2-only .iteritems(), which raises
    # AttributeError on Python 3.
    graph.remove_edge(
        *(max(betweeness.items(), key=operator.itemgetter(1))[0]))
    return graph
示例#20
0
def get_max_bt_edge(g):
    """Return the edge of ``g`` with the greatest edge betweenness
    (on ties, the last such edge encountered wins)."""
    bt = networkx.edge_betweenness(g)  # per-edge betweenness scores
    best_value = 0.0
    best_edge = (0, 0)
    for edge, value in bt.items():
        # >= keeps the original "last tied edge wins" behaviour.
        if value >= best_value:
            best_value = value
            best_edge = edge
    return best_edge
def betweeness_calculation(graph):
    """Remove every edge tied for the maximum betweenness from ``graph``
    (in place) and return it."""
    betweeness = nx.edge_betweenness(graph, k=None, normalized=False, weight=None, seed=None)
    # .items() replaces the Python-2-only .iteritems().
    maxval = max(betweeness.items(), key=operator.itemgetter(1))[1]
    keys = [k for k, v in betweeness.items() if v == maxval]
    # All edges sharing the maximal value are removed, not just one.
    for edge in keys:
        graph.remove_edge(*edge)
    return graph
示例#22
0
 def get_pre_recall(self):
     """One Girvan-Newman sweep over self._G1, tracking the partition
     with the best modularity in self._partition / self._max_Q.

     Fix: the original signature omitted ``self`` even though the body
     reads self._G1 / self._partition / self._max_Q, so calling it as a
     bound method raised TypeError.
     """
     while len(self._G1.edges()) != 0:
         # Remove the edge with the highest betweenness.
         edge = max(nx.edge_betweenness(self._G1).items(), key=lambda item: item[1])[0]
         self._G1.remove_edge(edge[0], edge[1])
         components = [list(c) for c in list(nx.connected_components(self._G1))]
         if len(components) != len(self._partition):
             cur_Q = cal_Q(components, self._G1)
             if cur_Q > self._max_Q:
                 self._max_Q = cur_Q
                 self._partition = components
示例#23
0
def is_community(component):
    """Heuristic community test.

    Components with fewer than 6 nodes always count as communities;
    larger ones do when their peak unnormalized edge betweenness does
    not exceed n - 1.
    """
    if len(component.nodes()) < 6:
        return True
    # max(values()) replaces the Python-2 .iteritems() scan, which also
    # shadowed the builtin `max`; default=0 preserves the original
    # "no edges -> peak 0 -> True" behaviour.
    peak = max(nx.edge_betweenness(component, normalized=False).values(),
               default=0)
    return peak <= len(component.nodes()) - 1
示例#24
0
    def _getBetweenCentralityGraph(self):
        '''
        returns a graph made of same nodes and edges as 'self' but weights of edges
        are replaced by 'edge betweenness centrality'

        NOTE(review): the three-argument add_edge(u, v, data) form is a
        legacy NetworkX calling convention -- confirm the pinned version.
        '''
        # Instantiate the same class as self so subclasses round-trip.
        centralityGraph = self.__class__()
        edgeBetwness = NX.edge_betweenness(self)
        for edge, betwnness in edgeBetwness.items():
            u = edge[0]
            v = edge[1]
            centralityGraph.add_edge(u, v, betwnness)

        return(centralityGraph)
示例#25
0
def girvan_newman(G):
    """Recursively strip a random highest-betweenness edge from ``G``
    until at most two edges remain; returns the removed edges followed
    by whatever edges are left."""
    betweenness = nx.edge_betweenness(G)
    inverse_map = make_inverse_dict(betweenness)

    # Choose uniformly among the edges tied for the top betweenness.
    top_score = max(betweenness.values())
    chosen = random.choice(inverse_map[top_score])
    G.remove_edge(chosen[0], chosen[1])

    if len(G.edges()) <= 2:
        return [chosen] + G.edges()
    return [chosen] + girvan_newman(G)
示例#26
0
    def _getBetweenCentralityGraph(self):
        '''
        returns a graph made of same nodes and edges as 'self' but weights of edges
        are replaced by 'edge betweenness centrality'

        NOTE(review): the three-argument add_edge(u, v, data) form is a
        legacy NetworkX calling convention -- confirm the pinned version.
        '''
        # Instantiate the same class as self so subclasses round-trip.
        centralityGraph = self.__class__()
        edgeBetwness = NX.edge_betweenness(self)
        for edge, betwnness in list(edgeBetwness.items()):
            u = edge[0]
            v = edge[1]
            centralityGraph.add_edge(u, v, betwnness)

        return(centralityGraph)
示例#27
0
 def Kernighan_Lin(self):
     """Split the graph's nodes into two halves (padding with "" when
     the count is odd) and print both halves, the edge list, and the
     edge-betweenness scores."""
     edges = list(self.G.edges.data())
     group_a = list(self.G.nodes)
     group_b = []
     # Pad to an even count so the split is exactly half and half.
     if len(group_a) % 2 != 0:
         group_a.append("")
     # Move the back half of group_a into group_b.
     for _ in range(int(len(group_a) / 2)):
         group_b.append(group_a.pop())
     print(group_a)
     print(group_b)
     print(edges)
     print(edge_betweenness_centrality(self.G, True))
     print(edge_betweenness(self.G, True))
示例#28
0
 def execute(self):
     """Girvan-Newman loop over self._G: remove the max-betweenness edge
     each round, keep the best-modularity split, then print and return
     it."""
     while len(self._G.edges()) != 0:
         # Delete the edge with the highest betweenness.
         edge = max(nx.edge_betweenness(self._G).items(), key=lambda item: item[1])[0]
         self._G.remove_edge(edge[0], edge[1])
         components = list(nx.connected_components(self._G))
         if len(components) != len(self._partition):
             cur_Q = cal_Q(components, self._G_cloned)
             if cur_Q > self._max_Q:
                 self._max_Q = cur_Q
                 self._partition = components
     # print() works on both Python 2 (single argument) and 3; the bare
     # `print x` statements used originally only parse under Python 2.
     print(self._max_Q)
     print(self._partition)
     return self._partition
示例#29
0
    def test_networkx_digraph_edge_attr(self):
        """Check that an edge attribute set on a DiGraph survives the
        conversion to Cytoscape.js JSON.

        NOTE(review): g.add_path() and set_edge_attributes(G, name,
        values) are legacy pre-2.x NetworkX APIs -- confirm the pinned
        version.
        """
        print('\n---------- Digraph Edge Att Test Start -----------\n')
        g = nx.DiGraph()
        g.add_path([0, 1, 2, 3, 4])
        eb = nx.edge_betweenness(g)
        nx.set_edge_attributes(g, 'eb', eb)
        cyjs = util.from_networkx(g)

        print(json.dumps(cyjs, indent=4))

        # There is only one edge, so this should be OK...
        # ('data' carries source, target and the 'eb' attribute -> 3 keys)
        edge = cyjs['elements']['edges'][0]
        self.assertEqual(3, len(edge['data']))
示例#30
0
    def test_networkx_digraph_edge_attr(self):
        """Check that an edge attribute set on a DiGraph survives the
        conversion to Cytoscape.js JSON.

        NOTE(review): g.add_path() and set_edge_attributes(G, name,
        values) are legacy pre-2.x NetworkX APIs -- confirm the pinned
        version.
        """
        print('\n---------- Digraph Edge Att Test Start -----------\n')
        g = nx.DiGraph()
        g.add_path([0, 1, 2, 3, 4])
        eb = nx.edge_betweenness(g)
        nx.set_edge_attributes(g, 'eb', eb)
        cyjs = util.from_networkx(g)

        print(json.dumps(cyjs, indent=4))

        # There is only one edge, so this should be OK...
        # ('data' carries source, target and the 'eb' attribute -> 3 keys)
        edge = cyjs['elements']['edges'][0]
        self.assertEqual(3, len(edge['data']))
def betweeness_calculation(graph):
    """Remove every edge tied for the maximum betweenness from ``graph``
    (in place) and return it."""
    betweeness = nx.edge_betweenness(graph,
                                     k=None,
                                     normalized=False,
                                     weight=None,
                                     seed=None)
    # .items() replaces the Python-2-only .iteritems() used originally.
    maxval = max(betweeness.items(), key=operator.itemgetter(1))[1]
    keys = [k for k, v in betweeness.items() if v == maxval]
    # All edges sharing the maximal value are removed, not just one.
    for edge in keys:
        graph.remove_edge(*edge)
    return graph
示例#32
0
 def execute(self):
     """Girvan-Newman loop over self._G: remove the max-betweenness
     edge each round, keep the best-modularity split, then print and
     return it."""
     while len(self._G.edges()) != 0:
         # Delete the edge with the highest betweenness.
         edge = max(nx.edge_betweenness(self._G).items(),
                    key=lambda item: item[1])[0]
         self._G.remove_edge(edge[0], edge[1])
         components = list(nx.connected_components(self._G))
         if len(components) != len(self._partition):
             cur_Q = cal_Q(components, self._G_cloned)
             if cur_Q > self._max_Q:
                 self._max_Q = cur_Q
                 self._partition = components
     # print() works on both Python 2 (single argument) and 3; the bare
     # `print x` statements used originally only parse under Python 2.
     print(self._max_Q)
     print(self._partition)
     return self._partition
示例#33
0
 def execute(self):
     """Peel max-betweenness edges off self._G, tracking the component
     split with the highest modularity; prints and returns it."""
     while len(self._G.edges()) != 0:
         # Identify and remove the highest-betweenness edge.
         scores = nx.edge_betweenness(self._G)
         edge = max(scores.items(), key=lambda item: item[1])[0]
         self._G.remove_edge(edge[0], edge[1])
         components = [list(c) for c in nx.connected_components(self._G)]
         if len(components) != len(self._partition):
             cur_Q = modularity(self._G_cloned, components)
             # Keep the best-scoring partition seen so far.
             if cur_Q > self._max_Q:
                 self._max_Q = cur_Q
                 self._partition = components
     print(self._max_Q)
     print(self._partition)
     return self._partition
	def execute(self):
		"""Girvan-Newman partitioning of self._G (destructive): remove
		max-betweenness edges while tracking the best-modularity split."""
		while len(self._G.edges()) > 0:
			# 1. Compute the edge betweenness of every remaining edge,
			# 2. then remove the edge with the largest value.
			scores = nx.edge_betweenness(self._G)
			edge = max(scores.items(), key=lambda item: item[1])[0]
			self._G.remove_edge(edge[0], edge[1])
			# Connected components left after the removal.
			components = [list(c) for c in nx.connected_components(self._G)]
			if len(components) != len(self._partition):
				# 3. Score the new split by modularity Q.
				cur_Q = cal_Q(components, self._G_cloned)
				if cur_Q > self._max_Q:
					self._max_Q = cur_Q
					self._partition = components
		print("max Q:", self._max_Q)
		return self._partition
示例#35
0
    def get_recall(f, G1,G2,pre, recall):
        """Run Girvan-Newman on a copy of G2, then rebuild a shared-IP
        multigraph from file ``f`` and update precision/recall counters.

        NOTE(review): this method looks broken as written (no ``self``
        parameter despite method indentation; see the inline notes) --
        confirm intent before relying on it.
        """
        G_cloned = G1.copy()
        G_tmp = G2.copy()
        # NOTE(review): `G` is only assigned much further down, so this
        # comprehension raises NameError on entry.
        partition = [[n for n in G.nodes()]]
        max_Q = 0.0
        # NOTE(review): round() with no argument is a TypeError.
        max_Q = round()
        while len(G_tmp.edges()) != 0:
            edge = max(nx.edge_betweenness(G_tmp).items(),key=lambda item:item[1])[0]
            G_tmp.remove_edge(edge[0], edge[1])
            components = [list(c) for c in list(nx.connected_components(G_tmp))]
            if len(components) != len(partition):
                components_tmp = list2dict(components)
                cur_Q = community.modularity(components_tmp, G_cloned, weight='weight')
                if cur_Q > max_Q:
                    max_Q = cur_Q
                    partition = components

            G = nx.MultiGraph()
            head = f.readline().split()
            line = f.readline().split()
            mapdict = dict()
            pre=0
            recall=0
            # NOTE(review): TP_FP is never incremented, so `pre = TP / TP_FP`
            # below divides by zero as soon as a pair is processed.
            TP_FP= 0
            TP = 0
            TP_FN = 0
            while line:
                NS(mapdict, line[2], line[0])
                line = f.readline().split()
            # Count users sharing an IP to match communities across the
            # two social networks.
            for IP in mapdict.keys():
                nodes = list(mapdict[IP].keys())
                for i in range(len(nodes)):
                    for j in range(i + 1, len(nodes)):
                        num = G.number_of_edges(nodes[i], nodes[j])
                        G.add_edge(nodes[i], nodes[j])
                        G[nodes[i]][nodes[j]][num]["share_IP"] = IP
                        G[nodes[i]][nodes[j]][num]["coun"] = min(mapdict[IP][str(nodes[i])], mapdict[IP][str(nodes[j])])
                        TP = TP + 1
                        TP_FN = TP_FN + 1
                        pre = TP / TP_FP
                        recall = TP / TP_FN
示例#36
0
def computeSubgraphClusters(G, start=0, divisions=2):
    """Split ``G`` into at least ``divisions`` multi-node clusters by
    removing the highest-betweenness edge one at a time.

    Mutates ``G`` in place and returns (G, subgraphs).
    """
    # Fix: initialize `subgraphs` so it is defined even when the graph
    # already has enough components (the original raised NameError then).
    subgraphs = nx.connected_component_subgraphs(G)
    edged_subgraphs = len(subgraphs)
    while edged_subgraphs < divisions:
        # Remove the single highest-betweenness edge.
        # (.items() replaces the Python-2-only .iteritems())
        for pair in reversed(sorted(nx.edge_betweenness(G).items(), key=itemgetter(1))[-1:]):
            G.remove_edge(pair[0][0], pair[0][1])
        new_subgraphs = nx.connected_component_subgraphs(G)
        proposed_subgraphs = []
        # Only keep multi-node components; stop collecting at the first
        # singleton.
        for subgraph in new_subgraphs:
            if len(subgraph.nodes()) > 1:
                proposed_subgraphs.append(subgraph)
            else:
                break
        # If we start losing clusters, stop.  No guarantee we haven't
        # hit a local maximum.
        if len(proposed_subgraphs) < edged_subgraphs:
            break
        subgraphs = proposed_subgraphs
        edged_subgraphs = len(subgraphs)
        print(edged_subgraphs)

    for index, subgraph in enumerate(subgraphs):
        # Copy each cluster into a fresh Graph for graphviz export.
        s0 = nx.Graph()
        s0.add_edges_from(subgraph.edges(data=True))
        print()
        print(subgraph.nodes(data=True))
        noteSubgraph = nx.to_agraph(s0)

    return G, subgraphs
示例#37
0
def test(G_orig, number_of_clusters, clusters):
    """Girvan-Newman clustering demo: copy ``G_orig``, repeatedly cut the
    highest-betweenness edge until ``number_of_clusters`` components
    exist, and plot ground-truth vs. resulting labelings.

    clusters : per-node ground-truth labels, used only to color nodes.
    Returns the edge-stripped copy of the graph.
    """
    G = deepcopy(G_orig)
    start = time.time()
    # One fixed layout so both plots are directly comparable.
    pos = nx.spring_layout(G)
    # print(len(G.edges))
    number_of_current_clusters = nx.number_connected_components(G)
    plt.figure()
    nx.draw(G_orig, pos=pos, node_color=clusters)
    plt.title('Correct labels')
    plt.show()
    # Betweenness sample size: every node, or 10% of nodes on graphs
    # with many edges to keep the loop tractable.
    # NOTE(review): k is passed positionally to nx.edge_betweenness --
    # confirm it maps to the sampling parameter in the pinned version.
    k = len(G.nodes)
    if len(G.edges) > 1000:
        k = round(len(G.nodes) * 0.1)
    while number_of_current_clusters < number_of_clusters:
        # if len(G.edges) % 10 == 0:
        # print(len(G.edges))
        # print(number_of_current_clusters)
        #plt.figure()
        #nx.draw(G, pos=pos)
        #plt.show()
        edge_centrality = nx.edge_betweenness(G, k)
        # Cut the edge carrying the most shortest-path traffic.
        u, v = max(edge_centrality, key=lambda key: edge_centrality[key])
        G.remove_edge(u, v)
        number_of_current_clusters = nx.number_connected_components(G)
    # print(len(G.edges))
    # print(number_of_current_clusters)

    # Map every node to the index of its connected component.
    res = [c for c in nx.connected_components(G)]
    our_clusters = []
    for node in G.nodes:
        for i in range(len(res)):
            if node in res[i]:
                our_clusters.append(i)
                break
    # print(our_clusters)
    plt.cla()
    plt.title('Clustering result')
    nx.draw(G_orig, pos=pos, node_color=our_clusters)
    plt.show()
    end = time.time()
    # print(end - start)
    return G
示例#38
0
def custom_remove_edge(component):
    """Remove the highest-betweenness edge from ``component`` in place.

    Returns (split, removed_edge), where ``split`` is True when the
    removal disconnected the component into two or more pieces.

    Raises
    ------
    ValueError
        If no edge was found (component has no edges).
    """
    removed_edge = None
    best = 0
    # .items() replaces the Python-2-only .iteritems(); `best` avoids
    # shadowing the builtin max().
    for current_edge, edge_betweenness_value in nx.edge_betweenness(component).items():
        if edge_betweenness_value > best:
            removed_edge = current_edge
            best = edge_betweenness_value
    # Fix: check BEFORE removing -- the original indexed removed_edge
    # first, so the None guard could never fire.
    if removed_edge is None:
        raise ValueError('Removed edge should not be None')
    component.remove_edge(removed_edge[0], removed_edge[1])
    # Count the resulting pieces (avoid reusing `component` as loop var).
    count = 0
    for piece in nx.connected_component_subgraphs(component):
        count += 1
    if count >= 2:
        return True, removed_edge
    return False, removed_edge
def RecalculatedEdgeBetweennessAttack(G, remove_fraction=1.0):
    """Recalculated Edge Betweenness Attack.

    At every step the edge betweenness of *G* is recomputed, the single
    most central edge is deleted, and the number of driver nodes (via ECT)
    is recorded.  *remove_fraction* selects what share of the edges to
    delete.  Returns ``(tot_ND, tot_T)``: driver-node counts and the
    corresponding time steps.
    """
    total_removals = int(G.number_of_edges() * (remove_fraction + 0.0))

    tot_ND = [0] * (total_removals + 1)
    tot_T = [0] * (total_removals + 1)

    # Baseline before any edge is removed.
    driver_count, _ = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = driver_count
    tot_T[0] = 0

    for step in range(1, total_removals + 1):
        # Recompute centrality on the current (already damaged) graph.
        betweenness = nx.edge_betweenness(G)
        u, v = max(betweenness, key=betweenness.get)
        G.remove_edge(u, v)
        driver_count, _ = ECT.get_number_of_driver_nodes(G)
        tot_ND[step] = driver_count
        tot_T[step] = step

    return (tot_ND, tot_T)
def InitialEdgeBetweennessAttack(G, remove_fraction=1.0):
    """Initial Edge Betweenness Attack.

    Edge betweenness is computed ONCE on the intact graph; edges are then
    deleted from most to least central while the driver-node count (via
    ECT) is tracked after every deletion.  Returns ``(tot_ND, tot_T)``.
    """
    total_removals = int(G.number_of_edges() * (remove_fraction + 0.0))

    tot_ND = [0] * (total_removals + 1)
    tot_T = [0] * (total_removals + 1)

    # Baseline before any edge is removed.
    driver_count, _ = ECT.get_number_of_driver_nodes(G)
    tot_ND[0] = driver_count
    tot_T[0] = 0

    # Rank edges by the initial betweenness, highest first.
    ranking = sorted(nx.edge_betweenness(G).items(),
                     key=operator.itemgetter(1), reverse=True)
    for step, ((u, v), _score) in enumerate(ranking[:total_removals], start=1):
        G.remove_edge(u, v)
        driver_count, _ = ECT.get_number_of_driver_nodes(G)
        tot_ND[step] = driver_count
        tot_T[step] = step

    return (tot_ND, tot_T)
示例#41
0
def dhc(Gc):
    """Divisive hierarchical clustering via Girvan-Newman edge removal.

    Repeatedly deletes the highest-betweenness edge until every node sits
    in its own component, recording the node partition (and a snapshot of
    the graph) each time a deletion actually splits a component.

    Returns:
        (partition_graph, partitions): ``partitions`` maps level -> list of
        node lists; ``partition_graph`` maps level -> graph copy at that
        level (level 1, the intact graph, has no snapshot, as before).
    """
    H = Gc.copy()
    partitions = {}
    partition_graph = {}
    lvl = 1
    # Level 1 is the untouched graph's component structure.
    # nx.connected_component_subgraphs() was removed in networkx 2.4;
    # build the per-component node lists directly.
    partitions[lvl] = [list(c) for c in nx.connected_components(H)]
    while nx.number_connected_components(H) != H.number_of_nodes():
        # Edge betweenness selects the best candidate edge to cut.
        eb = nx.edge_betweenness(H)
        sorted_eb = sorted(eb.items(), reverse=True, key=operator.itemgetter(1))
        # Renamed from (k, v): the original reused `k` both as a subgraph
        # and as a node endpoint, which was confusing.
        u, v = sorted_eb[0][0]
        ncc_before = nx.number_connected_components(H)
        H.remove_edge(u, v)
        ncc_after = nx.number_connected_components(H)
        # Record a new level only when the cut actually split a component.
        if ncc_before != ncc_after:
            lvl += 1
            partitions[lvl] = [list(c) for c in nx.connected_components(H)]
            partition_graph[lvl] = H.copy()

    return partition_graph, partitions
示例#42
0
def HeadTailCommunityDetection(G, finaledgelist):
    """Head/tail breaks community detection.

    For every connected component, edges whose betweenness is at or below
    the component mean form the "tail".  A small enough tail is accepted
    into *finaledgelist* (mutated in place); otherwise the function recurses
    on the tail subgraph.
    """
    # nx.connected_component_subgraphs() was removed in networkx 2.4.
    for nodes in nx.connected_components(G):
        subgraph = G.subgraph(nodes)
        # Use the keyword: passing False positionally binds it to the
        # sampling parameter `k` in networkx 2.x, not to `normalized`.
        result = nx.edge_betweenness(subgraph, normalized=False)
        # Materialize as lists so the parallel indexing below works on
        # Python 3 (dict views are not indexable).
        edges = list(result.keys())
        values = list(result.values())
        mean = getMean(values)
        edgelist = []
        edgetemp = subgraph.edges()
        if len(edgetemp) <= 2:
            # Tiny components are accepted wholesale.
            for edge in edgetemp:
                finaledgelist.append(edge)
        else:
            for index in range(len(values)):
                if values[index] <= mean:
                    edgelist.append(edges[index])
            # Change the head/tail division rule here; this is the tail
            # percentage, so for a 40/60 rule the value is 0.6.
            if (float(len(edgelist)) / float(len(edges))) <= 0.6:
                for edge in edgelist:
                    finaledgelist.append(edge)
            else:
                # Tail still too large: recurse on the tail subgraph.
                Gsub = nx.Graph()
                for edge in edgelist:
                    Gsub.add_edge(edge[0], edge[1])
                HeadTailCommunityDetection(Gsub, finaledgelist)
示例#43
0
def __HeadTailCommunityDetection(G, finaledgelist, head_tail_ratio=0.6):
    """Head/tail breaks community detection (non-normalized betweenness).

    For every connected component, edges whose betweenness is at or below
    the component mean form the "tail"; a tail no larger than
    *head_tail_ratio* of the edges is accepted into *finaledgelist*,
    otherwise the function recurses on the tail subgraph.

    Returns *finaledgelist* (also mutated in place).
    """
    H = nx.connected_components(G)

    for s in H:
        subgraph = nx.subgraph(G, s)
        result = nx.edge_betweenness(subgraph, normalized=False)
        edges = list(result.keys())
        values = list(result.values())
        mean = np.mean(values)
        edgelist = []
        edgetemp = subgraph.edges()
        if len(edgetemp) <= 2:
            # Tiny components are accepted wholesale.
            for edge in edgetemp:
                finaledgelist.append(edge)
        else:
            for index in range(len(values)):
                if values[index] <= mean:
                    edgelist.append(edges[index])

            # head_tail_ratio is the allowed tail share, e.g. 0.6 for a
            # 40/60 head/tail division rule.
            if float(len(edgelist)) / float(
                    len(edges)
            ) <= head_tail_ratio:
                for edge in edgelist:
                    finaledgelist.append(edge)
            else:
                Gsub = nx.Graph()
                for edge in edgelist:
                    Gsub.add_edge(edge[0], edge[1])
                # The original used a bare ``except: pass`` that also hid
                # genuine bugs; keep only the deliberate guard against
                # blowing the recursion limit on deep tails.
                try:
                    __HeadTailCommunityDetection(Gsub, finaledgelist,
                                                 head_tail_ratio)
                except RecursionError:
                    pass
    return finaledgelist
示例#44
0
def GN(G):
    """Girvan-Newman community detection maximizing modularity.

    Repeatedly removes the highest-betweenness edge; whenever the component
    count changes, the new partition's modularity is evaluated against the
    cloned original graph and the best partition seen so far is kept.

    Returns the node partition (list of node lists) with maximal modularity.
    """
    G_cloned = G.copy()
    G_tmp = G.copy()
    # Start from the trivial single-community partition.
    partition = [[n for n in G.nodes()]]
    max_Q = 0.0

    while len(G_tmp.edges()) != 0:
        # Cut the edge with the largest betweenness score.
        best_edge = max(nx.edge_betweenness(G_tmp).items(),
                        key=lambda item: item[1])[0]
        G_tmp.remove_edge(best_edge[0], best_edge[1])
        components = [list(c) for c in nx.connected_components(G_tmp)]
        # Only re-score when the cut changed the number of communities.
        if len(components) != len(partition):
            components_tmp = list2dict(components)
            cur_Q = community.modularity(components_tmp,
                                         G_cloned,
                                         weight='weight')
            if cur_Q > max_Q:
                max_Q = cur_Q
                partition = components
    print("max_Q = ", max_Q)
    return partition
    # NOTE(review): the original carried ~30 lines of unreachable Python-2
    # scratch code (bare `print` statements, ad-hoc edge-degree experiments)
    # after this return. It could never execute and is a SyntaxError on
    # Python 3, so it has been removed.
def edgebet(net):
    """Betweenness distribution of *net*'s edges (normalized scores)."""
    scores = nx.edge_betweenness(net, normalized=True).values()
    return distri(scores, 'betweenness')
def construct_ccig(sentences, concepts, title=None, use_cd=True, betweenness_threshold_coef=1.0, max_c_size=10,
                   min_c_size=3, IDF=None):
    """
     Given a segmented text and a list of concepts,
     construct a concept community interaction graph.
     :param sentences: a list of sentences.
     :param concepts: a list of concepts.
     :param title: optional title; when given, a title vertex is added.
     :param use_cd: when True, group concepts with community detection;
         otherwise every concept forms its own singleton community.
     :param betweenness_threshold_coef: forwarded to get_concept_communities.
     :param max_c_size: maximum community size (community detection only).
     :param min_c_size: minimum community size (community detection only).
     :param IDF: IDF table used for tf-idf similarity between node texts.
     :return: a concept community interaction graph, or None when there is
         nothing usable to build from (no sentences/concepts, or >70 concepts).
     """
    g = nx.Graph()

    # Deduplicate concepts and drop the placeholder "empty" vertex name.
    concepts = list(set(concepts))
    concepts = remove_values_from_list(concepts, EMPTY_VERTEX_NAME)

    if len(sentences) == 0 or len(concepts) == 0:
        print("No concept in concepts list.")
        return None
    if len(concepts) > 70:
        print("Too many concepts.")
        return None

    # get concept communities
    if use_cd:
        concept_communities = get_concept_communities(sentences, concepts, betweenness_threshold_coef, max_c_size,
                                                      min_c_size)

    else:
        # No community detection: one singleton community per concept.
        concept_communities = [[c] for c in concepts]

    # Map every community (or raw concept) name to the sentences it covers.
    if use_cd:
        cname_sentidxs = assign_sentences_to_concept_communities(
            sentences, concept_communities, IDF)
    else:
        cname_sentidxs = assign_sentences_to_concepts(sentences, concepts)

    # initialize vertex properties
    # concept -> list of vertex indices whose community contains it.
    concept_vertexidxs_map = {}

    for c in concepts:
        concept_vertexidxs_map[c] = []

    # Vertex 0 is the special "empty" vertex holding unassigned sentences.
    g.add_node(0, name=EMPTY_VERTEX_NAME, concepts=[], sentidxs=cname_sentidxs[EMPTY_VERTEX_NAME])
    # g.add_node(0)
    # g.node[0]['name'] = EMPTY_VERTEX_NAME
    # g.node[0]['concepts'] = []
    # g.node[0]['sentidxs'] = cname_sentidxs[EMPTY_VERTEX_NAME]

    # print(g.node[0])
    i = 1

    # One vertex per community that actually has sentences assigned to it.
    for community in concept_communities:
        cname = community2name(community)

        if len(cname_sentidxs[cname]) == 0:
            continue

        g.add_node(i, name=cname, concepts=community, sentidxs=cname_sentidxs[cname])

        for concept in community:
            concept_vertexidxs_map[concept].append(i)
        i = i + 1

    # edges by connective entences
    # dic
    # Edge property tables, keyed by (source_idx, target_idx) tuples.
    eprop_name = {}
    eprop_concepts = {}
    eprop_sentidxs = {}
    eprop_weight_numsent = {}
    eprop_weight_tfidf = {}

    # A sentence mentioning concepts from 2+ vertices links those vertices.
    for sent_idx in range(len(sentences)):
        sent = sentences[sent_idx]
        words = str(sent).split()
        intersect = set(words).intersection(set(concepts))

        if len(intersect) == 0:
            continue

        related_vertexidxs = []

        for c in intersect:
            related_vertexidxs.extend(concept_vertexidxs_map[c])
        related_vertexidxs = list(set(related_vertexidxs))

        # print("related_vertex_idx:")
        # print(related_vertexidxs)

        num_related_v = len(related_vertexidxs)

        if num_related_v < 2:
            continue

        # Connect every unordered pair of related vertices.
        for j in range(num_related_v):
            v1_idx = related_vertexidxs[j]
            for k in range(j, num_related_v):
                if j == k:
                    continue
                v2_idx = related_vertexidxs[k]

                # Canonical edge key: smaller index first.
                source_idx = min(v1_idx, v2_idx)
                target_idx = max(v1_idx, v2_idx)

                e = (source_idx, target_idx)
                if not g.has_edge(source_idx, target_idx):
                    # g.add_edge(source_idx, target_idx)

                    eprop_sentidxs[e] = [sent_idx]
                    eprop_concepts[e] = list(intersect)

                    g.add_edge(source_idx, target_idx)

                    # g.add_edges_from([(source_idx, target_idx, dict(sentidxs=eprop_sentidxs[e])),
                    #                   (source_idx, target_idx, dict(concepts=eprop_concepts[e]))])

                else:
                    # Edge already exists: accumulate sentence/concept info.
                    old_idxs = list(eprop_sentidxs[e])
                    old_idxs.append(sent_idx)
                    eprop_sentidxs[e] = old_idxs

                    old_concepts = list(eprop_concepts[e])
                    old_concepts.extend(intersect)
                    eprop_concepts[e] = list(set(old_concepts))

                g[source_idx][target_idx]['sentidxs'] = eprop_sentidxs[e]
                g[source_idx][target_idx]['concepts'] = eprop_concepts[e]

    # assign vertex names and weights
    for e in g.edges():
        eprop_name[e] = " ".join(eprop_concepts[e])
        eprop_weight_numsent[e] = float(len(eprop_sentidxs[e]))
        eprop_weight_tfidf[e] = 0.0

        g[e[0]][e[1]]['weight_numsent'] = eprop_weight_numsent[e]
        g[e[0]][e[1]]['weight_tfidf'] = eprop_weight_tfidf[e]

    # edges by node text similarity
    WEIGHT_THRESHOLD = 0.001  # NOTICE: smaller threshold leads to more edges

    numv = g.number_of_nodes()

    # NOTE(review): `g.node` below is the networkx 1.x attribute API
    # (renamed to `g.nodes` in networkx 2.x) — confirm the pinned version.
    for i in range(numv):
        for j in range(i, numv):
            if j == i:
                continue
            v1 = g.node[i]
            v2 = g.node[j]
            idxs1 = list(set(v1['sentidxs']))
            idxs2 = list(set(v2['sentidxs']))

            # Compare the concatenated sentence text of the two vertices.
            text1 = [sentences[s] for s in idxs1]
            text1 = " ".join(text1)
            text2 = [sentences[s] for s in idxs2]
            text2 = " ".join(text2)

            w = tfidf_cos_sim(text1, text2, IDF)

            if w >= WEIGHT_THRESHOLD:
                e = (i, j)
                if not g.has_edge(i, j):
                    # Similarity-only edge: empty provenance attributes.
                    eprop_sentidxs[e] = []
                    eprop_concepts[e] = []
                    eprop_weight_numsent[e] = 0.0
                    eprop_name[e] = ""
                    g.add_edges_from([
                        (i, j, dict(sentidxs=eprop_sentidxs[e])),
                        (i, j, dict(concepts=eprop_concepts[e])),
                        (i, j, dict(weight_numsent=eprop_weight_numsent[e])),
                        (i, j, dict(weight_name=eprop_name[e]))
                    ])
                eprop_weight_tfidf[e] = w
                g[i][j]['weight_tfidf'] = eprop_weight_tfidf[e]
    if title is not None:
        # NOTE(review): add_nodes_from iterates its first argument, so the
        # string 'TITLE' adds the five nodes 'T','I','T','L','E' — likely
        # add_node was intended; confirm.
        g.add_nodes_from('TITLE', name=TITLE_VERTEX_NAME, sentidxs=[], concepts=[])

    #g.add_nodes_from('T', name=TITLE_VERTEX_NAME, sentidxs=[], concepts=[])
    # calculate vertex scores
    pr = nx.pagerank(g, weight='weight_tfidf')
    bt = nx.betweenness_centrality(g, weight='weight_tfidf')
    #print(bt)
    try:
        katz = nx.katz_centrality(g, weight='weight_tfidf')
    except:
        # Katz centrality can fail to converge; fall back to zeros.
        katz = [0.0 for i in range(numv)]
    #numv = len(pr)
    for i in g.nodes():
        #print(i)
        g.node[i]['pagerank'] = pr[i]
        g.node[i]['betweenness'] = bt[i]
        g.node[i]['katz'] = katz[i]

    # Attach per-edge betweenness scores.
    ebt = nx.edge_betweenness(g, weight='weight_tfidf')
    #print(ebt)
    #print(g.nodes())
    for i in range(len(g.nodes())):
        for j in range(i, len(g.nodes())):
            if j == i:
                continue
            if g.has_edge(i, j):
                g[i][j]['betweenness'] = ebt[(i, j)]

    return g
def get_betweenness(graph):
    """Edge-betweenness scores of *graph*, keyed by edge tuple."""
    scores = nx.edge_betweenness(graph)
    return scores
 def test_edge_betweenness(self):
     """Edge betweenness must agree between self.hg and self.g."""
     self.assertEqual(nx.edge_betweenness(self.hg),
                      nx.edge_betweenness(self.g))
def betweeness_calculation(graph):
    """Remove the single highest-betweenness edge from *graph* in place.

    Returns the (mutated) graph.
    """
    betweeness = nx.edge_betweenness(graph, k=None, normalized=False,
                                     weight=None, seed=None)
    # .iteritems() is Python-2 only and raises AttributeError on Python 3;
    # max over the dict with its own lookup as key picks the same top edge.
    top_edge = max(betweeness, key=betweeness.get)
    graph.remove_edge(*top_edge)
    return graph
def edgebet(net):
    """Betweenness distribution over the edges of *net* (normalized)."""
    bc = nx.edge_betweenness(net, normalized=True)
    return distri(bc.values(), 'betweenness')
示例#52
0
# Collect airport codes and populate the weighted (G) and unweighted (G_un)
# flight graphs from the CSV network file opened earlier as `net_file`.
code = set()
net_file.readline()  # skip the CSV header line
for line in net_file.readlines():

    flight = line.split(",")
    # Fields 1 and 2 hold quoted airport names; split on '"' to unquote.
    name1 = flight[1].split("\"")
    name2 = flight[2].split("\"")
    # Keep only routes with more than 400 flights; weight = flights per day.
    if float(flight[0]) > 400:
        G.add_edge(name1[1], name2[1], weight=float(flight[0]) / 365)
        G_un.add_edge(name1[1], name2[1])
        if name1[1] not in code:
            code.add(name1[1])
        if name2[1] not in code:
            code.add(name2[1])

# Edge betweenness on the unweighted and weighted graphs, plus Jaccard
# coefficients of edge endpoint neighborhoods.
between = net.edge_betweenness(G_un)
be = net.edge_betweenness(G)
jaccard = net.jaccard_coefficient(G)
# NOTE(review): `locals` shadows the builtin of the same name — consider
# renaming if the later (out-of-view) code allows it.
locals = dict()
weight = dict()
# Accumulate the total flight weight incident to every airport.
for city in code:
    weight[city] = 0
for s, e in G.edges():
    weight[e] += G.get_edge_data(s, e)['weight']
    weight[s] += G.get_edge_data(s, e)['weight']

# Re-key the jaccard generator into a dict keyed by node pair.
jacc = dict()
for u, v, j in jaccard:
    jacc[(u, v)] = j
print(G.degree())
degree_diff = dict()