Example No. 1
def calc_graph_measures(data_matrix, thresh=0):
    import numpy as np
    import pandas as pd
    import networkx
    from networkx import eccentricity
    from networkx.algorithms.efficiency import global_efficiency
    from networkx.algorithms.shortest_paths.generic import average_shortest_path_length
    from networkx.algorithms.centrality import betweenness_centrality
    from networkx.algorithms.cluster import average_clustering
    from networkx.algorithms.community.modularity_max import greedy_modularity_communities
    from networkx.algorithms.community.quality import performance

    def _avg_values(results):
        values = []
        if isinstance(results, dict):
            for k in results:
                values.append(results[k])
        elif isinstance(results, list):
            for tup in results:
                values.append(tup[1])

        return np.mean(values)

    # Zero out connections whose absolute weight falls below the threshold (in place).
    below_thresh_indices = np.abs(data_matrix) < thresh
    data_matrix[below_thresh_indices] = 0
    if isinstance(data_matrix, np.ndarray):
        graph = networkx.convert_matrix.from_numpy_matrix(np.real(data_matrix))
    elif isinstance(data_matrix, pd.DataFrame):
        graph = networkx.convert_matrix.from_pandas_adjacency(data_matrix)
    else:
        raise TypeError("data_matrix must be a numpy array or a pandas DataFrame")

    degree = list(graph.degree)
    global_eff = global_efficiency(graph)
    b_central = betweenness_centrality(graph)
    modularity = performance(graph, greedy_modularity_communities(graph))
    try:
        ecc = eccentricity(graph)
    except networkx.exception.NetworkXError:
        ecc = [(0, 0)]

    try:
        clust = average_clustering(graph)
    except networkx.exception.NetworkXError:
        clust = 0

    try:
        char_path = average_shortest_path_length(graph)
    except networkx.exception.NetworkXError:
        char_path = 0

    graph_dict = {'degree': _avg_values(degree),
                  'eccentricity': _avg_values(ecc),
                  'global_efficiency': global_eff,
                  'characteristic_path_length': char_path,
                  'betweenness_centrality': _avg_values(b_central),
                  'clustering_coefficient': clust,
                  'modularity': modularity}

    return graph_dict
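A minimal usage sketch for the function above, assuming a networkx 2.x environment (from_numpy_matrix and performance were removed in networkx 3.0); the connectivity matrix and threshold below are made up:

import numpy as np

rng = np.random.default_rng(0)
m = rng.random((10, 10))
m = (m + m.T) / 2          # make the matrix symmetric
np.fill_diagonal(m, 0)     # no self-connections

measures = calc_graph_measures(m, thresh=0.3)
for name, value in measures.items():
    print(name, value)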
Example No. 2
import csv
import os

from networkx import global_efficiency

# d (the fraction step), base_dir, and shanghai_highest_bc_attack are
# defined elsewhere in the original module.
def shanghai_highest_bc_attack_efficiency():
    result = list()
    for i in range(0, 100):
        fraction = i * d
        print("Highest BC attack with fraction %.3f" % fraction)
        g = shanghai_highest_bc_attack(fraction)
        result.append({
            'fraction': "%.3f" % fraction,
            'efficiency': global_efficiency(g)
        })
    with open(os.path.join(base_dir, 'shanghai_attack/highest_bc_attack.csv'),
              'a') as f:
        w = csv.DictWriter(f, ['fraction', 'efficiency'])
        w.writeheader()
        w.writerows(result)
Example No. 3
import csv
import os

from networkx import global_efficiency

# d (the fraction step), base_dir, and largest_degree_attack are defined
# elsewhere in the original module.
def largest_degree_attack_efficiency():
    result = list()
    for i in range(0, 100):
        fraction = i * d
        print("Largest degree attack with fraction %.3f" % fraction)
        g = largest_degree_attack(fraction)
        result.append({
            'fraction': "%.3f" % fraction,
            'efficiency': global_efficiency(g)
        })
    with open(os.path.join(base_dir, 'attack/largest_degree_attack.csv'),
              'a') as f:
        w = csv.DictWriter(f, ['fraction', 'efficiency'])
        w.writeheader()
        w.writerows(result)
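Examples No. 2 and No. 3 follow the same pattern: remove a growing fraction of nodes, then measure global_efficiency on what remains. A self-contained sketch of that pattern on a random graph (the function and parameter names here are illustrative, not part of the original module):

import random

import networkx as nx
from networkx import global_efficiency

def random_attack_efficiency_curve(n=100, p=0.05, steps=20, seed=0):
    # Remove an increasing fraction of randomly chosen nodes and record
    # the global efficiency of the remaining graph at each step.
    rng = random.Random(seed)
    curve = []
    for i in range(steps):
        fraction = i / steps
        g = nx.gnp_random_graph(n, p, seed=seed)
        removed = rng.sample(list(g.nodes), int(fraction * n))
        g.remove_nodes_from(removed)
        curve.append({'fraction': "%.3f" % fraction,
                      'efficiency': global_efficiency(g)})
    return curve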
Example No. 4
import itertools
from queue import Empty

import networkx as nx
from networkx import global_efficiency
from networkx.algorithms.community import girvan_newman

# d (the fraction step), get_shanghai_subway_graph, shanghai_random_attack,
# and the attack/protect helper functions are defined elsewhere in the
# original module.
def shanghai_worker(id, q, lock, result):
    while True:
        try:
            fraction = q.get(block=False)
            print(">>> Worker {}:".format(id))
            print("Random attack with fraction: %.3f" % fraction)
            # Do 100 times and get average num
            e = 0
            for j in range(0, 100):
                print(">>>Worker {} No {} attack.".format(id, j))
                g = shanghai_random_attack(fraction)
                e += global_efficiency(g)
            e = e / 100
            print(">>> Worker {}:".format(id))
            print("Get e: %f" % e)
            with lock:
                result.put({'fraction': "%.3f" % fraction, 'efficiency': e})
        except Empty:
            return

def get_result(attack, protect):
    result = list()
    attack_name = attack.__name__.replace('_attack_list', '')
    protect_name = protect.__name__.replace('_attack_list', '')
    for i in range(0, 25):
        print('{} attack with {} protect:'.format(attack_name, protect_name))
        attack_fraction = d * i
        print('attack fraction: %.2f' % attack_fraction)
        j = 0
        g = get_shanghai_subway_graph()
        attack_list = attack(g, fraction=attack_fraction)
        protect_fraction = d * j
        print('\t protect fraction: %.2f' % protect_fraction)
        protect_list = protect(g, fraction=protect_fraction)
        for node in attack_list:
            if node not in protect_list:
                g.remove_node(node)
        result.append({
            'attack_fraction': "%.2f" % attack_fraction,
            'protect_fraction': "%.2f" % protect_fraction,
            'efficiency': global_efficiency(g)
        })
    return result

def read_graph_file(file_path):
    edge_list = []
    with open(file_path) as f:
        for line in f:
            new_edge = line.split()
            new_edge[0] = int(new_edge[0])
            new_edge[1] = int(new_edge[1])
            # new_edge[2] = int(new_edge[2])
            edge_list.append(tuple(new_edge))
    return edge_list

def run_girvan_newman(Graph):
    comp = girvan_newman(Graph)
    k = 20
    # Stop iterating once the partition grows beyond k communities.
    limited = itertools.takewhile(lambda c: len(c) <= k, comp)
    for communities in limited:
        print(tuple(sorted(c) for c in communities))


# G_elegans = nx.DiGraph()
# G_elegans.add_weighted_edges_from(read_graph_file("../data/celegans_n306.txt"))
# run_girvan_newman(G_elegans)


G_elegans = nx.Graph()
G_elegans.add_edges_from(read_graph_file("../data/Erdos-Renyi.txt"))
print(global_efficiency(G_elegans))
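For shanghai_worker defined earlier in this example, a hypothetical driver could look like the sketch below; only the multiprocessing wiring is shown, shanghai_worker and shanghai_random_attack come from the original module, and run_random_attack_workers, fractions, and num_workers are made-up names:

import multiprocessing as mp

def run_random_attack_workers(fractions, num_workers=4):
    # Fill a task queue with attack fractions, start worker processes
    # running shanghai_worker, then drain the shared result queue.
    q = mp.Queue()
    result = mp.Queue()
    lock = mp.Lock()
    for fraction in fractions:
        q.put(fraction)
    workers = [mp.Process(target=shanghai_worker, args=(wid, q, lock, result))
               for wid in range(num_workers)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    rows = []
    while not result.empty():
        rows.append(result.get())
    return rows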