Code example #1
def test_generate_matrix():
    # nx = networkx, nm = numpy; eigen_centrality, generate_matrix and
    # vec_dist are helpers from the module under test.
    G = nx.read_weighted_edgelist("toy.edges")
    A = nx.to_numpy_matrix(G)
    x, l = eigen_centrality(A)

    # generate_matrix receives the target centrality x, the scaled vector l*x,
    # a tolerance, and the weighted degree sequence (row sums of A).
    B = generate_matrix(x, l * x, 0.02, nm.ravel(A * nm.ones((6, 1))))
    y, m = eigen_centrality(B, maxiter=10000)
    # The synthetic matrix must preserve the centrality vector and the
    # leading eigenvalue within tolerance.
    assert(vec_dist(x, y) < 0.13)
    assert(abs(l / m - 1) < 0.1)
Code example #2
def test_build_matrix():
    G = nx.read_weighted_edgelist("toy.edges")
    A = nx.to_numpy_matrix(G)
    x, l = eigen_centrality(A)

    # build_matrix constructs a synthetic adjacency matrix from the target
    # centrality x and eigenvalue l alone, with a tolerance of 0.02.
    B = build_matrix(x, l, 0.02)
    y, m = eigen_centrality(B, maxiter=10000)
    assert(vec_dist(x, y) < 0.13)
    assert(abs(l / m - 1) < 0.1)
Code example #3
def test_synthetic_modularity_matrix():
    G = nx.read_weighted_edgelist("toy.edges")
    A = nx.to_numpy_matrix(G)
    # Leading eigenpair of the modularity matrix of A (see the note below).
    K = nm.ones((1, 6)) * A
    x, l = leading_eigenvector(A - nm.transpose(K) * K /
                               float(sum(nm.ravel(K))))

    # The synthetic matrix must yield a modularity matrix with nearly the
    # same leading eigenpair.
    B = synthetic_modularity_matrix(A, 0.001)
    K = nm.ones((1, 6)) * B
    y, m = leading_eigenvector(B - nm.transpose(K) * K /
                               float(sum(nm.ravel(K))))
    assert(vec_dist(x, y) < 0.1)
    assert(abs(l / m - 1) < 0.1)
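For reference, the quantity whose leading eigenpair is compared in this test is Newman's modularity matrix, B_ij = A_ij - k_i k_j / (2m): K = 1^T A is the row vector of (weighted) degrees, nm.transpose(K) * K is the outer product k k^T, and float(sum(nm.ravel(K))) is the total degree 2m.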
Code example #4
def get_statistics(G, H, A, B, x, l, duration):
    """Compare the original graph G (adjacency A, centrality x, eigenvalue l)
    with the synthetic graph H (adjacency B) and return a tuple of metrics."""
    eg.info("Computing statistics...")
    if not H:
        eg.info("Building networkx graph...")
        H = gu.simm_matrix_2_graph(B)
    eg.info("Computing centrality...")
    # The centrality of B is disabled here (placeholder values); re-enable the
    # commented-out call to obtain meaningful eigen_err and lambda_err.
    y, m = (-1, -1)  # eg.eigen_centrality(B, maxiter=100000)
    #  nx.write_weighted_edgelist(H,  "pgp_spectre0.9_" +
    #                            str(random.randint(0, 99999999)) + ".edges")

    eg.info("Computing centrality distance...")
    eigen_err = eg.vec_dist(x, y)
    eg.info("Computing clustering ratio...")
    clust_err = gu.average_clustering(A) / gu.average_clustering(B)
    # clust_err = nm.average(nx.clustering(G).values()) /\
    #     nm.average(nx.clustering(H).values())
    eg.info("Computing lambda ratio...")
    lambda_err = abs(l / m)
    eg.info("Computing degree correlation...")
    degree_corr = gu.correlation(gu.get_degrees(A), gu.get_degrees(B))
    eg.info("Check connectivity...")
    if nx.is_connected(H):
        conn = 1
    else:
        conn = 0

    eg.info("Distance distribution correlation...")
    distance_dist_corr = -1  # gu.correlate_dist_dict(gu.node_distance_dist(A),
    #                    gu.node_distance_dist(B))
    eg.info("Degree betweenness correlation...")
    degree_bet_corr = -1  # gu.correlate_dist_dict(gu.get_degree_betweeness(A),
    #                 gu.get_degree_betweeness(B))
    eg.info("K-coreness correlation...")
    kcore_corr = -1  # gu.correlate_dist_dict(gu.get_kcoreness(A),
    #                    gu.get_kcoreness(B))
    eg.info("Common neighbourhood correlation...")
    common_neigh_corr = -1  # gu.correlate_dist_dict(gu.get_common_neigh_dist(A)
    #                    ,gu.get_common_neigh_dist(B))
    eg.info("Modularity ratio...")
    Gm = gu.norm_modularity(G)
    Hm = gu.norm_modularity(H)
    modularity_ratio = Gm[0] / Hm[0]
    partition_ratio = Gm[1] / float(Hm[1])
    eg.info("Avg neighbourhood degree correlation...")
    avg_neigh_deg_corr = -1  # gu.correlate_dist_dict(
    # gu.get_avg_neighbour_degree(A),  gu.get_avg_neighbour_degree(B))
    eg.info("Done with stats")
    return (eigen_err, degree_corr, clust_err, lambda_err, distance_dist_corr,
            degree_bet_corr, kcore_corr, common_neigh_corr, modularity_ratio,
            partition_ratio, avg_neigh_deg_corr, conn, duration)
Code example #5
def test_eigen_centrality():
    # Part 1: eigenvector centrality of the toy graph against known values.
    with open("toy.edges", "r") as net:
        G = nx.read_weighted_edgelist(net)
        A = nx.to_numpy_matrix(G)
        x, l = eigen_centrality(A)

        y = nm.matrix("0.200305; 0.200305; 0.256073; \
                      0.182829; 0.080244; 0.080244")

        assert(vec_dist(x, y) < 0.001)
        assert(abs(l - 2.2784) < 0.001)

    A = nm.matrix("-0.333333   0.500000   0.666667  -0.166667  -0.500000 \
                   -0.166667;   0.500000  -0.750000   0.500000  -0.250000 \
                   0.250000  -0.250000;  0.666667   0.500000  -0.333333 \
                   -0.166667  -0.500000  -0.166667; -0.166667  -0.250000 \
                   -0.166667  -0.083333   0.750000  -0.083333; -0.500000 \
                   0.250000  -0.500000   0.750000  -0.750000   0.750000; \
                   -0.166667  -0.250000  -0.166667  -0.083333   0.750000 \
                   -0.083333")
    x, l = leading_eigenvector(A)
    y = nm.matrix("0.21001; -0.46692;  0.21001; -0.32423;  0.69536; -0.32423")
    assert(vec_dist(x, y) < 0.001)
    assert(abs(l + 1.9193) < 0.001)
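The helpers used throughout these tests (eigen_centrality, vec_dist, leading_eigenvector) belong to the project itself and are not shown in these excerpts. As a rough reference only, the following hypothetical re-implementations are consistent with the values asserted above: the centrality vector is normalised to sum to 1, the leading eigenvector has unit L2 norm (its sign is only determined up to a factor of -1), and vec_dist is taken to be the Euclidean distance. The real helpers may differ in any of these conventions.

import numpy as np


def vec_dist(x, y):
    # Euclidean distance between two centrality vectors (assumed convention).
    x = np.asarray(x, dtype=float).ravel()
    y = np.asarray(y, dtype=float).ravel()
    return float(np.linalg.norm(x - y))


def eigen_centrality(A, maxiter=10000):
    # Power iteration on the adjacency matrix; the result is normalised to
    # sum to 1, which matches the expected values in test_eigen_centrality.
    A = np.asarray(A, dtype=float)
    x = np.ones(A.shape[0]) / A.shape[0]
    for _ in range(maxiter):
        x_new = A @ x
        x_new = x_new / x_new.sum()
        if np.allclose(x, x_new, atol=1e-12):
            x = x_new
            break
        x = x_new
    # Rayleigh quotient gives the associated eigenvalue.
    l = float(x @ A @ x / (x @ x))
    return x, l


def leading_eigenvector(M):
    # Eigenpair of the largest-magnitude eigenvalue of a symmetric matrix,
    # with unit L2 norm; callers should compare vectors up to sign.
    M = np.asarray(M, dtype=float)
    vals, vecs = np.linalg.eigh(M)
    i = int(np.argmax(np.abs(vals)))
    return vecs[:, i], float(vals[i])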
Code example #6
def write_statistics(A, B, label, net, x, l, output=True):
    """Write the synthetic graph B to a weighted edge list and return (and
    optionally print) a CSV line of statistics comparing A and B."""
    eg.info("Computing new centrality..")
    G = nx.from_numpy_matrix(A)
    H = nx.from_numpy_matrix(B)
    nx.write_weighted_edgelist(
        H,
        net + "_" + label + "_" + str(random.randint(0, 99999999)) + ".edges")
    y, m = eg.eigen_centrality(B, maxiter=100000)

    eg.info("Printing out results..")
    eigen_err = eg.vec_dist(x, y)
    clust_err = nm.average(list(nx.clustering(G).values())) /\
        nm.average(list(nx.clustering(H).values()))
    lambda_err = abs(l / m)

    # Correlate the sorted degree sequences of G and H.
    degree_corr = gu.correlation(sorted(dict(nx.degree(G)).values()),
                                 sorted(dict(nx.degree(H)).values()))
    conn = 1 if nx.is_connected(H) else 0
    out = (
        str(label) + "," + str(net.split(".")[0].split("/")[-1]) + "," +
        str(eigen_err) + "," + str(degree_corr) + "," + str(clust_err) + "," +
        str(lambda_err) + "," + str(-1) + "," + str(-1) + "," +
        #  str(gu.correlate_dist_dict(gu.node_distance_dist(A),
        #                             gu.node_distance_dist(B))) + "," +
        #  str(gu.correlate_dist_dict(gu.get_degree_betweeness(A),
        #                             gu.get_degree_betweeness(B))) + "," +
        str(gu.correlate_dist_dict(gu.get_kcoreness(A), gu.get_kcoreness(B))) +
        "," + str(
            gu.correlate_dist_dict(gu.get_common_neigh_dist(A),
                                   gu.get_common_neigh_dist(B))) + "," +
        str(-1) + "," +
        #  str(community.modularity(
        #      community.best_partition(G), G) /
        #      community.modularity(
        #      community.best_partition(H), H)) + "," +
        str(
            gu.correlate_dist_dict(gu.get_avg_neighbour_degree(A),
                                   gu.get_avg_neighbour_degree(B))) + "," +
        str(conn))
    if output:
        print(out, file=sys.stderr)
    return out
Code example #7
def get_statistics2(G, H, A, B, x, l):
    """Parallel variant of get_statistics: the individual metrics are computed
    by stat_worker processes launched through parallelism.launch_workers."""
    eg.info("Computing statistics...")
    if not H:
        eg.info("Building networkx graph...")
        H = gu.simm_matrix_2_graph(B)
        gu.connect_components(H)

    eg.info("Computing centrality...")
    y, m = eg.eigen_centrality(B, maxiter=100000)
    eg.info("Computing lambda ratio...")
    lambda_err = abs(l / m)
    eg.info("Computing centrality distance...")
    eigen_err = eg.vec_dist(x, y)

    inputs = [("avg_neigh_deg_corr", A, B), ("mod_ratio", G, H),
              ("comm_neigh_corr", A, B), ("kcore_corr", A, B),
              ("deg_bet_corr", A, B), ("dist_dist", A, B),
              ("degree_corr", G, H), ("clust_ratio", G, H)]
    mets = parallelism.launch_workers(inputs,
                                      stat_worker,
                                      inputs_per_worker=1,
                                      parallelism=4)
    res = {}
    for el in mets:
        res.update(el)

    eg.info("Check connectivity...")
    if nx.is_connected(H):
        conn = 1
    else:
        conn = 0

    eg.info("Done with stats")
    return (eigen_err, res['degree_corr'], res['clust_ratio'], lambda_err,
            res['dist_dist'], res['deg_bet_corr'], res['kcore_corr'],
            res['comm_neigh_corr'], res['mod_ratio'],
            res['avg_neigh_deg_corr'], conn)
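stat_worker is not shown in these excerpts. From the way get_statistics2 consumes its output (a list of dicts merged with res.update), each worker presumably receives its slice of (metric_name, X, Y) tuples and returns one dict mapping metric names to values. A hypothetical sketch follows, reusing gu/nx calls consistent with those in get_statistics and write_statistics above; the exact argument shape expected by parallelism.launch_workers is an assumption.

def stat_worker(inputs):
    # Hypothetical worker: 'inputs' is the slice of (metric_name, X, Y)
    # tuples assigned to this worker (a single tuple per worker here, since
    # inputs_per_worker=1); X and Y are either graphs (G, H) or adjacency
    # matrices (A, B), depending on the metric.
    out = {}
    for name, X, Y in inputs:
        if name == "degree_corr":
            out[name] = gu.correlation(sorted(dict(nx.degree(X)).values()),
                                       sorted(dict(nx.degree(Y)).values()))
        elif name == "clust_ratio":
            out[name] = nx.average_clustering(X) / nx.average_clustering(Y)
        elif name == "mod_ratio":
            out[name] = gu.norm_modularity(X)[0] / gu.norm_modularity(Y)[0]
        elif name == "dist_dist":
            out[name] = gu.correlate_dist_dict(gu.node_distance_dist(X),
                                               gu.node_distance_dist(Y))
        elif name == "deg_bet_corr":
            out[name] = gu.correlate_dist_dict(gu.get_degree_betweeness(X),
                                               gu.get_degree_betweeness(Y))
        elif name == "kcore_corr":
            out[name] = gu.correlate_dist_dict(gu.get_kcoreness(X),
                                               gu.get_kcoreness(Y))
        elif name == "comm_neigh_corr":
            out[name] = gu.correlate_dist_dict(gu.get_common_neigh_dist(X),
                                               gu.get_common_neigh_dist(Y))
        elif name == "avg_neigh_deg_corr":
            out[name] = gu.correlate_dist_dict(gu.get_avg_neighbour_degree(X),
                                               gu.get_avg_neighbour_degree(Y))
    return out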