Example #1
import numpy as np
import bct

# load_sample_group_qball / load_sample_group_dsi are sample-data helpers
# defined alongside this test, not part of the bct module itself
def test_partition_distance():
    q = load_sample_group_qball()
    d = load_sample_group_dsi()

    # average across the third axis to obtain group-mean connectivity matrices
    q = np.mean(q, axis=2)
    d = np.mean(d, axis=2)

    qi, _ = bct.modularity_und(q)
    di, _ = bct.modularity_und(d)

    vi, mi = bct.partition_distance(qi, di)

    print(vi, mi)
    assert np.allclose(vi, 0.1964, atol=0.01)
    assert np.allclose(mi, 0.6394, atol=0.01)
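A quick sanity check of the two return values (a minimal sketch, relying on bctpy returning normalized measures): comparing a partition with itself yields a variation of information of 0 and a mutual information of 1.

import numpy as np
import bct

# identical partitions: normalized VI should be 0, normalized MI should be 1
labels = np.array([1, 1, 2, 2, 3, 3])
vi, mi = bct.partition_distance(labels, labels)
assert np.isclose(vi, 0.0)
assert np.isclose(mi, 1.0)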
Example #2
import bct

# community-structure similarity of two undirected adjacency matrices:
# 1 minus the normalized variation of information between their partitions
def entropic_similarity(a1, a2):
    ma, _ = bct.modularity_und(a1)
    mb, _ = bct.modularity_und(a2)

    vi, _ = bct.partition_distance(ma, mb)
    return 1 - vi
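A minimal usage sketch (the random symmetric matrices below are illustrative stand-ins, not data from the original project): because bctpy's partition_distance returns a variation of information normalized to [0, 1], the similarity also lies in [0, 1], with values near 1 indicating near-identical community structure.

import numpy as np
import bct

rng = np.random.default_rng(0)

def random_undirected(n=30):
    # symmetric weight matrix with a zero diagonal
    w = rng.random((n, n))
    w = (w + w.T) / 2
    np.fill_diagonal(w, 0)
    return w

a1 = random_undirected()
a2 = random_undirected()
print(entropic_similarity(a1, a1))  # same network compared with itself: should be 1
print(entropic_similarity(a1, a2))  # independent random networks: typically lower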
Example #3
# imports used by this snippet; `simulate`, `Graph_Analysis`, and `corr_data`
# are defined earlier in the source script
import numpy as np
import bct
import matplotlib.pyplot as plt
import seaborn as sns

#**********************************

# plot number of communities as a function of threshold
thresholds = np.arange(.15, .35, .01)
partition_distances = []
cluster_size = []
for t in thresholds:
    # run community detection 100 times at this threshold
    clusters = simulate(100,
                        fun=lambda: Graph_Analysis(
                            corr_data, threshold=t, weight=True, display=False)
                        [0].vs['community'])
    distances = []
    for i, c1 in enumerate(clusters):
        for c2 in clusters[i + 1:]:  # each unordered pair once, skipping self-comparisons
            distances.append(bct.partition_distance(c1, c2)[0])
    partition_distances += zip([t] * len(distances), distances)
    cluster_size += (zip([t] * len(clusters), [max(c) for c in clusters]))

plt.figure(figsize=(16, 12))
sns.stripplot(x=list(zip(*cluster_size))[0],
              y=list(zip(*cluster_size))[1],
              jitter=.4)
plt.ylabel('Number of detected communities', size=20)
plt.xlabel('Threshold', size=20)

plt.figure(figsize=(16, 12))
#sns.stripplot(x = zip(*partition_distances)[0], y = zip(*partition_distances)[1], jitter = .2)
sns.boxplot(x=list(zip(*partition_distances))[0],
            y=list(zip(*partition_distances))[1])
plt.ylabel('Partition distance over 100 repetitions', size=20)
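The nested loop above compares every pair of community partitions; a compact equivalent (a sketch, assuming `clusters` is a list of 1-D community-label arrays as produced by the detection step) uses itertools.combinations:

from itertools import combinations

import bct

def pairwise_partition_distances(clusters):
    # normalized variation of information for every unordered pair of partitions
    return [bct.partition_distance(c1, c2)[0]
            for c1, c2 in combinations(clusters, 2)]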