# NOTE: repo-local helpers used below (get_subject_info, readSubjectFiles,
# threshold_all, Args, ...) are assumed importable as in the later examples.
import collections


def read_data(size=3, sel_label=("1", "2")):
    # Read one scan (both raw and smooth) per subject along with its DX label
    sub_info = get_subject_info(size)
    X1 = []  # raw connectomes
    X2 = []  # smoothed connectomes
    y = []   # DX labels

    for sub in sub_info.keys():
        rw_all, sm_all = readSubjectFiles(sub, "row")
        assert len(rw_all) == len(sm_all), \
            "Size mismatch in sub {}: {} vs {}".format(
                sub, len(rw_all), len(sm_all))

        if sub_info[sub][0]["DX"] not in sel_label:
            continue
        # Use one scan per subject for every selected label (a single
        # `count = 1` avoids an unbound name for unexpected labels).
        count = 1

        for c in range(count):
            X1.append(rw_all[c])
            y.append(sub_info[sub][c]["DX"])
            X2.append(sm_all[c])

    y = [0 if a == sel_label[0] else 1 for a in y]
    print(collections.Counter(y))
    return X1, X2, y
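
# Usage sketch (hypothetical call; labels "1"/"2" are binarized to y = 0/1):
#   X_raw, X_smooth, y = read_data(size=3, sel_label=("1", "2"))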

# ---- Example 2 ----

def calculate_psnr_all(sub_names, p=0.5):
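    """Average PSNR of the noisy, thresholded, degraded, and smoothed
    reconstructions against the original (pre-noise) connectomes,
    pooled over every subject and time point; `p` is the noise level."""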
    noise_rw = 0
    noise_th = 0
    noise_sm = 0
    noise_d = 0
    total = 0
    for sub in sub_names:
        _, connectome_list = readSubjectFiles(sub, method="row")
        # connectome_list = threshold_all(connectome_list, vmin=0.001)
        connectome_list_noisy = add_noise_all(connectome_list, p)
        smoothed_connectomes_noisy, M, E = optimize_longitudinal_connectomes(
            connectome_list_noisy, Args.dfw, Args.sw, Args.lmw, Args.lmd)
        degraded_connectomes, _, _, _, _, _ = initialize_connectomes(
            connectome_list_noisy)
        connectome_list_th = threshold_all(connectome_list_noisy, vmin=0.01)

        for t in range(0, len(connectome_list)):
            noise_rw = noise_rw + calculate_psnr(connectome_list[t],
                                                 connectome_list_noisy[t])
            noise_th = noise_th + calculate_psnr(connectome_list[t],
                                                 connectome_list_th[t])
            noise_d = noise_d + calculate_psnr(connectome_list[t],
                                               degraded_connectomes[t])
            noise_sm = noise_sm + calculate_psnr(connectome_list[t],
                                                 smoothed_connectomes_noisy[t])
            total = total + 1

    noise_rw = noise_rw / total
    noise_th = noise_th / total
    noise_d = noise_d / total
    noise_sm = noise_sm / total
    print("\nNoise raw: ", noise_rw, "\nNoise th: ", noise_th, "\nNoise d: ",
          noise_d, "\nNoise sm: ", noise_sm)
    return noise_rw, noise_th, noise_d, noise_sm
import numpy as np


def entropy_eval():
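    """Compare the entropy of per-node hub matches between raw
    (thresholded) and smoothed connectomes for each subject."""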
    sub_names = get_subject_names(3)
    #sub_names = ["027_S_5110"]
    hub_count = 10
    rw_en_list = []
    sm_en_list = []
    for sub in sub_names:
        rw, sm = readSubjectFiles(sub, "row")
        rw = threshold_all(rw, vmin=Args.threshold)
        rw_hub_match = get_hub_match(rw, hub_count)
        rw_en = get_entropy_list(rw_hub_match)
        sm_hub_match = get_hub_match(sm, hub_count)
        sm_en = get_entropy_list(sm_hub_match)
        rw_en_list.append(rw_en)
        sm_en_list.append(sm_en)
        print("Raw: ", rw_en)
        print("Smooth: ", sm_en)
        '''
        plt.bar(np.arange(0, 148, 1), rw_hub_match)
        plt.ylim(0, 2)
        plt.show()
        plt.bar(np.arange(0, 148, 1), sm_hub_match)
        plt.ylim(0,2)
        plt.show()
        '''

        # Node colors for the (commented-out) .node file export below:
        # '1' where the hub match is zero, '2' otherwise.
        from utils.readFile import write_node_file
        node_color_rw = ['1' if h == 0 else '2' for h in rw_hub_match]
        node_color_sm = ['1' if h == 0 else '2' for h in sm_hub_match]
        '''
        # Write files
        write_node_file(rw_hub_match, node_color_rw, sub + '_hub_rw' + '.node')
        write_node_file(sm_hub_match, node_color_sm, sub + '_hub_sw' + '.node')
        with open(sub + '_signal_rw.txt', 'w') as f:
            for val in rw_hub_match:
                f.write(str(val) + '\n')
        with open(sub + '_signal_sm.txt', 'w') as f:
            for val in sm_hub_match:
                f.write(str(val) + '\n')
        '''

    print("Mean rw: ", np.mean(rw_en), "\nMean sm: ", np.mean(sm_en))

    import pickle as pk
    with open('hub_eval.pkl', 'wb') as f:
        pk.dump([rw_en_list, sm_en_list], f)

# ---- Example 4 ----

def eval_low_range_consistency():
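    """Fraction of nonzero sparse-histogram bins equal to 1, for the
    thresholded raw vs. the smoothed connectomes of one subject."""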
    sub = "027_S_5110"
    rw, sm = readSubjectFiles(sub, method="row")
    th = 0.0002
    rw_th = threshold_all(rw, vmin=th)
    hist_rw = get_sparse_hist(rw_th)
    hist_sm = get_sparse_hist(sm)
    print("Raw: ",
          (hist_rw == 1).sum(axis=None) / (hist_rw > 0).sum(axis=None))
    print("Smt: ",
          (hist_sm == 1).sum(axis=None) / (hist_sm > 0).sum(axis=None))
    write_low_range_connectivity(hist_rw, hist_sm)

# ---- Example 5 ----

import pickle

import numpy as np


def main_sim_net():
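    """PSNR of simulated longitudinal networks (noisy, smoothed, degraded,
    thresholded) against the baseline, swept over noise levels, then plotted."""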
    sub = "027_S_2336"
    rw, sm = readSubjectFiles(sub, method="row")
    baseline_rw, _ = sort_matrix(rw[0], True)
    baseline_sm, _ = sort_matrix(sm[0], True)
    # baseline_rw = binarize(baseline_rw, ht)
    # baseline_sm = binarize(baseline_sm, ht)
    # baseline_rw = simulated_community_structure()
    # baseline_sm = simulated_community_structure()

    th = [i / 10 for i in range(12, 25)]
    T = 7
    recalculate = False
    if recalculate:
        psnr = np.empty(shape=(4, len(th)))
        for i, p in enumerate(th):
            print("p = ", p)
            sim_long_net = simulate_longitudinal_network(baseline_rw,
                                                         noise_level=p)
            for sl in sim_long_net:
                print((sl == 0).sum(axis=None))
            sim_long_net_int, M, E = optimize_longitudinal_connectomes(
                sim_long_net, Args.dfw, Args.sw, Args.lmw, Args.lmd)
            sim_long_net_deg, _, _, _, _, _ = initialize_connectomes(
                sim_long_net)
            sim_long_net_th = threshold_all(sim_long_net, vmin=0.02)

            psnr[0][i] = calculate_psnr_list(sim_long_net, [baseline_rw] * T)
            psnr[1][i] = calculate_psnr_list(sim_long_net_int,
                                             [baseline_sm] * T)
            psnr[2][i] = calculate_psnr_list(sim_long_net_deg,
                                             [baseline_sm] * T)
            psnr[3][i] = calculate_psnr_list(sim_long_net_th,
                                             [baseline_rw] * T)

            with open('psnr.pkl', 'wb') as f:
                pickle.dump(psnr, f)

            print("PSNR sim = ", psnr[0][i], "\nPSNR int = ", psnr[1][i],
                  "\nPSNR_th = ", psnr[3][i], "\nPSNR_deg = ", psnr[2][i])
    else:
        with open('psnr.pkl', 'rb') as f:
            psnr = pickle.load(f)

    plot_psnr(psnr,
              th,
              color=['r-', 'b--*', 'y--.', 'go'],
              labels=[
                  'Simulated data w/ noise', 'Our method',
                  'Our degraded method', 'Thresholded method'
              ])
def compute_eigen_spectrum(sub):
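    """Graph spectra of one subject's raw (thresholded) and smoothed
    connectomes at every time point."""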
    th = Args.threshold  # threshold so raw connectomes have sparsity comparable to the smoothed ones
    connectome_list, smoothed_connectomes = readSubjectFiles(sub, method="row")
    connectome_list = [
        threshold(connectome, vmin=th) for connectome in connectome_list
    ]

    raw_spectrum_list = [
        get_graph_specturm(connectome) for connectome in connectome_list
    ]
    smth_spectrum_list = [
        get_graph_specturm(connectome) for connectome in smoothed_connectomes
    ]

    return raw_spectrum_list, smth_spectrum_list
def main(sub="027_S_5110"):
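    """Circular plot of the top 500 links of a subject's second scan,
    with lobe-wise node colors."""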
    data, _ = readSubjectFiles(sub, "whole", sort=False)

    for t in range(0, len(data)):
        data[t], _ = sort_matrix(data[t], False)

    # Plot Raw
    rw_links = get_top_links(data[1], count=500, offset=0, weight=True)
    #face_color, edge_color = mp_ring_colors(data)
    face_color, edge_color = get_lobe_wise_color_ring(len(data[1]))
    plot_circle(face_color,
                edge_color,
                rw_links,
                save=False,
                fname='demo_cplot')

# ---- Example 8 ----

def measure_change():
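    """Temporal change (central difference of link weights) of simulated
    longitudinal networks under each reconstruction method."""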
    sub = "027_S_2336"
    rw, sm = readSubjectFiles(sub, method="row")
    baseline_rw, _ = sort_matrix(rw[0], True)
    baseline_sm, _ = sort_matrix(sm[0], True)

    T = 7
    p = 1
    sim_long_net = simulate_longitudinal_network(baseline_rw,
                                                 noise_level=p,
                                                 count=T)
    sim_long_net_int, M, E = optimize_longitudinal_connectomes(
        sim_long_net, Args.dfw, Args.sw, Args.lmw, Args.lmd)
    sim_long_net_deg, _, _, _, _, _ = initialize_connectomes(sim_long_net)
    sim_long_net_th = threshold_all(sim_long_net, vmin=0.0002)

    sim = central_difference_of_links(sim_long_net)
    sim_sm = central_difference_of_links(sim_long_net_int)
    sim_th = central_difference_of_links(sim_long_net_th)
    sim_deg = central_difference_of_links(sim_long_net_deg)

    print(
        "Simulated data with noise: %f\nOur method: %f\nOur method degraded: %f\nThresholded: %f\n"
        % (sim, sim_sm, sim_deg, sim_th))
import numpy as np


def mean_std_voi(sub_names):
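    """Mean and standard deviation of the variation of information (VOI)
    between the community structures of every pair of scans, for raw and
    smoothed connectomes."""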
    voi_rw = []
    voi_sm = []
    for sub in sub_names:
        connectome_list, smoothed_connectomes = readSubjectFiles(sub,
                                                                 method="row")

        voi_rw = voi_rw + [
            voi_between_community_structure(v1, v2) for v1 in connectome_list
            for v2 in connectome_list if v1 is not v2
        ]
        voi_sm = voi_sm + [
            voi_between_community_structure(v1, v2)
            for v1 in smoothed_connectomes
            for v2 in smoothed_connectomes if v1 is not v2
        ]

    voi_rw_mean = np.mean(voi_rw)
    voi_rw_std = np.std(voi_rw)

    voi_sm_mean = np.mean(voi_sm)
    voi_sm_std = np.std(voi_sm)

    return voi_rw_mean, voi_rw_std, voi_sm_mean, voi_sm_std
import os

import numpy as np

from args import Args
from test import optimize_longitudinal_connectomes
from utils.helper import add_noise_all
from utils.readFile import readSubjectFiles

if __name__ == '__main__':
    args = Args()
    data_dir = os.path.join(args.root_directory, os.pardir, 'AD-Data_Organized')
    sub = '027_S_2336'
    connectome_list, smoothed_connectome = readSubjectFiles(sub)
    connectome_list_noisy = add_noise_all(connectome_list)
    smoothed_connectomes_noisy, M, E = optimize_longitudinal_connectomes(
        connectome_list_noisy, Args.dfw, Args.sw, Args.lmw, Args.lmd)

    # Compute the noise (total absolute deviation) in the raw and smoothed data
    noise_rw = 0
    noise_sm = 0
    for t in range(0, len(connectome_list)):
        noise_rw = noise_rw + np.abs(connectome_list[t] - connectome_list_noisy[t]).sum()
        noise_sm = noise_sm + np.abs(smoothed_connectome[t] - smoothed_connectomes_noisy[t]).sum()

    print("Raw: ", noise_rw,
          "\nSM: ", noise_sm)

# ---- Example 11 ----

    # Fragment: the __init__ of a class whose definition is not shown; it
    # loads one subject's raw and smoothed longitudinal scan series.
    def __init__(self, subject=None):
        if subject is not None:
            self.rw_data, self.smth_data = readSubjectFiles(subject,
                                                            method="row")
            self.T = len(self.rw_data)
from bct import *
from hypo_test import *
from compare_network import n_comp
from utils.helper import get_centrality_measure

if __name__ == '__main__':
    sub_info = get_subject_info()
    count = 0
    min_bound = 6

    for key in list(sub_info.keys()):
        if len(sub_info[key]) < min_bound:
            del sub_info[key]
            count = count + 1
        else:
            rw, sm = readSubjectFiles(key)
            for t in range(0, len(sub_info[key])):
                print(key, t)
                sub_info[key][t]["raw_data"] = rw[t]
                sub_info[key][t]["sm_data"] = sm[t]
                sub_info[key][t]["cc_rw"] = np.mean(clustering_coef_wu(rw[t]))
                sub_info[key][t]["cc_sm"] = np.mean(clustering_coef_wu(sm[t]))
                sub_info[key][t]["mod_rw"] = n_comp(rw[t])
                sub_info[key][t]["mod_sm"] = n_comp(sm[t])
                sub_info[key][t]["d_rw"] = np.max(strengths_und(rw[t]))
                sub_info[key][t]["d_sm"] = np.max(strengths_und(sm[t]))
                #_, sub_info[key][t]["bc_rw"], _ = flow_coef_bd(rw[t])
                #_, sub_info[key][t]["bc_sm"], _ = flow_coef_bd(sm[t])

    #two_sample_t_test(sub_info)
    print(group_consistency(sub_info))


import numpy as np
from numpy import linalg as LA


def diff_eig_vec(temp_net, eig_num=0):
    """Print the |cosine| between eigenvector `eig_num` of the graph
    Laplacian at every pair of time points."""
    print("*** Angular difference between "
          "eigenvectors of the temporal networks ***")

    t = len(temp_net)
    n = len(temp_net[0])
    eigvec = np.zeros((n, t))

    for i in range(0, t):
        D = np.diag(temp_net[i].sum(axis=1))
        L = D - temp_net[i]
        _, eig = LA.eigh(L)
        eigvec[:, i] = eig[:, eig_num]

    # |cos| similarity between the chosen eigenvector at each pair of time
    # points (eigh returns orthonormal eigenvectors)
    ang_dist = np.abs(np.dot(eigvec.T, eigvec))
    np.set_printoptions(formatter={'float': lambda x: "{0:0.2f}".format(x)})
    print(ang_dist)


if __name__ == '__main__':
    # Input
    s_name = '027_S_5109'
    # Use the raw connectome series; unpack `_, temp_net` for the smoothed one
    temp_net, _ = readSubjectFiles(s_name)
    diff_eig_vec(temp_net, 4)
    # diff_eig_val(temp_net)