Example 1
import glob
import os

import numpy as np

# get_node_betweenness_centrality, weight_atlas_by_add and save_as_nii are
# helper functions defined elsewhere in this module.


def each_subj_seperate(main_subj_folder, mni_atlas_file_name, idx, atlas_type):
    # Compute node betweenness centrality per subject and save one NIfTI
    # volume per subject and matrix type.
    for sub in glob.glob(f'{main_subj_folder}{os.sep}*{os.sep}'):
        sn = sub.split(os.sep)[-2]  # subject name (unused in this snippet)
        num_mat_name = sub + 'non-weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
        if os.path.exists(num_mat_name):
            # Non-weighted (Num) connectivity matrix.
            num_mat = np.load(num_mat_name)
            ncb_num = get_node_betweenness_centrality(num_mat)

            # Weighted (ADD) connectivity matrix.
            add_mat_name = sub + 'weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
            add_mat = np.load(add_mat_name)
            ncb_add = get_node_betweenness_centrality(add_mat)

            # Project the per-node values onto the atlas and save as NIfTI.
            weighted_by_atlas, weights_dict = weight_atlas_by_add(
                mni_atlas_file_name, ncb_num, idx)
            save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                        f'Num_node-centrality-betweenness_{atlas_type}',
                        sub[:-1])

            weighted_by_atlas, weights_dict = weight_atlas_by_add(
                mni_atlas_file_name, ncb_add, idx)
            save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                        f'ADD_node-centrality-betweenness_{atlas_type}',
                        sub[:-1])
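The helper get_node_betweenness_centrality is not shown on this page. A minimal sketch of one way to implement it, assuming a symmetric connectivity matrix, networkx as the graph library, and that connection strength should be inverted into a path distance (all assumptions, not the module's actual code):

import networkx as nx
import numpy as np


def get_node_betweenness_centrality(cm):
    # Hypothetical stand-in for the helper used above: betweenness
    # treats edge weights as distances, so strong connections are
    # converted into short paths (distance = 1 / weight).
    dist = np.zeros_like(cm, dtype=float)
    nonzero = cm > 0
    dist[nonzero] = 1.0 / cm[nonzero]
    g = nx.from_numpy_array(dist)  # edges only where dist > 0
    bc = nx.betweenness_centrality(g, weight='weight')
    return np.array([bc[i] for i in range(cm.shape[0])])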
Example 2
def grouped_together(main_subj_folder, mni_atlas_file_name, idx, atlas_type):
    # Collect node betweenness centrality across all subjects, then save
    # the group mean projected onto the atlas.
    ncb_num = []
    ncb_add = []
    for sub in glob.glob(f'{main_subj_folder}{os.sep}*{os.sep}'):
        sn = sub.split(os.sep)[-2]  # subject name (unused in this snippet)
        num_mat_name = sub + 'non-weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
        if os.path.exists(num_mat_name):
            num_mat = np.load(num_mat_name)
            ncb_num.append(get_node_betweenness_centrality(num_mat))

            add_mat_name = sub + 'weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
            add_mat = np.load(add_mat_name)
            ncb_add.append(get_node_betweenness_centrality(add_mat))

    # Treat zeros as missing values so they do not pull the group mean down.
    ncb_num = np.asarray(ncb_num)
    ncb_add = np.asarray(ncb_add)
    ncb_num[ncb_num == 0] = np.nan
    ncb_add[ncb_add == 0] = np.nan

    ncb_num_mean = np.nanmean(ncb_num, axis=0)
    ncb_add_mean = np.nanmean(ncb_add, axis=0)
    ncb_num_mean[np.isnan(ncb_num_mean)] = 0
    ncb_add_mean[np.isnan(ncb_add_mean)] = 0

    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, ncb_num_mean, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'Num_node-centrality-betweenness_{atlas_type}',
                main_subj_folder)

    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, ncb_add_mean, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'ADD_node-centrality-betweenness_{atlas_type}',
                main_subj_folder)
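The zero-to-NaN round trip in grouped_together implements a masked group mean: a node that is absent in some subjects (stored as 0) is ignored for those subjects instead of dragging the average toward zero. A self-contained demonstration with made-up numbers:

import numpy as np

vals = np.array([[0.2, 0.0],     # subject 1: node 2 missing, stored as 0
                 [0.4, 0.6]])    # subject 2: both nodes present
vals[vals == 0] = np.nan         # 0 means "no value", not "value 0"
mean = np.nanmean(vals, axis=0)  # -> [0.3, 0.6] instead of [0.3, 0.3]
mean[np.isnan(mean)] = 0         # restore 0 before writing into the volume
print(mean)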
Example 3
            eff_add_dict = merge_dict(eff_add_dict, eff_add)

            # Behavioral scores for this subject, looked up by scan name.
            wos1.append(table1.find_value_by_scan_Language('Word Order Score 1', sn))
            lws.append(table1.find_value_by_scan_Language('Learning words slope', sn))

    # Pack the per-node efficiency dicts into subjects x nodes matrices.
    eff_num_mat = np.zeros((n_subj, len(eff_num_dict)))
    eff_add_mat = np.zeros((n_subj, len(eff_add_dict)))
    for k in eff_num_dict.keys():
        eff_num_mat[:, k] = eff_num_dict[k]

    for k in eff_add_dict.keys():
        eff_add_mat[:, k] = eff_add_dict[k]

    # Correlate each node's local efficiency with the two behavioral scores
    # and save the per-node r values projected onto the atlas.
    volume_type = 'Num'
    r, p = calc_corr(wos1, eff_num_mat, fdr_correct=False, remove_outliers=True)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_LocEff-WOS_th_r_{atlas_type}', main_subj_folders)

    r, p = calc_corr(lws, eff_num_mat, fdr_correct=False, remove_outliers=True)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_LocEff-LWS_th_r_{atlas_type}', main_subj_folders)

    volume_type = 'ADD'

    r, p = calc_corr(wos1, eff_add_mat, fdr_correct=False, remove_outliers=True)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_LocEff-WOS_th_r_{atlas_type}', main_subj_folders)

    r, p = calc_corr(lws, eff_add_mat, fdr_correct=False, remove_outliers=True)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_LocEff-LWS_th_r_{atlas_type}', main_subj_folders)
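calc_corr is another external helper. A minimal sketch of a column-wise correlation in its spirit, assuming Pearson correlation from scipy and skipping the fdr_correct and remove_outliers options (name and behavior here are assumptions):

import numpy as np
from scipy import stats


def calc_corr_sketch(scores, node_mat):
    # Hypothetical stand-in for calc_corr: correlate one behavioral score
    # per subject with every node column of a subjects x nodes matrix;
    # returns per-node r and p vectors.
    scores = np.asarray(scores, dtype=float)
    n_nodes = node_mat.shape[1]
    r = np.zeros(n_nodes)
    p = np.ones(n_nodes)
    for col in range(n_nodes):
        r[col], p[col] = stats.pearsonr(scores, node_mat[:, col])
    return r, p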
Example 4
# Pack the per-node efficiency dicts into subjects x nodes matrices.
eff_num_mat = np.zeros((len(num_ne_dict[0]), len(num_ne_dict)))
eff_add_mat = np.zeros((len(add_ne_dict[0]), len(add_ne_dict)))
for k in num_ne_dict.keys():
    eff_num_mat[:, k] = num_ne_dict[k]

for k in add_ne_dict.keys():
    eff_add_mat[:, k] = add_ne_dict[k]

# Atlas lookup table and template volumes.
idx = np.load(r'F:\data\V7\HCP\cm_num_lookup.npy')
mni_atlas_file_name = r'F:\data\atlases\aal300\AAL150_fixed.nii'
nii_base = r'F:\data\atlases\yeo\yeo7_1000\Schaefer2018_1000Parcels_17Networks_order_FSLMNI152_2mm.nii'

main_subj_folders = r'F:\data\V7\HCP'

# Node-wise correlation between Num and ADD local efficiency,
# with and without FDR correction.
r, p = calc_corr_mat(eff_num_mat, eff_add_mat, fdr_correct=True)
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r,
                                                      idx)
save_as_nii_aal(weighted_by_atlas, mni_atlas_file_name, nii_base,
                'aal300_LocEff_correlation_ADD_Num_fdr', main_subj_folders)

r, p = calc_corr_mat(eff_num_mat, eff_add_mat, fdr_correct=False)
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r,
                                                      idx)
save_as_nii_aal(weighted_by_atlas, mni_atlas_file_name, nii_base,
                'aal300_LocEff_correlation_ADD_Num', main_subj_folders)

# Group-mean ADD local efficiency per node, saved as a volume.
eff_add_mean = np.nanmean(eff_add_mat, axis=0)
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name,
                                                      eff_add_mean, idx)
save_as_nii_aal(weighted_by_atlas, mni_atlas_file_name, nii_base,
                'aal300_LocEff_ADD', main_subj_folders)
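weight_atlas_by_add and the save_as_nii* helpers are likewise external. A minimal sketch of the underlying projection step, assuming the atlas NIfTI stores integer region labels, idx maps matrix columns to those labels, and nibabel is the I/O library (all assumptions about the real helpers):

import nibabel as nib
import numpy as np


def project_values_onto_atlas(atlas_file, values, idx, out_file):
    # Hypothetical stand-in for weight_atlas_by_add + save_as_nii:
    # paint values[i] over every voxel whose label equals idx[i],
    # then save the painted volume with the atlas affine.
    img = nib.load(atlas_file)
    labels = np.asarray(img.get_fdata(), dtype=int)
    out = np.zeros(labels.shape, dtype=float)
    for i, val in enumerate(values):
        out[labels == int(idx[i])] = float(val)
    nib.save(nib.Nifti1Image(out, img.affine), out_file)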