def each_subj_seperate(main_subj_folder, mni_atlas_file_name, idx, atlas_type):
    """Compute per-subject node betweenness centrality and save NIfTI maps.

    For every subject folder under *main_subj_folder*, loads the non-weighted
    ('Num') and weighted ('ADD') whole-brain connectivity matrices, computes
    node betweenness centrality for each, projects the node values onto the
    MNI atlas and writes one NIfTI volume per matrix into the subject folder.
    Subjects missing the non-weighted matrix file are skipped silently.

    Parameters
    ----------
    main_subj_folder : str
        Root folder containing one sub-folder per subject.
    mni_atlas_file_name : str
        Path to the atlas NIfTI file in MNI space.
    idx : array-like
        Atlas label indices expected by ``weight_atlas_by_add``.
    atlas_type : str
        Atlas name suffix used in the output file names (e.g. 'yeo7_200').
    """
    for sub in glob.glob(f'{main_subj_folder}{os.sep}*{os.sep}'):
        num_mat_name = sub + 'non-weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
        # Only process subjects for which the connectivity matrices exist.
        if os.path.exists(num_mat_name):
            num_mat = np.load(num_mat_name)
            ncb_num = get_node_betweenness_centrality(num_mat)
            add_mat_name = sub + 'weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
            add_mat = np.load(add_mat_name)
            ncb_add = get_node_betweenness_centrality(add_mat)
            # Save one volume per matrix type; sub[:-1] strips the trailing os.sep
            # so the output lands inside the subject folder.
            for node_vals, prefix in ((ncb_num, 'Num'), (ncb_add, 'ADD')):
                weighted_by_atlas, weights_dict = weight_atlas_by_add(
                    mni_atlas_file_name, node_vals, idx)
                save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                            f'{prefix}_node-centrality-betweenness_{atlas_type}',
                            sub[:-1])
def grouped_together(main_subj_folder, mni_atlas_file_name, idx, atlas_type):
    """Average node betweenness centrality across subjects and save group maps.

    Collects node betweenness centrality from every subject's non-weighted
    ('Num') and weighted ('ADD') connectivity matrices, averages each node
    across subjects while ignoring zero entries, and writes one group-level
    NIfTI volume per matrix type into *main_subj_folder*. Subjects missing
    the non-weighted matrix file are skipped silently.

    Parameters
    ----------
    main_subj_folder : str
        Root folder containing one sub-folder per subject; also the output folder.
    mni_atlas_file_name : str
        Path to the atlas NIfTI file in MNI space.
    idx : array-like
        Atlas label indices expected by ``weight_atlas_by_add``.
    atlas_type : str
        Atlas name suffix used in the output file names (e.g. 'yeo7_200').
    """
    ncb_num = []
    ncb_add = []
    for sub in glob.glob(f'{main_subj_folder}{os.sep}*{os.sep}'):
        num_mat_name = sub + 'non-weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
        if os.path.exists(num_mat_name):
            num_mat = np.load(num_mat_name)
            ncb_num.append(get_node_betweenness_centrality(num_mat))
            add_mat_name = sub + 'weighted_wholebrain_5d_labmask_yeo7_200_nonnorm.npy'
            add_mat = np.load(add_mat_name)
            ncb_add.append(get_node_betweenness_centrality(add_mat))
    # dtype=float guarantees the NaN assignment below is valid even if the
    # centrality helper ever returns integer arrays.
    ncb_num = np.asarray(ncb_num, dtype=float)
    ncb_add = np.asarray(ncb_add, dtype=float)
    # Treat zeros as missing so they do not drag the group mean down.
    ncb_num[ncb_num == 0] = np.nan
    ncb_add[ncb_add == 0] = np.nan
    ncb_num_mean = np.nanmean(ncb_num, axis=0)
    ncb_add_mean = np.nanmean(ncb_add, axis=0)
    # Nodes that were zero in every subject come back as NaN; restore 0 so the
    # saved volume stays finite.
    ncb_num_mean[np.isnan(ncb_num_mean)] = 0
    ncb_add_mean[np.isnan(ncb_add_mean)] = 0
    for mean_vals, prefix in ((ncb_num_mean, 'Num'), (ncb_add_mean, 'ADD')):
        weighted_by_atlas, weights_dict = weight_atlas_by_add(
            mni_atlas_file_name, mean_vals, idx)
        save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                    f'{prefix}_node-centrality-betweenness_{atlas_type}',
                    main_subj_folder)
# NOTE(review): fragment — the enclosing definition/loop starts outside this
# view. `wos1`, `lws`, `table1`, `sn`, `n_subj`, `eff_num_dict`, `eff_add_dict`
# and `main_subj_folders` are defined elsewhere; presumably the two appends
# below ran once per subject inside a loop that set `sn` — TODO confirm.
# Collect behavioural scores for the current subject/scan.
wos1.append(table1.find_value_by_scan_Language('Word Order Score 1', sn))
lws.append(table1.find_value_by_scan_Language('Learning words slope', sn))
# Build (n_subj x n_nodes) local-efficiency matrices from the per-node dicts;
# dict keys are assumed to be integer column indices — TODO confirm.
eff_num_mat =np.zeros((n_subj,len(eff_num_dict)))
eff_add_mat = np.zeros((n_subj,len(eff_add_dict)))
for k in eff_num_dict.keys(): eff_num_mat[:, k] = eff_num_dict[k]
for k in eff_add_dict.keys(): eff_add_mat[:, k] = eff_add_dict[k]
# Correlate non-weighted ('Num') local efficiency with each behavioural score
# and save the per-node r values projected onto the atlas.
volume_type = 'Num'
r, p = calc_corr(wos1,eff_num_mat, fdr_correct=False, remove_outliers=True)
weighted_by_atlas,weights_dict = weight_atlas_by_add(mni_atlas_file_name,r,idx)
save_as_nii(weighted_by_atlas, mni_atlas_file_name, f'{volume_type}_LocEff-WOS_th_r_'+atlas_type, main_subj_folders)
r, p = calc_corr(lws,eff_num_mat, fdr_correct=False, remove_outliers=True)
weighted_by_atlas,weights_dict = weight_atlas_by_add(mni_atlas_file_name,r,idx)
save_as_nii(weighted_by_atlas, mni_atlas_file_name, f'{volume_type}_LocEff-LWS_th_r_'+atlas_type, main_subj_folders)
# Repeat for the weighted ('ADD') local efficiency matrix.
volume_type = 'ADD'
r, p = calc_corr(wos1,eff_add_mat, fdr_correct=False, remove_outliers=True)
weighted_by_atlas,weights_dict = weight_atlas_by_add(mni_atlas_file_name,r,idx)
save_as_nii(weighted_by_atlas, mni_atlas_file_name, f'{volume_type}_LocEff-WOS_th_r_'+atlas_type, main_subj_folders)
r, p = calc_corr(lws,eff_add_mat, fdr_correct=False, remove_outliers=True)
# NOTE(review): the matching save_as_nii for this last correlation is not in
# this view — presumably it follows immediately after; verify downstream.
weighted_by_atlas,weights_dict = weight_atlas_by_add(mni_atlas_file_name,r,idx)
    # Tail of an enclosing function whose definition begins outside this view
    # (presumably multi_comp_correction — returns thresholded r and corrected p).
    return r_th, p_corr


if __name__ == '__main__':
    # Script entry point: correlate atlas-region volume values with subject age
    # and save both raw and multiple-comparison-corrected r maps as NIfTI.
    # NOTE(review): non-raw Windows path relies on '\d', '\V', '\T' not being
    # valid escapes — works today but deprecated; prefer a raw string. TODO fix.
    subj_main_folder = 'F:\data\V7\TheBase4Ever'
    atlas_type = 'yeo7_200'
    atlas_main_folder = r'C:\Users\Admin\my_scripts\aal\yeo'
    volume_type = 'ADD'
    # Per-subject, per-region volume matrix plus atlas bookkeeping.
    vol_mat, mni_atlas_file_name, idx, subj_idx = volume_based_var(
        atlas_type, volume_type, atlas_main_folder, subj_main_folder)
    num_of_subj = np.shape(vol_mat)[0]
    ages = age_var(subj_main_folder, subj_idx)
    # Region-wise correlation of volume with age.
    r, p = corr_stats(vol_mat, ages)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, r, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_AGE_r_' + atlas_type, subj_main_folder)
    # Threshold r by the multiple-comparison-corrected p values and save again.
    r_th, p_corr = multi_comp_correction(r, p)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, r_th, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'{volume_type}_AGE_th_r_' + atlas_type, subj_main_folder)
    # Tail of an enclosing function whose definition begins outside this view
    # (presumably multi_t_test — returns t, p and thresholded t).
    return t, p, t_th


if __name__ == '__main__':
    # Script entry point: paired before-vs-after comparison of atlas-region
    # volume values, saving both raw and thresholded t maps as NIfTI.
    wt = 'ADD'
    main_folder = r'F:\data\balance'
    atlas_name = 'bna'
    atlas_main_folder = r'F:\data\atlases\BNA'
    # Optional one-time precomputation step, intentionally disabled here:
    #vol_by_atlas(wt, main_folder, atlas_name, atlas_main_folder)
    # Collect subject folders for the two sessions; the e* pattern presumably
    # matches experiment/subject group folders — TODO confirm layout.
    before_subj = glob.glob(main_folder + f'{os.sep}e*{os.sep}before{os.sep}*')
    after_subj = glob.glob(main_folder + f'{os.sep}e*{os.sep}after{os.sep}*')
    before_vol_mat, mni_atlas_file_name, idx, subj_idx = volume_based_var(
        atlas_name, wt, atlas_main_folder, before_subj)
    after_vol_mat, mni_atlas_file_name, idx, subj_idx = volume_based_var(
        atlas_name, wt, atlas_main_folder, after_subj)
    # Region-wise t-test between sessions (no FDR correction requested).
    t, p, t_th = multi_t_test(before_vol_mat, after_vol_mat, fdr_correct=False)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, t_th, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'before_vs_after_t_th_' + atlas_name, main_folder)
    weighted_by_atlas, weights_dict = weight_atlas_by_add(
        mni_atlas_file_name, t, idx)
    save_as_nii(weighted_by_atlas, mni_atlas_file_name,
                f'before_vs_after_t_' + atlas_name, main_folder)
# NOTE(review): fragment — surrounding scope starts before and the final call
# is truncated after this view. `add_file_name`, `atlas_labels`,
# `subj_main_folder`, `idx`, `fa_mat`, `mni_atlas_file_name` and `atlas_type`
# are defined elsewhere.
add_mat = all_subj_add_vals(add_file_name, atlas_labels, subj_main_folder, idx)
# NOTE(review): mid-code import — should live at the top of the file. TODO move.
from scipy.stats import linregress
# Region-wise linear regression between FA and ADD/MD values across subjects;
# keep only the r (index 2) and p (index 3) of linregress's result.
r_vec = []
p_vec = []
for i in range(np.shape(add_mat)[1]):
    x = fa_mat[:, i]
    y = add_mat[:, i]
    r, p = linregress(x, y)[2:4]
    r_vec.append(r)
    p_vec.append(p)
# Save the p map and the raw r map projected onto the atlas.
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, p_vec, idx)
save_as_nii(weighted_by_atlas, mni_atlas_file_name, r'FA_MD_p_' + atlas_type, subj_main_folder)
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r_vec, idx)
save_as_nii(weighted_by_atlas, mni_atlas_file_name, r'FA_MD_r_' + atlas_type, subj_main_folder)
# Zero out non-significant (p > 0.05, uncorrected) correlations for a
# thresholded r map.
r_vec = np.asarray(r_vec)
r_vec[np.asarray(p_vec) > 0.05] = 0
r_vec = list(r_vec)
weighted_by_atlas, weights_dict = weight_atlas_by_add(mni_atlas_file_name, r_vec, idx)
# NOTE(review): call truncated at the edge of this view — remaining arguments
# (file name and output folder) continue outside it.
save_as_nii(weighted_by_atlas, mni_atlas_file_name,