def get_analysis_mask(path, subjects, directory, roi_list):
    """Return the strict upper-triangular float mask of valid matrix cells.

    Loads connectivity results for the 'Samatha' and 'Vipassana'
    conditions and converts the returned NaN mask into a float matrix
    with 1.0 in every valid (non-NaN) cell of the strict upper triangle
    and 0.0 elsewhere.

    Parameters
    ----------
    path, subjects, directory, roi_list :
        Forwarded verbatim to ``ConnectivityLoader`` — see that class
        for their semantics (not visible from this block).

    Returns
    -------
    numpy.ndarray
        Square float matrix; strict upper triangle (k=1, diagonal
        excluded) marks valid cells with 1.0.
    """
    ######## Get matrix infos ###############
    conn_test = ConnectivityLoader(path, subjects, directory, roi_list)

    # Get nan mask to correctly fill matrix
    nan_mask = conn_test.get_results(['Samatha', 'Vipassana'])

    # Invert the NaN mask and convert to float (1.0 = usable cell).
    # NOTE: the original used np.float_(~np.bool_(nan_mask)); those
    # scalar-type aliases are removed in NumPy 2.0, so use astype
    # conversions, which are equivalent on every NumPy version.
    mask_ = (~np.asarray(nan_mask, dtype=bool)).astype(float)

    # Keep only the strict upper triangle (k=1 drops the diagonal)
    mask_ = np.triu(mask_, k=1)
    return mask_
def write_correlation_matrices(directory, condition):
    """Plot and save ROI-level and network-aggregated correlation connectomes.

    Loads subject attributes and the findlab ROI list from fixed paths,
    averages the expert-group ('E') samples for the given meditation
    ``condition``, rebuilds the symmetric correlation matrix from its
    upper triangle, and writes two connectome figures (full-ROI and
    network-aggregated) under ``path/directory``.

    Parameters
    ----------
    directory : str
        Results sub-directory passed to ``ConnectivityLoader`` and used
        as the save location for the figures.
    condition : str
        Target value matched against ``ds.targets`` (presumably
        'Samatha' or 'Vipassana' — confirm against callers).

    Side effects
    ------------
    Reads two text files from hard-coded absolute paths and saves plot
    files via ``plot_connectomics``; returns nothing.
    """
    # dtype=np.str was a deprecated alias of the builtin str and is
    # removed in NumPy >= 1.24; use str directly (identical behavior).
    subjects = np.loadtxt(
        '/media/robbis/DATA/fmri/monks/attributes_struct.txt',
        dtype=str)
    roi_list = np.loadtxt(
        '/media/robbis/DATA/fmri/templates_fcmri/findlab_rois.txt',
        delimiter=',',
        dtype=str)
    path = '/media/robbis/DATA/fmri/monks/0_results/'

    conn = ConnectivityLoader(path, subjects, directory, roi_list)
    nan_mask = conn.get_results(['Samatha', 'Vipassana'])
    #nan_mask = conn.get_results(['Rest'])
    ds = conn.get_dataset()

    # Upper-triangular mask of valid cells (1.0 where non-NaN).
    # np.float_/np.bool_ aliases are removed in NumPy 2.0; the astype
    # form below is equivalent on all versions.
    mask_ = (~np.asarray(nan_mask, dtype=bool)).astype(float)
    mask_ = np.triu(mask_, k=1)
    mask_indices = np.nonzero(mask_)

    # Expert-group samples for the requested condition, averaged
    # across samples to one value per upper-triangle cell.
    ds_ = ds[np.logical_and(ds.targets == condition, ds.sa.groups == 'E')]
    array_ = ds_.samples.mean(0)

    # Refill the square matrix from the flattened upper triangle, then
    # mirror it into a full symmetric matrix; remaining NaNs become 0.
    mask_[mask_indices] = array_
    matrix = np.nan_to_num(copy_matrix(mask_, diagonal_filler=0))

    names_lr, colors_lr, index_, coords, networks = get_atlas_info('findlab')

    # Full-ROI connectome: node size scales with total absolute
    # connectivity; symmetric color range around zero.
    plot_connectomics(
        matrix,
        20 + 8 * np.abs(matrix.sum(axis=1))**2,
        save_path=os.path.join(path, directory),
        prename=condition + '_correlation',
        save=True,
        colormap='bwr',
        vmin=np.abs(matrix).max() * -1,
        vmax=np.abs(matrix).max(),
        node_names=names_lr,
        node_colors=colors_lr,
        node_coords=coords,
        node_order=index_,
        networks=networks,
        threshold=0.5,
        title=condition + ' Correlation',
        zscore=False,
    )

    # Aggregate ROI-level connectivity to one node per network
    # (network labels taken from the second-to-last ROI-list column).
    w_aggregate = aggregate_networks(matrix, roi_list.T[-2])

    # One representative index per network, used to pick colors/coords.
    _, idx = np.unique(networks, return_index=True)

    plot_connectomics(
        w_aggregate,
        5 * np.abs(w_aggregate.sum(axis=1))**2,
        save_path=os.path.join(path, directory),
        prename=condition + '_aggregate_correlation',
        save=True,
        colormap='bwr',
        vmin=-1 * w_aggregate.max(),
        vmax=w_aggregate.max(),
        node_names=np.unique(networks),
        node_colors=colors_lr[idx],
        node_coords=coords[idx],
        node_order=np.arange(0, len(idx)),
        networks=np.unique(networks),
        threshold=4,
        zscore=False)
delimiter=',', dtype=np.str) style_ = 'Samatha' cv_repetitions = 250 cv_fraction = 0.5 num_exp_subjects = subjects[subjects.T[1] == group_].shape[0] cv = ShuffleSplit(num_exp_subjects, n_iter=cv_repetitions, test_size=cv_fraction) algorithm = SVR(kernel='linear', C=1) # Load data conn = ConnectivityLoader(path, subjects, r, roi_list) conn.get_results(conditions) ds = conn.get_dataset() ds = ds[np.logical_and(ds.sa.meditation == style_, ds.sa.groups == group_)] # Select data X = ds.samples y = np.float_(ds.sa.expertise) * 0.01 # preprocess X_ = zscore(X, axis=1) # Sample-wise y_ = zscore(y) c = Correlation(X_) corr = c.transform(X_, y_)[0]