def get_node_size(matrix, absolute=True):
    """Return the per-node strength (column sum) of a connectivity matrix."""
    matrix = np.nan_to_num(copy_matrix(matrix, diagonal_filler=0))
    if absolute:
        matrix = np.abs(matrix)
    return matrix.sum(axis=0)
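# Minimal usage sketch for get_node_size (toy values, call commented out):
# assuming copy_matrix mirrors the upper triangle and zeroes the diagonal,
# the "size" of a node is its strength, i.e. the sum of the absolute
# connection weights on its column.
#   toy = np.array([[ 0.0, 0.5, -0.2],
#                   [ 0.5, 0.0,  0.8],
#                   [-0.2, 0.8,  0.0]])
#   get_node_size(toy)   # -> array([0.7, 1.3, 1.0])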
def plot_states_matrices(X,
                         labels,
                         node_number=[6, 5, 8, 10, 4, 5, 7],
                         node_networks=['DAN', 'VAN', 'SMN', 'VIS', 'AUD', 'LAN', 'DMN'],
                         use_centroid=False,
                         n_cols=3,
                         save_fig=False,
                         save_path="/media/robbis/DATA/fmri/movie_viviana",
                         save_name_condition=None,
                         **kwargs):
    """
    Plots the centroids in square matrix form.
    It can be used with the original data and labels, or directly with the
    centroids themselves if use_centroid is set to True.
    """
    position = [sum(node_number[:i + 1]) for i in range(len(node_number))]

    if not use_centroid:
        centroids = get_centroids(X, labels)
        n_states = len(np.unique(labels))
    else:
        centroids = X.copy()
        n_states = X.shape[0]

    position_label = [
        -0.5 + position[i] - node_number[i] / 2.
        for i in range(len(node_number))
    ]

    n_rows = int(np.ceil(n_states / float(n_cols)))

    fig = pl.figure()

    for i in np.arange(n_states):
        ax = fig.add_subplot(n_rows, n_cols, i + 1)

        matrix_ = copy_matrix(array_to_matrix(centroids[i]), diagonal_filler=0)
        n_nodes = matrix_.shape[0]

        ax.imshow(matrix_, interpolation='nearest', vmin=0, cmap=pl.cm.bwr)

        # Draw the network boundaries
        for _, end_network in zip(node_networks, position):
            ax.vlines(end_network - 0.5, -0.5, n_nodes - 0.5)
            ax.hlines(end_network - 0.5, -0.5, n_nodes - 0.5)

        ax.set_title('State ' + str(i + 1))
        ax.set_xticks(position_label)
        ax.set_xticklabels(node_networks)
        ax.set_yticks(position_label)
        ax.set_yticklabels(node_networks)
        #pl.colorbar()

        if save_fig:
            fname = "%s_state_%s.png" % (str(save_name_condition), str(i + 1))
            fig.savefig(os.path.join(save_path, fname))

    pl.close('all')
    return fig
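# Quick check of the tick-placement arithmetic used by the plotting functions
# in this file: with the default node_number, the network boundaries and the
# label positions evaluate to the exact values below.
#   node_number    = [6, 5, 8, 10, 4, 5, 7]
#   position       = [sum(node_number[:i + 1]) for i in range(len(node_number))]
#                  -> [6, 11, 19, 29, 33, 38, 45]
#   position_label = [-0.5 + position[i] - node_number[i] / 2. for i in range(len(node_number))]
#                  -> [2.5, 8.0, 14.5, 23.5, 30.5, 35.0, 41.0]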
def plot_states_matrices(X,
                         labels,
                         node_number=[6, 5, 8, 10, 4, 5, 7],
                         node_networks=['DAN', 'VAN', 'SMN', 'VIS', 'AUD', 'LAN', 'DMN'],
                         use_centroid=False,
                         n_cols=3,
                         save_fig=False,
                         save_path="/media/robbis/DATA/fmri/movie_viviana",
                         save_name_condition=None,
                         **kwargs):
    """
    Plots the centroids in square matrix form.
    It can be used with the original data and labels, or directly with the
    centroids themselves if use_centroid is set to True.
    """
    position = [sum(node_number[:i + 1]) for i in range(len(node_number))]

    if not use_centroid:
        centroids = get_centroids(X, labels)
        n_states = len(np.unique(labels))
    else:
        centroids = X.copy()
        n_states = X.shape[0]

    position_label = [-0.5 + position[i] - node_number[i] / 2.
                      for i in range(len(node_number))]

    n_rows = int(np.ceil(n_states / float(n_cols)))
    print(n_rows, n_cols)

    fig = pl.figure()

    for i in np.arange(n_states):
        ax = fig.add_subplot(n_rows, n_cols, i + 1)

        matrix_ = copy_matrix(array_to_matrix(centroids[i]), diagonal_filler=0)
        n_nodes = matrix_.shape[0]

        ax.imshow(matrix_, interpolation='nearest', vmin=0, cmap=pl.cm.inferno)

        for _, end_network in zip(node_networks, position):
            ax.vlines(end_network - 0.5, -0.5, n_nodes - 0.5)
            ax.hlines(end_network - 0.5, -0.5, n_nodes - 0.5)

        ax.set_title('State ' + str(i + 1))
        ax.set_xticks(position_label)
        ax.set_xticklabels(node_networks)
        ax.set_yticks(position_label)
        ax.set_yticklabels(node_networks)
        #pl.colorbar()

        if save_fig:
            fname = "%s_state_%s.png" % (str(save_name_condition), str(i + 1))
            fig.savefig(os.path.join(save_path, fname))

    pl.close('all')
    return fig
def aggregate_networks(matrix, roi_list, aggregation_fx=np.sum):
    """
    Aggregate matrix values using the network membership information
    provided by roi_list.

    Parameters
    ----------
    matrix : numpy 2D array, shape n x n
        Connectivity matrix in squared form

    roi_list : list of string, length = n
        List of each ROI's network name. Each element represents
        the network that includes the ROI in that particular position.

    aggregation_fx : callable, default np.sum
        Function used to aggregate the connection values of each pair
        of networks (e.g. np.sum or np.mean).

    Returns
    -------
    aggregate_matrix : numpy 2D array, p x p
        The matrix obtained by aggregating, for every pair of networks,
        the connections between their nodes.
    """
    unique_rois = np.unique(roi_list)
    n_roi = unique_rois.shape[0]

    aggregate_matrix = np.zeros((n_roi, n_roi), dtype=float)

    network_pairs = itertools.combinations(unique_rois, 2)
    indexes = np.vstack(np.triu_indices(n_roi, k=1)).T

    # This fills the upper part of the aggregate matrix
    for i, (n1, n2) in enumerate(network_pairs):
        x = indexes[i][0]
        y = indexes[i][1]

        mask1 = roi_list == n1
        mask2 = roi_list == n2

        # Build the mask of the between-network connections
        mask_roi = np.meshgrid(mask1, mask1)[1] * np.meshgrid(mask2, mask2)[0]

        value = aggregation_fx(matrix * mask_roi)
        #value /= np.sum(mask_roi)

        aggregate_matrix[x, y] = value

    # Copy the upper triangle into the lower part
    aggregate_matrix = copy_matrix(aggregate_matrix)

    # This fills the diagonal with the within-network aggregation
    for i, n in enumerate(unique_rois):
        diag_matrix, _ = network_connections(matrix, n, roi_list)
        aggregate_matrix[i, i] = aggregation_fx(diag_matrix)
        # aggregate_matrix[i, i] = np.mean(diag_matrix)

    return aggregate_matrix
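# Illustration of the block mask built inside aggregate_networks: with a toy
# 4-node parcellation split into two networks, the meshgrid product is True
# only where the row node belongs to the first network and the column node to
# the second (the names 'A'/'B' are illustrative only).
#   roi_toy = np.array(['A', 'A', 'B', 'B'])
#   mask_a = roi_toy == 'A'
#   mask_b = roi_toy == 'B'
#   block = np.meshgrid(mask_a, mask_a)[1] * np.meshgrid(mask_b, mask_b)[0]
#   block ->
#   [[False False  True  True]
#    [False False  True  True]
#    [False False False False]
#    [False False False False]]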
def aggregate_networks(matrix, roi_list):
    """
    Aggregate matrix values using the network membership information
    provided by roi_list.

    Parameters
    ----------
    matrix : numpy 2D array, shape n x n
        Connectivity matrix in squared form

    roi_list : list of string, length = n
        List of each ROI's network name. Each element represents
        the network that includes the ROI in that particular position.

    Returns
    -------
    aggregate_matrix : numpy 2D array, p x p
        The matrix obtained by summing, for every pair of networks,
        the connections between their nodes.
    """
    unique_rois = np.unique(roi_list)
    n_roi = unique_rois.shape[0]

    aggregate_matrix = np.zeros((n_roi, n_roi), dtype=float)

    network_pairs = itertools.combinations(unique_rois, 2)
    indexes = np.vstack(np.triu_indices(n_roi, k=1)).T

    # This fills the upper part of the aggregate matrix
    for i, (n1, n2) in enumerate(network_pairs):
        x = indexes[i][0]
        y = indexes[i][1]

        mask1 = roi_list == n1
        mask2 = roi_list == n2

        # Build the mask of the between-network connections
        mask_roi = np.meshgrid(mask1, mask1)[1] * np.meshgrid(mask2, mask2)[0]

        value = np.sum(matrix * mask_roi)
        #value /= np.sum(mask_roi)

        aggregate_matrix[x, y] = value

    # Copy the upper triangle into the lower part
    aggregate_matrix = copy_matrix(aggregate_matrix)

    # This fills the diagonal with the within-network sum of elements
    for i, n in enumerate(unique_rois):
        diag_matrix, _ = network_connections(matrix, n, roi_list)
        aggregate_matrix[i, i] = np.sum(diag_matrix)
        # aggregate_matrix[i, i] = np.mean(diag_matrix)

    return aggregate_matrix
def get_feature_selection_matrix(feature_set, n_features, mask):
    """Count how often each connection was selected across folds and
    project the counts back onto the square matrix defined by mask."""
    h_values_, _ = np.histogram(feature_set.flatten(),
                                bins=np.arange(0, n_features + 1))

    mask = np.triu(mask, k=1)
    mask_indices = np.nonzero(mask)
    mask[mask_indices] = h_values_

    return np.nan_to_num(copy_matrix(mask, diagonal_filler=0))
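# Hedged usage sketch for get_feature_selection_matrix (toy values, commented
# out): feature_set holds, for every cross-validation fold, the indices of
# the selected upper-triangular connections; the histogram counts how often
# each connection was picked before writing the counts back into mask.
#   mask = np.ones((4, 4))
#   n_features = np.count_nonzero(np.triu(mask, k=1))   # 6 connections
#   feature_set = np.array([[0, 2, 5],
#                           [0, 1, 5]])                  # two folds
#   get_feature_selection_matrix(feature_set, n_features, mask)
# The (i, j) entry of the result is the number of folds that selected the
# connection between ROI i and ROI j.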
def transform(self, ds):
    # Rebuild the square matrix of every sample and stack them along the
    # third axis: shape (n_nodes, n_nodes, n_samples).
    data = np.dstack([copy_matrix(array_to_matrix(a)) for a in ds.samples])

    # Concatenate the per-node rows and transpose, so that each node's
    # connectivity profile in each original sample becomes a new sample row.
    data = np.hstack([d for d in data[:, :]]).T

    attr = self._edit_attr(ds, data.shape)

    ds_ = Dataset.from_wizard(data)
    ds_ = add_attributes(ds_, attr)

    return ds_
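# Shape bookkeeping for transform() above, assuming every ds.samples row is
# the vectorized form of an n_nodes x n_nodes matrix (toy shapes only):
#   stack = np.dstack([...])                 # (n_nodes, n_nodes, n_samples)
#   np.hstack([d for d in stack[:, :]]).T    # (n_nodes * n_samples, n_nodes)
# i.e. every node's connectivity profile in every original sample becomes
# one row of the transformed dataset.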
def plot_seaborn(features, node_names=None, node_idx=None, **kwargs):
    from mne.viz import circular_layout

    node_angles = circular_layout(node_names.tolist(),
                                  node_names[node_idx].tolist(),
                                  start_pos=90,
                                  group_boundaries=[0, len(node_names) / 2. + 1])

    matrix = copy_matrix(array_to_matrix(features.values[0]),
                         diagonal_filler=0.)

    return plot_connectivity_seaborn(matrix,
                                     node_names=node_names,
                                     con_thresh=400,
                                     node_angles=node_angles,
                                     node_colors=sns.dark_palette(kwargs['color']))
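# circular_layout comes from mne.viz and distributes the node names around a
# circle; a toy call with made-up names, splitting the list at its midpoint,
# mirrors the group_boundaries logic used above (commented out, illustrative):
#   names = ['L1', 'L2', 'L3', 'R1', 'R2', 'R3']
#   angles = circular_layout(names, names, start_pos=90,
#                            group_boundaries=[0, len(names) // 2])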
def get_feature_weights_matrix(weights, sets, mask, indices=None):
    """
    Function used to compute the average weight matrix in case of
    several cross-validation folds and feature selection for each fold.

    Parameters
    ----------
    weights : ndarray shape=(n_folds, n_selected_features)
        The weights matrix with the shape specified in the signature

    sets : ndarray shape=(n_folds, n_selected_features)
        This represents the index in the square matrix of the feature
        selected by the algorithm in each cross-validation fold

    mask : ndarray shape=(n_roi, n_roi)
        The mask matrix of the valid ROIs selected. Important: this matrix
        should be triangular with the lower part set to zero.

    indices : tuple
        This is equal to np.nonzero(mask)

    Returns
    -------
    matrix : ndarray n_roi x n_roi
        It returns the average weights across cross-validation folds
        in square form.
    """
    if indices is None:
        indices = np.nonzero(mask)

    weights = weights.squeeze()
    filling_vector = np.zeros(np.count_nonzero(mask))
    counting_vector = np.zeros(np.count_nonzero(mask))

    for s, w in zip(sets, weights):
        filling_vector[s] += zscore(w)
        counting_vector[s] += 1

    avg_weigths = np.nan_to_num(filling_vector / counting_vector)

    mask[indices] = avg_weigths
    matrix = np.nan_to_num(copy_matrix(mask, diagonal_filler=0))

    return matrix
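# Minimal usage sketch for get_feature_weights_matrix (toy values, commented
# out so the module has no import-time side effects): two folds selecting
# overlapping connections out of three valid ones.
#   mask = np.triu(np.ones((3, 3)), k=1)            # 3 valid connections
#   sets = np.array([[0, 2], [1, 2]])               # indices picked per fold
#   weights = np.array([[0.4, -0.4], [1.0, -1.0]])  # matching weights per fold
#   get_feature_weights_matrix(weights, sets, mask)
# returns a symmetric 3 x 3 matrix whose entries are the fold-wise z-scored
# weights averaged over the folds in which each connection was selected.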
def plot_center_matrix(X, clustering, n_cluster=5, **kwargs):

    configuration = {
        'node_number': [6, 5, 8, 10, 4, 5, 7],
        'node_networks': ['DAN', 'VAN', 'SMN', 'VIS', 'AUD', 'LAN', 'DMN'],
        'save_fig': True,
        'save_path': "/media/robbis/DATA/fmri/movie_viviana",
        'save_name_condition': None
    }

    configuration.update(**kwargs)

    node_number = configuration['node_number']
    node_networks = configuration['node_networks']

    position = [sum(node_number[:i + 1]) for i in range(len(node_number))]
    position_label = [
        -0.5 + position[i] - node_number[i] / 2.
        for i in range(len(node_number))
    ]

    matrix_indices = np.arange(n_cluster**2).reshape(n_cluster, n_cluster) + 1

    fig = pl.figure(figsize=(25, 20))

    for i in range(n_cluster - 1):
        centers = get_centroids(X, clustering[i])

        for j, matrix in enumerate(centers):
            pos = matrix_indices[j, i + 1]
            ax = fig.add_subplot(n_cluster, n_cluster, pos)

            matrix = copy_matrix(array_to_matrix(matrix), diagonal_filler=0)
            total_nodes = matrix.shape[0]

            ax.imshow(matrix, interpolation='nearest', vmin=0)

            for _, n_nodes in zip(node_networks, position):
                ax.vlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)
                ax.hlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)

            ax.set_xticks(position_label)
            ax.set_xticklabels(node_networks)
            ax.set_yticks(position_label)
            ax.set_yticklabels(node_networks)

    return fig
def get_feature_weights_matrix(weights, sets, mask, indices):
    """
    Function used to compute the average weight matrix in case of
    several cross-validation folds and feature selection for each fold.

    Parameters
    ----------
    weights : ndarray shape n_folds x n_selected_features
        The weights matrix with the shape specified in the signature

    sets : ndarray shape n_folds x n_selected_features
        This represents the index in the square matrix of the feature
        selected by the algorithm in each cross-validation fold

    mask : ndarray shape n_roi x n_roi
        The mask matrix of the valid ROIs selected. Important: this matrix
        should be triangular with the lower part set to zero.

    indices : tuple
        This is equal to np.nonzero(mask)

    Returns
    -------
    matrix : ndarray n_roi x n_roi
        It returns the average weights across cross-validation folds
        in square form.
    """
    weights = weights.squeeze()
    filling_vector = np.zeros(np.count_nonzero(mask))
    counting_vector = np.zeros(np.count_nonzero(mask))

    for s, w in zip(sets, weights):
        filling_vector[s] += zscore(w)
        counting_vector[s] += 1

    avg_weigths = np.nan_to_num(filling_vector / counting_vector)

    mask[indices] = avg_weigths
    matrix = np.nan_to_num(copy_matrix(mask, diagonal_filler=0))

    return matrix
def plot_condition_centers(X, labels, **kwargs):

    configuration = {
        'node_number': [6, 5, 8, 10, 4, 5, 7],
        'node_networks': ['DAN', 'VAN', 'SMN', 'VIS', 'AUD', 'LAN', 'DMN'],
        'save_fig': True,
        'save_path': "/media/robbis/DATA/fmri/movie_viviana",
        'save_name_condition': None,
        'vmax': 1
    }

    configuration.update(**kwargs)

    vmax = configuration['vmax']
    node_number = configuration['node_number']
    node_networks = configuration['node_networks']

    centroids = get_centroids(X, labels)

    position = [sum(node_number[:i + 1]) for i in range(len(node_number))]
    position_label = [
        -0.5 + position[i] - node_number[i] / 2.
        for i in range(len(node_number))
    ]

    n_rows = int(np.floor(np.sqrt(len(np.unique(labels)))))
    n_cols = int(np.ceil(len(np.unique(labels)) / n_rows))

    fig = pl.figure(figsize=(16, 13))

    for j, matrix in enumerate(centroids):
        ax = fig.add_subplot(n_rows, n_cols, j + 1)

        matrix = copy_matrix(array_to_matrix(matrix), diagonal_filler=0)
        total_nodes = matrix.shape[0]

        ax.imshow(matrix, interpolation='nearest', vmin=0, vmax=vmax)

        for _, n_nodes in zip(node_networks, position):
            ax.vlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)
            ax.hlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)

        ax.set_xticks(position_label)
        ax.set_xticklabels(node_networks, rotation=45)
        ax.set_yticks(position_label)
        ax.set_yticklabels(node_networks)

    return fig
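# Hypothetical call, assuming X holds the vectorized connectivity samples and
# labels the cluster assignment produced upstream; vmax is read from **kwargs
# exactly as in the configuration dict above.
#   fig = plot_condition_centers(X, labels, vmax=0.8)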
def plot_states_matrices(X,
                         labels,
                         save_path,
                         condition_label,
                         node_number=[6, 5, 8, 10, 4, 5, 7],
                         node_networks=['DAN', 'VAN', 'SMN', 'VIS', 'AUD', 'LAN', 'DMN'],
                         use_centroid=False,
                         ):

    position = [sum(node_number[:i + 1]) for i in range(len(node_number))]

    if not use_centroid:
        centroids = get_centroids(X, labels)
        n_states = len(np.unique(labels))
    else:
        centroids = X.copy()
        n_states = X.shape[0]

    position_label = [-0.5 + position[i] - node_number[i] / 2.
                      for i in range(len(node_number))]

    for i in np.arange(n_states):
        fig = pl.figure()

        matrix_ = copy_matrix(array_to_matrix(centroids[i]))
        total_nodes = matrix_.shape[0]

        pl.imshow(matrix_, interpolation='nearest', vmin=0, vmax=1)

        for name, n_nodes in zip(node_networks, position):
            pl.vlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)
            pl.hlines(n_nodes - 0.5, -0.5, total_nodes - 0.5)

        pl.title('State ' + str(i + 1))
        pl.xticks(position_label, node_networks)
        pl.yticks(position_label, node_networks)
        pl.colorbar()

        fname = "%s_state_%s.png" % (condition_label, str(i + 1))
        pl.savefig(os.path.join(save_path, fname))
def write_correlation_matrices(directory, condition):

    subjects = np.loadtxt('/media/robbis/DATA/fmri/monks/attributes_struct.txt',
                          dtype=str)

    roi_list = np.loadtxt('/media/robbis/DATA/fmri/templates_fcmri/findlab_rois.txt',
                          delimiter=',',
                          dtype=str)

    path = '/media/robbis/DATA/fmri/monks/0_results/'

    conn = ConnectivityLoader(path, subjects, directory, roi_list)
    nan_mask = conn.get_results(['Samatha', 'Vipassana'])
    #nan_mask = conn.get_results(['Rest'])
    ds = conn.get_dataset()

    mask_ = np.float_(~np.bool_(nan_mask))
    mask_ = np.triu(mask_, k=1)
    mask_indices = np.nonzero(mask_)

    ds_ = ds[np.logical_and(ds.targets == condition, ds.sa.groups == 'E')]
    array_ = ds_.samples.mean(0)
    mask_[mask_indices] = array_

    matrix = np.nan_to_num(copy_matrix(mask_, diagonal_filler=0))

    names_lr, colors_lr, index_, coords, networks = get_atlas_info('findlab')

    plot_connectomics(matrix,
                      20 + 8 * np.abs(matrix.sum(axis=1))**2,
                      save_path=os.path.join(path, directory),
                      prename=condition + '_correlation',
                      save=True,
                      colormap='bwr',
                      vmin=np.abs(matrix).max() * -1,
                      vmax=np.abs(matrix).max(),
                      node_names=names_lr,
                      node_colors=colors_lr,
                      node_coords=coords,
                      node_order=index_,
                      networks=networks,
                      threshold=0.5,
                      title=condition + ' Correlation',
                      zscore=False,
                      )

    w_aggregate = aggregate_networks(matrix, roi_list.T[-2])

    _, idx = np.unique(networks, return_index=True)

    plot_connectomics(w_aggregate,
                      5 * np.abs(w_aggregate.sum(axis=1))**2,
                      save_path=os.path.join(path, directory),
                      prename=condition + '_aggregate_correlation',
                      save=True,
                      colormap='bwr',
                      vmin=-1 * w_aggregate.max(),
                      vmax=w_aggregate.max(),
                      node_names=np.unique(networks),
                      node_colors=colors_lr[idx],
                      node_coords=coords[idx],
                      node_order=np.arange(0, len(idx)),
                      networks=np.unique(networks),
                      threshold=4,
                      zscore=False)
w_array = feature_weights[med].copy()
f_nz = f_array[np.nonzero(f_array)]

# Keep only the features that were selected often
threshold = f_nz.mean() + 0.5 * f_nz.std()
# f_array[f_array < threshold] = 0
# w_array[f_array < threshold] = 0

# Weights selected based on chosen features
# zscoring weights
w_nz = w_array[np.nonzero(w_array)]
w_nz = (w_nz - np.mean(w_nz)) / np.std(w_nz)
w_array[np.nonzero(w_array)] = w_nz

f_matrix = copy_matrix(array_to_matrix(f_array, nan_mask), diagonal_filler=0)
w_matrix = copy_matrix(array_to_matrix(w_array, nan_mask), diagonal_filler=0)

title = "%s %s" % (med, l_)

# f_matrix[f_matrix == 0] = np.nan

##################################################################################
condition = med
w_aggregate = aggregate_networks(w_matrix, roi_list.T[-2])

names_lr, colors_lr, index_, coords, networks = get_atlas_info("findlab")

_, idx = np.unique(networks, return_index=True)

##########################################################################
plot_connectomics(
def analyze_results(directory, conditions, n_permutations=1000.):
    """Write the results of the regression analysis

    Parameters
    ----------
    directory : string or list of strings
        Path or list of paths where results are stored.

    conditions : string or list of strings
        Conditions to be analyzed.

    Returns
    -------
    None : figures and text summaries are written under res_path.
    """

    res_path = '/media/robbis/DATA/fmri/monks/0_results/'
    subjects = np.loadtxt('/media/robbis/DATA/fmri/monks/attributes_struct.txt',
                          dtype=str)
    path = '/media/robbis/DATA/fmri/monks/'
    roi_list = []
    roi_list = np.loadtxt('/media/robbis/DATA/fmri/templates_fcmri/findlab_rois.txt',
                          delimiter=',',
                          dtype=str)

    if isinstance(directory, str):
        directory = [directory]
    if isinstance(conditions, str):
        conditions = [conditions]

    for dir_ in directory:
        for cond_ in conditions:

            fname_ = os.path.join(res_path, dir_, cond_ + '_values_1000_50.npz')
            results_ = np.load(fname_)
            values_ = results_['arr_0'].tolist()
            errors_ = values_['error']        #values_['errors_']
            sets_ = values_['features']       #values_['sets_']
            weights_ = values_['weights']     #values_['weights_']
            samples_ = values_['subjects']    #values_['samples_']

            fname_ = os.path.join(res_path, dir_, cond_ + '_permutation_1000_50.npz')
            results_ = np.load(fname_)
            values_p = results_['arr_0'].tolist()
            errors_p = values_p['error']      #values_p['errors_p']
            sets_p = values_p['features']     #values_p['sets_p']
            weights_p = values_p['weights']   #values_p['weights_p']
            samples_p = values_p['subjects']  #values_p['samples_p']

            errors_p = np.nanmean(errors_p, axis=1)

            print('-----------' + dir_ + '-------------')
            print(cond_)
            print('MSE = ' + str(errors_[:, 0].mean()) + ' -- p ' +
                  str(np.count_nonzero(errors_p[:, 0] < errors_[:, 0].mean()) / n_permutations))
            print('COR = ' + str(np.nanmean(errors_[:, 1])) + ' -- p ' +
                  str(np.count_nonzero(errors_p[:, 1] > np.nanmean(errors_[:, 1])) / n_permutations))

            directory_ = dir_
            learner_ = "SVR_C_1"
            prename = "%s_%s" % (cond_, learner_)

            ######## Get matrix infos ###############
            conn_test = ConnectivityLoader(res_path, subjects, directory_, roi_list)

            # Get nan mask to correctly fill matrix
            nan_mask = conn_test.get_results(['Samatha', 'Vipassana'])

            # Transform matrix into float of ones
            mask_ = np.float_(~np.bool_(nan_mask))

            # Get the upper part of the matrix
            mask_ = np.triu(mask_, k=1)
            mask_indices = np.nonzero(mask_)
            n_bins = np.count_nonzero(mask_)

            ###### Plot of distributions of errors and permutations #########
            #errors_p = np.nanmean(errors_p, axis=1)

            fig_ = pl.figure()
            bpp = pl.boxplot(errors_p, showfliers=False, showmeans=True, patch_artist=True)
            bpv = pl.boxplot(errors_, showfliers=False, showmeans=True, patch_artist=True)
            fname = "%s_perm_1000_boxplot.png" % (prename)

            for box_, boxp_ in zip(bpv['boxes'], bpp['boxes']):
                box_.set_facecolor('lightgreen')
                boxp_.set_facecolor('lightslategrey')

            pl.xticks(np.array([1, 2]), ['MSE', 'COR'])
            pl.savefig(os.path.join(res_path, directory_, fname))
            pl.close()

            n_permutations = float(errors_p[:, 0].shape[0])

            ##### Plot of connection distributions ########
            pl.figure()
            h_values_p, _ = np.histogram(sets_p.flatten(), bins=np.arange(0, n_bins + 1))
            #pl.plot(zscore(h_values_p))
            pl.hist(zscore(h_values_p), bins=25)

            fname = "%s_features_set_dist.png" % (prename)
            pl.savefig(os.path.join(res_path, directory_, fname))

            pl.figure()
            h_values_, _ = np.histogram(sets_.flatten(), bins=np.arange(0, n_bins + 1))
            pl.plot(zscore(h_values_))

            fname = "%s_features_set_cross_validation.png" % (prename)
            pl.savefig(os.path.join(res_path, directory_, fname))

            pl.close('all')

            ######## Plot connectivity stuff ###########
            weights_ = weights_.squeeze()
            filling_vector = np.zeros(np.count_nonzero(mask_))
            counting_vector = np.zeros(np.count_nonzero(mask_))

            for s, w in zip(sets_, weights_):
                filling_vector[s] += zscore(w)
                counting_vector[s] += 1

            # Calculate the average weights and then zscore
            avg_weigths = np.nan_to_num(filling_vector / counting_vector)

            mask_[mask_indices] = avg_weigths
            matrix_ = np.nan_to_num(copy_matrix(mask_, diagonal_filler=0))

            names_lr, colors_lr, index_, coords, _ = get_atlas_info(dir_)

            '''
            matrix_[matrix_ == 0] = np.nan
            matrix_[np.abs(matrix_) < 1] = np.nan
            '''

            size_w = np.zeros_like(matrix_)
            size_w[mask_indices] = np.abs(avg_weigths)
            size_w = np.nan_to_num(copy_matrix(size_w, diagonal_filler=0))
            size_w = np.sum(size_w, axis=0)

            f, _ = plot_connectivity_circle_edited(matrix_[index_][:, index_],
                                                   names_lr[index_],
                                                   node_colors=colors_lr[index_],
                                                   node_size=2 * size_w[index_]**2,
                                                   con_thresh=1.4,
                                                   title=cond_,
                                                   node_angles=circular_layout(names_lr,
                                                                               list(names_lr),
                                                                               ),
                                                   fontsize_title=19,
                                                   fontsize_names=13,
                                                   fontsize_colorbar=13,
                                                   colorbar_size=0.3,
                                                   colormap='bwr',
                                                   #colormap=cm_,
                                                   vmin=-3.,
                                                   vmax=3.,
                                                   fig=pl.figure(figsize=(16, 16))
                                                   )

            fname = "%s_features_weight.png" % (prename)
            f.savefig(os.path.join(res_path, directory_, fname),
                      facecolor='black',
                      dpi=150)

            for d_ in ['x', 'y', 'z']:
                fname = "%s_connectome_feature_weight_%s.png" % (prename, d_)
                fname = os.path.join(res_path, directory_, fname)

                plot_connectome(matrix_,
                                coords,
                                colors_lr,
                                2 * size_w**2,
                                1.4,
                                fname,
                                #cmap=pl.cm.bwr,
                                title=None,
                                display_=d_,
                                #max_=3.,
                                #min_=3.
                                )

            fname = "%s_connections_list_feature_weights.txt" % (prename)
            fname = os.path.join(res_path, directory_, fname)
            #print_connections(matrix_, names_lr, fname)

            ######### Plot feature selection counts #########
            mask_ = np.float_(~np.bool_(nan_mask))
            mask_ = np.triu(mask_, k=1)
            mask_indices = np.nonzero(mask_)
            mask_[mask_indices] = h_values_

            matrix_ = np.nan_to_num(copy_matrix(mask_, diagonal_filler=0))

            size_ = np.zeros_like(matrix_)
            size_[mask_indices] = counting_vector
            size_ = np.nan_to_num(copy_matrix(size_, diagonal_filler=0))
            size_ = np.sum(size_, axis=0)

            f, _ = plot_connectivity_circle_edited(matrix_[index_][:, index_],
                                                   names_lr[index_],
                                                   node_colors=colors_lr[index_],
                                                   node_size=size_[index_] * 5,
                                                   con_thresh=15.,
                                                   title=cond_,
                                                   node_angles=circular_layout(names_lr,
                                                                               list(names_lr),
                                                                               ),
                                                   fontsize_title=19,
                                                   fontsize_names=13,
                                                   fontsize_colorbar=13,
                                                   colorbar_size=0.3,
                                                   #colormap='bwr',
                                                   #colormap='terrain',
                                                   #vmin=40,
                                                   fig=pl.figure(figsize=(16, 16))
                                                   )

            fname = "%s_features_choices.png" % (prename)
            f.savefig(os.path.join(res_path, directory_, fname),
                      facecolor='black',
                      dpi=150)

            for d_ in ['x', 'y', 'z']:
                fname = "%s_connectome_feature_choices_%s.png" % (prename, d_)
                fname = os.path.join(res_path, directory_, fname)

                plot_connectome(matrix_,
                                coords,
                                colors_lr,
                                4. * size_,
                                15.,
                                fname,
                                title=None,
                                max_=50.,
                                min_=0.,
                                display_=d_
                                )

            fname = "%s_connections_list_feature_choices.txt" % (prename)
            fname = os.path.join(res_path, directory_, fname)
            #print_connections(matrix_, names_lr, fname)

            pl.close('all')