def test_source_space():
    """Test the SourceSpace dimension.

    Checks, for two subjects, that:
    - parcellation labels assigned by ``SourceSpace`` match the FreeSurfer
      ``aparc`` annotation files read via ``read_annot``;
    - ``SourceSpace.connectivity()`` reproduces the graph MNE computes with
      ``mne.spatial_src_connectivity``;
    - connectivity of a label-restricted sub-space is independent of the
      ``SourceSpace`` instance it was derived from.
    """
    data_dir = mne.datasets.sample.data_path()
    subjects_dir = os.path.join(data_dir, 'subjects')
    annot_path = os.path.join(subjects_dir, '%s', 'label', '%s.%s.annot')
    for subject in ['fsaverage', 'sample']:
        mne_src = datasets._mne_source_space(subject, 'ico-4', subjects_dir)
        vertno = [mne_src[0]['vertno'], mne_src[1]['vertno']]
        ss = SourceSpace(vertno, subject, 'ico-4', subjects_dir)

        # labels: every vertex must carry the annotation label of its hemi;
        # FIX: ``izip`` is Python-2-only (itertools.izip) -> use builtin zip
        for hemi_vertices, hemi in zip(ss.vertno, ('lh', 'rh')):
            labels, _, names = read_annot(annot_path % (subject, hemi, 'aparc'))
            # rh vertex indices follow the lh block in the flat parc array
            start = 0 if hemi == 'lh' else len(ss.lh_vertno)
            hemi_tag = '-' + hemi
            for i, v in enumerate(hemi_vertices, start):
                label = labels[v]
                if label == -1:
                    # -1 marks vertices outside any annotation label
                    eq_(ss.parc[i], 'unknown' + hemi_tag)
                else:
                    eq_(ss.parc[i], names[label] + hemi_tag)

        # connectivity must match MNE's source-space graph
        conn = ss.connectivity()
        mne_conn = mne.spatial_src_connectivity(mne_src)
        assert_array_equal(conn, _matrix_graph(mne_conn))

        # sub-space connectivity: restricting to one label must give the
        # same adjacency regardless of which SourceSpace instance is used
        sssub = ss[ss.dimindex('superiortemporal-rh')]
        ss2 = SourceSpace(vertno, subject, 'ico-4', subjects_dir, 'aparc')
        ss2sub = ss2[ss2.dimindex('superiortemporal-rh')]
        assert_array_equal(sssub.connectivity(), ss2sub.connectivity())
def test_spatial_src_connectivity():
    """Test spatial connectivity functionality."""
    # Oct source space: triangulation-based and distance-limited adjacency
    # should mostly agree.
    src_oct = read_source_spaces(fname_src)
    assert src_oct[0]['dist'] is not None  # distance info must be present
    adj = spatial_src_connectivity(src_oct).toarray()
    adj_dist = spatial_src_connectivity(src_oct, dist=0.01).toarray()
    assert (adj == adj_dist).mean() > 0.75

    # Ico source space: adjacency must equal the one built directly from
    # the grade-5 triangulation.
    src_ico = read_source_spaces(fname_src_fs)
    adj = spatial_src_connectivity(src_ico).tocsr()
    adj_tris = spatial_tris_connectivity(grade_to_tris(5)).tocsr()
    assert adj.shape == adj_tris.shape
    assert_array_equal(adj.data, adj_tris.data)
    assert_array_equal(adj.indptr, adj_tris.indptr)
    assert_array_equal(adj.indices, adj_tris.indices)

    # One hemisphere: compare against the upper-left (lh) block of the
    # full triangulation adjacency.
    adj_lh = spatial_src_connectivity(src_ico[:1]).tocsr()
    adj_lh_tris = spatial_tris_connectivity(grade_to_tris(5)).tocsr()
    adj_lh_tris = adj_lh_tris[:10242, :10242].tocsr()
    assert_array_equal(adj_lh.data, adj_lh_tris.data)
    assert_array_equal(adj_lh.indptr, adj_lh_tris.indptr)
    assert_array_equal(adj_lh.indices, adj_lh_tris.indices)
def test_vol_connectivity():
    """Test volume connectivity."""
    vol_src = read_source_spaces(fname_vsrc)

    # distance-based adjacency is rejected for volume source spaces
    pytest.raises(ValueError, spatial_src_connectivity, vol_src, dist=1.)

    adjacency = spatial_src_connectivity(vol_src)
    n_verts = vol_src[0]['inuse'].sum()
    assert_equal(adjacency.shape, (n_verts, n_verts))
    assert np.all(adjacency.data == 1)  # unweighted graph
    assert isinstance(adjacency, sparse.coo_matrix)

    # spatio-temporal variant doubles each spatial dimension per time point
    adjacency_st = spatio_temporal_src_connectivity(vol_src, n_times=2)
    assert_equal(adjacency_st.shape, (2 * n_verts, 2 * n_verts))
    assert np.all(adjacency_st.data == 1)
def test_spatial_src_connectivity(): """Test spatial connectivity functionality.""" # oct src = read_source_spaces(fname_src) assert src[0]['dist'] is not None # distance info with pytest.warns(RuntimeWarning, match='will have holes'): con = spatial_src_connectivity(src).toarray() con_dist = spatial_src_connectivity(src, dist=0.01).toarray() assert (con == con_dist).mean() > 0.75 # ico src = read_source_spaces(fname_src_fs) con = spatial_src_connectivity(src).tocsr() con_tris = spatial_tris_connectivity(grade_to_tris(5)).tocsr() assert con.shape == con_tris.shape assert_array_equal(con.data, con_tris.data) assert_array_equal(con.indptr, con_tris.indptr) assert_array_equal(con.indices, con_tris.indices) # one hemi con_lh = spatial_src_connectivity(src[:1]).tocsr() con_lh_tris = spatial_tris_connectivity(grade_to_tris(5)).tocsr() con_lh_tris = con_lh_tris[:10242, :10242].tocsr() assert_array_equal(con_lh.data, con_lh_tris.data) assert_array_equal(con_lh.indptr, con_lh_tris.indptr) assert_array_equal(con_lh.indices, con_lh_tris.indices)
def test_source_space():
    """Test SourceSpace dimension."""
    for subject in ['fsaverage', 'sample']:
        src = datasets._mne_source_space(subject, 'ico-4', subjects_dir)
        vertices = [src[0]['vertno'], src[1]['vertno']]
        source = SourceSpace(vertices, subject, 'ico-4', subjects_dir, 'aparc')

        # adjacency must replicate MNE's source-space graph
        mne_graph = mne.spatial_src_connectivity(src)
        assert_array_equal(source.connectivity(), _matrix_graph(mne_graph))

        # restricting to one label gives the same adjacency for any instance
        sub_a = source[source.dimindex('superiortemporal-rh')]
        rebuilt = SourceSpace(vertices, subject, 'ico-4', subjects_dir, 'aparc')
        sub_b = rebuilt[rebuilt.dimindex('superiortemporal-rh')]
        assert_array_equal(sub_a.connectivity(), sub_b.connectivity())
def test_source_space():
    """Test SourceSpace dimension."""
    for subject in ['fsaverage', 'sample']:
        # load the ico-4 source space shipped with the test datasets
        mne_src = datasets._mne_source_space(subject, 'ico-4', subjects_dir)
        vertno = [mne_src[0]['vertno'], mne_src[1]['vertno']]
        ss = SourceSpace(vertno, subject, 'ico-4', subjects_dir, 'aparc')
        # connectivity must match the graph MNE derives from the same space
        conn = ss.connectivity()
        mne_conn = mne.spatial_src_connectivity(mne_src)
        assert_array_equal(conn, connectivity_from_coo(mne_conn))
        # sub-space connectivity: restricting to one parcellation label must
        # give the same adjacency regardless of the SourceSpace instance
        sssub = ss[ss.dimindex('superiortemporal-rh')]
        ss2 = SourceSpace(vertno, subject, 'ico-4', subjects_dir, 'aparc')
        ss2sub = ss2[ss2.dimindex('superiortemporal-rh')]
        assert_array_equal(sssub.connectivity(), ss2sub.connectivity())
def test_source_space():
    """Test SourceSpace dimension."""
    for subject in ['fsaverage', 'sample']:
        # read the subject's ico-4 source space directly from disk
        src_path = os.path.join(subjects_dir, subject, 'bem',
                                subject + '-ico-4-src.fif')
        src = mne.read_source_spaces(src_path)
        vertices = [src[0]['vertno'], src[1]['vertno']]
        source = SourceSpace(vertices, subject, 'ico-4', subjects_dir, 'aparc')

        # adjacency must replicate MNE's source-space graph
        graph = mne.spatial_src_connectivity(src)
        assert_array_equal(source.connectivity(), connectivity_from_coo(graph))

        # restricting to one label gives the same adjacency for any instance
        sub_a = source[source.dimindex('superiortemporal-rh')]
        rebuilt = SourceSpace(vertices, subject, 'ico-4', subjects_dir, 'aparc')
        sub_b = rebuilt[rebuilt.dimindex('superiortemporal-rh')]
        assert_array_equal(sub_a.connectivity(), sub_b.connectivity())
    # NOTE(review): this return belongs to a stat-fun def that starts before
    # this chunk — indentation inferred, confirm against the full file
    return f_mway_rm(np.swapaxes(args, 1, 0), factor_levels=factor_levels,
                     effects=effects, return_pvals=return_pvals)[0]


###############################################################################
# Compute clustering statistic
# ----------------------------
#
# To use an algorithm optimized for spatio-temporal clustering, we
# just pass the spatial connectivity matrix (instead of spatio-temporal).
# As we only have one hemisphere we only need half the connectivity.
print('Computing connectivity.')
connectivity = mne.spatial_src_connectivity(src[:1])

# Now let's actually do the clustering. Please relax, on a small
# notebook and one single thread only this will take a couple of minutes ...
pthresh = 0.0005
f_thresh = f_threshold_mway_rm(n_subjects, factor_levels, effects, pthresh)

# To speed things up a bit we will ...
n_permutations = 128  # ... run fewer permutations (reduces sensitivity)

print('Clustering.')
T_obs, clusters, cluster_p_values, H0 = clu = \
    spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=1,
                                 threshold=f_thresh, stat_fun=stat_fun,
                                 n_permutations=n_permutations,
                                 buffer_size=None)
# Load the preprocessed epochs of the first subject to extract metadata
# (conditions, time axis, sampling rate); the data themselves are discarded.
os.chdir(filedir+'/'+SubjList[0]+'/'+ExpID+'/Datafiles/EpochData')
Epochs = mne.read_epochs('ProperEpochData-epo.fif', preload=True)
epochs = Epochs.copy().pick_types(meg=True)
conditions = list(epochs.event_id.keys())
conditions2 = [i for i in conditions if i != 'target']  # drop target trials
times = epochs.times
sfreq = epochs.info['sfreq']
del Epochs, epochs  # free memory; only the metadata above is needed

MRIsubject = 'fsaverage'
subjects_dir = ''

# Source space and spatial adjacency for clustering.
src = mne.read_source_spaces(subjects_dir+'/'+MRIsubject+'/bem/%s-%s-src.fif'
                             % (MRIsubject, useFsaveModel))
connectivity = mne.spatial_src_connectivity(src[-1:])  # use right hemi only
nuseVerts = src[-1]['nuse']

#- make directory for saving results -#
# NOTE(review): chdir inside the loop descends into each directory as it is
# created, producing a nested path — inferred from the collapsed source
os.chdir(filedir+'/GrandAverage/Datafiles')
for direcname in ['StatisticalData', 'SourceData', ExpID, srcdir1]:
    if not os.path.exists('./'+direcname):
        os.mkdir('./'+direcname)
    os.chdir('./'+direcname)
savedir = os.getcwd()
if not os.path.exists('./WithinTrialComparisons'):
    os.mkdir('./WithinTrialComparisons')
os.chdir('./WithinTrialComparisons')
savedir += '/WithinTrialComparisons'
# Paths and analysis parameters for the permutation run.
subjects_dir = "/scratch/jeffhanna/freesurfer/subjects/"
proc_dir = "/scratch/jeffhanna/ATT_dat/proc/"
spacing = "ico4"
conds = ["audio","visselten","visual"]
wavs = ["4000Hz","4000cheby","7000Hz","4000fftf"]
band = opt.band          # frequency band selected on the command line
indep_var = "Angenehm"
n_freqs = 1
n_srcs = 5124
n_subjs = len(subjs)
perm_n = opt.perm        # permutation index for this job

# setup connectivity
fs_src = mne.read_source_spaces("{}{}_{}-src.fif".format(proc_dir,"fsaverage",
                                                         spacing))
cnx = mne.spatial_src_connectivity(fs_src)
del fs_src  # free the source space; only the adjacency is needed
connectivity = _setup_connectivity(cnx, n_srcs, n_freqs)
# mask out excluded vertices (precomputed index file)
exclude = np.load("{}fsaverage_{}_exclude.npy".format(proc_dir,spacing))
include = np.ones(cnx.shape[0],dtype="bool")
include[exclude] = 0

# threshold for clustering (TFCE-style start/step dict)
threshold = dict(start=0, step=0.2)

#random_state = 42
random = np.random.RandomState()  # unseeded: each run draws fresh permutations

# behavioral data frames used as regression targets/predictors
df_laut = pd.read_pickle("/scratch/jeffhanna/ATT_dat/behave/laut")
df_ang = pd.read_pickle("/scratch/jeffhanna/ATT_dat/behave/ang")
predictor_vars = ["Laut","Subj","Block","Wav"]
############################################################################### # Finally, we want to compare the overall activity levels in each condition, # the diff is taken along the last axis (condition). The negative sign makes # it so condition1 > condition2 shows up as "red blobs" (instead of blue). X = np.abs(X) # only magnitude X = X[:, :, :, 0] - X[:, :, :, 1] # make paired contrast ############################################################################### # Compute statistic # ----------------- # # To use an algorithm optimized for spatio-temporal clustering, we # just pass the spatial connectivity matrix (instead of spatio-temporal) print('Computing connectivity.') connectivity = mne.spatial_src_connectivity(src) # Note that X needs to be a multi-dimensional array of shape # samples (subjects) x time x space, so we permute dimensions X = np.transpose(X, [2, 1, 0]) # Now let's actually do the clustering. This can take a long time... # Here we set the threshold quite high to reduce computation. p_threshold = 0.001 t_threshold = -stats.distributions.t.ppf(p_threshold / 2., n_subjects - 1) print('Clustering.') T_obs, clusters, cluster_p_values, H0 = clu = \ spatio_temporal_cluster_1samp_test(X, connectivity=connectivity, n_jobs=1, threshold=t_threshold, buffer_size=None) # Now select the clusters that are sig. at p < 0.05 (note that this value # is multiple-comparisons corrected).
X1[:, :, :] += stc.data[:, :, np.newaxis] # make the activity bigger for the second set of subjects X2[:, :, :] += 3 * stc.data[:, :, np.newaxis] # We want to compare the overall activity levels for each subject X1 = np.abs(X1) # only magnitude X2 = np.abs(X2) # only magnitude ############################################################################### # Compute statistic # ----------------- # # To use an algorithm optimized for spatio-temporal clustering, we # just pass the spatial connectivity matrix (instead of spatio-temporal) print('Computing connectivity.') connectivity = spatial_src_connectivity(src) # Note that X needs to be a list of multi-dimensional array of shape # samples (subjects_k) x time x space, so we permute dimensions X1 = np.transpose(X1, [2, 1, 0]) X2 = np.transpose(X2, [2, 1, 0]) X = [X1, X2] # Now let's actually do the clustering. This can take a long time... # Here we set the threshold quite high to reduce computation. p_threshold = 0.0001 f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2., n_subjects1 - 1, n_subjects2 - 1) print('Clustering.') T_obs, clusters, cluster_p_values, H0 = clu =\ spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=1,
        # NOTE(review): this chunk starts mid-call inside a per-subject loop
        # whose header precedes this view — indentation inferred, confirm
        'mne_dSPM_inverse_morph_highpass-%sHz-faces_eq' % (l_freq, )))
    faces.append(stc.magnitude().crop(None, 0.8).data.T)
    stc = mne.read_source_estimate(
        op.join(
            data_path,
            'mne_dSPM_inverse_morph_highpass-%sHz-scrambled_eq' % (l_freq, )))
    scrambled.append(stc.magnitude().crop(None, 0.8).data.T)
    tstep = stc.tstep  # time step of the last estimate read

###############################################################################
# Set up our contrast and initial p-value threshold
X = np.array(faces, float) - np.array(scrambled, float)
fsaverage_src = mne.read_source_spaces(
    op.join(subjects_dir, 'fsaverage', 'bem', 'fsaverage-5-src.fif'))
connectivity = spatial_src_connectivity(fsaverage_src)
# something like 0.01 is a more typical value here (or use TFCE!), but
# for speed here we'll use 0.001 (fewer clusters to handle)
p_threshold = 0.001
t_threshold = -stats.distributions.t.ppf(p_threshold / 2., len(X) - 1)

###############################################################################
# Here we could do an exact test with ``n_permutations=2**(len(X)-1)``,
# i.e. 32768 permutations, but this would take a long time. For speed and
# simplicity we'll do 1024.
stat_fun = partial(ttest_1samp_no_p, sigma=1e-3)
# NOTE(review): this call continues past the end of this chunk
T_obs, clusters, cluster_p_values, H0 = clu = \
    spatio_temporal_cluster_1samp_test(
        X, connectivity=connectivity, n_jobs=N_JOBS,
        threshold=t_threshold, stat_fun=stat_fun, buffer_size=None,
        seed=0, step_down_p=0.05,
print(__doc__) #%% file paths save_dir = '/media/cbru/SMEDY/scripts_speech_rest/stats/mantel/' SUBJECTS_DIR = '/media/cbru/SMEDY/DATA/MRI_data/MRI_orig/' results_dir = '/media/cbru/SMEDY/results/mantel_correlations/2019_05_simple_model/' read_dir = '/media/cbru/SMEDY/DATA/correlations_mantel/2019_05_simple_model/' #%% compute connectivity src_fname = SUBJECTS_DIR + '/fsaverage/bem/fsaverage-ico-5-src.fif' src = mne.read_source_spaces(src_fname) print('Computing connectivity.') connectivity_sparse = spatial_src_connectivity(src) connectivity = connectivity_sparse.toarray() np.save(save_dir + 'connectivity', connectivity_sparse) #%% cluster correction # for each permutation: # 1. Compute the test statistic for each voxel individually. # 2. Threshold the test statistic values. # 3. Cluster voxels that exceed this threshold (with the same sign) based on adjacency. # 4. Retain the size of the largest cluster (measured, e.g., by a simple voxel count, # or by the sum of voxel t-values within the cluster) to build the null distribution. # define conditions modes = {'iq', 'read', 'mem', 'phon'}