Example #1
import glob

import mne
import numpy as np


def Ara_contr(evt_list, tmin, tmax, conf_type, stcs_path, n_subjects=14, template='fsaverage'):
    # 'subjects_dir' is assumed to be a module-level global.
    con_stcs = []
    for evt in evt_list[:2]:
        fn_stc_list1 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.stc' % evt)
        for fn_stc1 in fn_stc_list1[:n_subjects]:
            stc1 = mne.read_source_estimate(fn_stc1, subject=template)
            stc1.crop(tmin, tmax)
            con_stcs.append(stc1.data)
    cons = np.array(con_stcs).transpose(1, 2, 0)

    # tmin = stc1.tmin
    tstep = stc1.tstep
    fsave_vertices = stc1.vertices
    del stc1

    incon_stcs = []
    for evt in evt_list[2:]:
        fn_stc_list2 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.stc' % evt)
        for fn_stc2 in fn_stc_list2[:n_subjects]:
            stc2 = mne.read_source_estimate(fn_stc2, subject=template)
            stc2.crop(tmin, tmax)
            incon_stcs.append(stc2.data)
    incons = np.array(incon_stcs).transpose(1, 2, 0)
    del stc2
    X = [cons, incons]
    # save data matrix
    X = np.array(X).transpose(1, 2, 3, 0)
    X = np.abs(X)  # only magnitude
    np.savez(stcs_path + '%s.npz' % conf_type, X=X, tstep=tstep, fsave_vertices=fsave_vertices)
    return tstep, fsave_vertices, X
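A minimal usage sketch, assuming subjects_dir points at the directory searched by the glob pattern above; the event names, time window, and output path are illustrative only:

# Hypothetical call: the first two events are treated as one condition,
# the remaining events as the other.
tstep, fsave_vertices, X = Ara_contr(
    evt_list=['LLst', 'RRst', 'LRst', 'RLst'], tmin=0.0, tmax=0.6,
    conf_type='conf_per', stcs_path='/tmp/stcs_')
# X has shape (n_vertices, n_times, n_observations, 2); the last axis
# separates the two conditions.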
def test_volume_stc():
    """Test volume STCs
    """
    N = 100
    data = np.arange(N)[:, np.newaxis]
    datas = [data, data, np.arange(2)[:, np.newaxis]]
    vertno = np.arange(N)
    vertnos = [vertno, vertno[:, np.newaxis], np.arange(2)[:, np.newaxis]]
    vertno_reads = [vertno, vertno, np.arange(2)]
    for data, vertno, vertno_read in zip(datas, vertnos, vertno_reads):
        stc = VolSourceEstimate(data, vertno, 0, 1)
        fname_temp = op.join(tempdir, 'temp-vl.stc')
        stc_new = stc
        for _ in range(2):
            stc_new.save(fname_temp)
            stc_new = read_source_estimate(fname_temp)
            assert_true(isinstance(stc_new, VolSourceEstimate))
            assert_array_equal(vertno_read, stc_new.vertno)
            assert_array_almost_equal(stc.data, stc_new.data)
    # now let's actually read a MNE-C processed file
    stc = read_source_estimate(fname_vol, 'sample')
    assert_true(isinstance(stc, VolSourceEstimate))

    assert_true('sample' in repr(stc))
    stc_new = stc
    assert_raises(ValueError, stc.save, fname_vol, ftype='whatever')
    for _ in range(2):
        fname_temp = op.join(tempdir, 'temp-vol.w')
        stc_new.save(fname_temp, ftype='w')
        stc_new = read_source_estimate(fname_temp)
        assert_true(isinstance(stc_new, VolSourceEstimate))
        assert_array_equal(stc.vertno, stc_new.vertno)
        assert_array_almost_equal(stc.data, stc_new.data)

    # save the stc as a nifti file and export
    try:
        import nibabel as nib
        src = read_source_spaces(fname_vsrc)
        vol_fname = op.join(tempdir, 'stc.nii.gz')
        stc.save_as_volume(vol_fname, src,
                           dest='surf', mri_resolution=False)
        img = nib.load(vol_fname)
        assert_true(img.shape == src[0]['shape'] + (len(stc.times),))

        t1_img = nib.load(fname_t1)
        stc.save_as_volume(op.join(tempdir, 'stc.nii.gz'), src,
                           dest='mri', mri_resolution=True)
        img = nib.load(vol_fname)
        assert_true(img.shape == t1_img.shape + (len(stc.times),))
        assert_array_almost_equal(img.affine, t1_img.affine,
                                  decimal=5)

        # export without saving
        img = stc.as_volume(src, dest='mri', mri_resolution=True)
        assert_true(img.shape == t1_img.shape + (len(stc.times),))
        assert_array_almost_equal(img.affine, t1_img.affine,
                                  decimal=5)

    except ImportError:
        print('Save as NIfTI test skipped, needs NiBabel')
def test_stc_methods():
    """Test stc methods lh_data, rh_data, bin(), center_of_mass(), resample()
    """
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc = read_source_estimate(fname)

    # lh_data / rh_data
    assert_array_equal(stc.lh_data, stc.data[:len(stc.lh_vertno)])
    assert_array_equal(stc.rh_data, stc.data[len(stc.lh_vertno):])

    # bin
    binned = stc.bin(.12)
    a = np.array((1,), dtype=stc.data.dtype)
    a[0] = np.mean(stc.data[0, stc.times < .12])
    assert a[0] == binned.data[0, 0]

    assert_raises(ValueError, stc.center_of_mass, 'sample')
    stc.lh_data[:] = 0
    vertex, hemi, t = stc.center_of_mass('sample')
    assert_true(hemi == 1)
    # XXX Should design a fool-proof test case, but here were the results:
    assert_true(vertex == 92717)
    assert_true(np.round(t, 3) == 0.123)

    stc = read_source_estimate(fname)
    stc_new = deepcopy(stc)
    o_sfreq = 1.0 / stc.tstep
    # note that using no padding for this STC actually reduces edge ringing...
    stc_new.resample(2 * o_sfreq, npad=0, n_jobs=2)
    assert_true(stc_new.data.shape[1] == 2 * stc.data.shape[1])
    assert_true(stc_new.tstep == stc.tstep / 2)
    stc_new.resample(o_sfreq, npad=0)
    assert_true(stc_new.data.shape[1] == stc.data.shape[1])
    assert_true(stc_new.tstep == stc.tstep)
    assert_array_almost_equal(stc_new.data, stc.data, 5)
def test_volume_stc():
    """Test reading and writing volume STCs
    """
    N = 100
    data = np.arange(N)[:, np.newaxis]
    datas = [data, data, np.arange(2)[:, np.newaxis]]
    vertno = np.arange(N)
    vertnos = [vertno, vertno[:, np.newaxis], np.arange(2)[:, np.newaxis]]
    vertno_reads = [vertno, vertno, np.arange(2)]
    for data, vertno, vertno_read in zip(datas, vertnos, vertno_reads):
        stc = SourceEstimate(data, vertno, 0, 1)
        assert_true(stc.is_surface() is False)
        fname_temp = op.join(tempdir, 'temp-vl.stc')
        stc_new = stc
        for _ in range(2):
            stc_new.save(fname_temp)
            stc_new = read_source_estimate(fname_temp)
            assert_true(stc_new.is_surface() is False)
            assert_array_equal(vertno_read, stc_new.vertno)
            assert_array_almost_equal(stc.data, stc_new.data)
    # now let's actually read a MNE-C processed file
    stc = read_source_estimate(fname_vol, 'sample')
    assert_true('sample' in repr(stc))
    stc_new = stc
    assert_raises(ValueError, stc.save, fname_vol, ftype='whatever')
    for _ in range(2):
        fname_temp = op.join(tempdir, 'temp-vol.w')
        stc_new.save(fname_temp, ftype='w')
        stc_new = read_source_estimate(fname_temp)
        assert_true(stc_new.is_surface() is False)
        assert_array_equal(stc.vertno, stc_new.vertno)
        assert_array_almost_equal(stc.data, stc_new.data)
def test_volume_stc():
    """Test volume STCs."""
    tempdir = _TempDir()
    N = 100
    data = np.arange(N)[:, np.newaxis]
    datas = [data, data, np.arange(2)[:, np.newaxis]]
    vertno = np.arange(N)
    vertnos = [vertno, vertno[:, np.newaxis], np.arange(2)[:, np.newaxis]]
    vertno_reads = [vertno, vertno, np.arange(2)]
    for data, vertno, vertno_read in zip(datas, vertnos, vertno_reads):
        stc = VolSourceEstimate(data, vertno, 0, 1)
        fname_temp = op.join(tempdir, 'temp-vl.stc')
        stc_new = stc
        for _ in range(2):
            stc_new.save(fname_temp)
            stc_new = read_source_estimate(fname_temp)
            assert (isinstance(stc_new, VolSourceEstimate))
            assert_array_equal(vertno_read, stc_new.vertices)
            assert_array_almost_equal(stc.data, stc_new.data)

    # now let's actually read a MNE-C processed file
    stc = read_source_estimate(fname_vol, 'sample')
    assert (isinstance(stc, VolSourceEstimate))

    assert ('sample' in repr(stc))
    stc_new = stc
    pytest.raises(ValueError, stc.save, fname_vol, ftype='whatever')
    for ftype in ['w', 'h5']:
        for _ in range(2):
            fname_temp = op.join(tempdir, 'temp-vol.%s' % ftype)
            stc_new.save(fname_temp, ftype=ftype)
            stc_new = read_source_estimate(fname_temp)
            assert (isinstance(stc_new, VolSourceEstimate))
            assert_array_equal(stc.vertices, stc_new.vertices)
            assert_array_almost_equal(stc.data, stc_new.data)
Example #6
import os

import mne
import numpy as np


def apply_norm(fn_stc, event, thr=95):
    # get_files_from_list is assumed to be provided by the surrounding module.
    fn_list = get_files_from_list(fn_stc)
    for fname in fn_list:
        fn_path = os.path.split(fname)[0]
        name = os.path.basename(fname)
        subject = name.split('_')[0]
        fn_base = fn_path + '/%s_%s_baseline-lh.stc' % (subject, event)
        stc = mne.read_source_estimate(fname)
        stc_base = mne.read_source_estimate(fn_base)
        thre = np.percentile(stc_base.data, thr, axis=-1)
        data = stc.data
        cal_mean = data.mean(axis=-1)
        norm_data = (data.T / thre) - 1
        norm_data[norm_data < 0] = 0
        norm_data = norm_data.T
        norm_data[cal_mean == 0, :] = 0
        norm_mean = norm_data.mean(axis=-1)
        zc_data = norm_data.T / norm_data.max(axis=-1)
        zc_data = zc_data.T
        zc_data[norm_mean == 0, :] = 0
        stc.data.setfield(zc_data, np.float32)
        fn_nr = fname[:fname.rfind('-lh')] + '_norm_1'
        stc.save(fn_nr, ftype='stc')
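A usage sketch with hypothetical paths; the function expects a '<subject>_<event>_baseline-lh.stc' file next to each input STC and writes '<input>_norm_1-lh.stc' beside it:

apply_norm(['/data/stcs/101611_LLst_bc-lh.stc',
            '/data/stcs/109925_LLst_bc-lh.stc'], event='LL', thr=95)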
import copy

import mne
import numpy as np


def load_data(stcs1_fname, stcs2_fname, dec):
    stcs1 = [mne.read_source_estimate(fname) for fname in stcs1_fname]
    stcs2 = [mne.read_source_estimate(fname) for fname in stcs2_fname]

    # This is just resampling in time, not space
    def resample_stc(stc, dec):
        """Resample stc in place."""
        stc.data = stc.data[:, ::dec].astype(float)
        stc.tstep *= dec
        stc.times = stc.times[::dec]

    if dec is not None:
        for stc in stcs1 + stcs2:
            resample_stc(stc, dec=dec)
            stc.crop(0.1, None)  # crop the time window for faster runtime

    def average_stcs(stcs):
        mean_stc = copy.deepcopy(stcs[0])
        times = mean_stc.times
        n_sources, n_times = mean_stc.data.shape
        X = np.empty((len(stcs), n_sources, n_times))
        for i, stc in enumerate(stcs):
            # stcs whose time axis does not match the first one are skipped
            if len(times) == len(stc.times):
                X[i] = stc.data
        mean_stc._data = np.mean(X, axis=0)
        return mean_stc, X

    # X1, X2 are the full (subject, vertex, time) matrices;
    # mean_stc1 and mean_stc2 are the grand averages.
    mean_stc1, X1 = average_stcs(stcs1)
    mean_stc2, X2 = average_stcs(stcs2)
    return mean_stc1, X1, mean_stc2, X2
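A usage sketch with hypothetical per-subject file lists; dec=4 keeps every fourth time sample:

stcs1_fname = ['/data/sub%02d_cond1-lh.stc' % i for i in range(1, 10)]
stcs2_fname = ['/data/sub%02d_cond2-lh.stc' % i for i in range(1, 10)]
mean_stc1, X1, mean_stc2, X2 = load_data(stcs1_fname, stcs2_fname, dec=4)
# X1 and X2 have shape (n_subjects, n_sources, n_times).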
Example #8
def test_sensitivity_maps():
    """Test sensitivity map computation"""
    fwd = mne.read_forward_solution(fwd_fname, surf_ori=True)
    proj_eog = read_proj(eog_fname)
    decim = 6
    for ch_type in ['eeg', 'grad', 'mag']:
        w = read_source_estimate(sensmap_fname % (ch_type, 'lh')).data
        stc = sensitivity_map(fwd, projs=None, ch_type=ch_type,
                              mode='free', exclude='bads')
        assert_array_almost_equal(stc.data, w, decim)
        assert_true(stc.subject == 'sample')
        # let's just make sure the others run
        if ch_type == 'grad':
            # fixed (2)
            w = read_source_estimate(sensmap_fname % (ch_type, '2-lh')).data
            stc = sensitivity_map(fwd, projs=None, mode='fixed',
                                  ch_type=ch_type, exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'mag':
            # ratio (3)
            w = read_source_estimate(sensmap_fname % (ch_type, '3-lh')).data
            stc = sensitivity_map(fwd, projs=None, mode='ratio',
                                  ch_type=ch_type, exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'eeg':
            # radiality (4), angle (5), remaining (6), and dampening (7)
            modes = ['radiality', 'angle', 'remaining', 'dampening']
            ends = ['4-lh', '5-lh', '6-lh', '7-lh']
            for mode, end in zip(modes, ends):
                w = read_source_estimate(sensmap_fname % (ch_type, end)).data
                stc = sensitivity_map(fwd, projs=proj_eog, mode=mode,
                                      ch_type=ch_type, exclude='bads')
                assert_array_almost_equal(stc.data, w, decim)
Example #9
def Ara_contr_base(evt_list, tmin, tmax, conf_type, stcs_path, n_subjects=14, template='fsaverage'):
    # glob, os, mne and numpy (np) are assumed to be imported;
    # 'subjects_dir' is assumed to be a module-level global.
    stcs = []
    bs_stcs = []
    for evt in evt_list:
        fn_stc_list1 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.stc' % evt)
        for fn_stc1 in fn_stc_list1[:n_subjects]:
            name = os.path.basename(fn_stc1)
            fn_path = os.path.split(fn_stc1)[0]
            subject = name.split('_')[0]
            fn_stc2 = fn_path + '/%s_%s_baseline-lh.stc' % (subject, evt[:2])
            stc1 = mne.read_source_estimate(fn_stc1, subject=template)
            stc1.crop(tmin, tmax)
            stcs.append(stc1.data)
            stc2 = mne.read_source_estimate(fn_stc2, subject=template)
            bs_stcs.append(stc2.data)
    stcs_ = np.array(stcs).transpose(1, 2, 0)
    bsstcs = np.array(bs_stcs).transpose(1, 2, 0)
    # tmin = stc1.tmin
    tstep = stc1.tstep
    fsave_vertices = stc1.vertices
    del stc1, stc2
    X = [stcs_, bsstcs]
    # save data matrix
    X = np.array(X).transpose(1, 2, 3, 0)
    X = np.abs(X)  # only magnitude
    np.savez(stcs_path + '%s.npz' % conf_type, X=X, tstep=tstep, fsave_vertices=fsave_vertices)
    return tstep, fsave_vertices, X
def test_stc_methods():
    """Test stc methods lh_data, rh_data, bin, center_of_mass, resample"""
    stc = read_source_estimate(fname_stc)

    # lh_data / rh_data
    assert_array_equal(stc.lh_data, stc.data[:len(stc.lh_vertno)])
    assert_array_equal(stc.rh_data, stc.data[len(stc.lh_vertno):])

    # bin
    binned = stc.bin(.12)
    a = np.array((1,), dtype=stc.data.dtype)
    a[0] = np.mean(stc.data[0, stc.times < .12])
    assert a[0] == binned.data[0, 0]

    assert_raises(ValueError, stc.center_of_mass, 'sample')
    assert_raises(TypeError, stc.center_of_mass, 'sample',
                  subjects_dir=subjects_dir, surf=1)
    stc.lh_data[:] = 0
    vertex, hemi, t = stc.center_of_mass('sample', subjects_dir=subjects_dir)
    assert_true(hemi == 1)
    # XXX Should design a fool-proof test case, but here were the results:
    assert_equal(vertex, 124791)
    assert_equal(np.round(t, 2), 0.12)

    stc = read_source_estimate(fname_stc)
    stc.subject = 'sample'
    label_lh = read_labels_from_annot('sample', 'aparc', 'lh',
                                      subjects_dir=subjects_dir)[0]
    label_rh = read_labels_from_annot('sample', 'aparc', 'rh',
                                      subjects_dir=subjects_dir)[0]
    label_both = label_lh + label_rh
    for label in (label_lh, label_rh, label_both):
        assert_true(isinstance(stc.shape, tuple) and len(stc.shape) == 2)
        stc_label = stc.in_label(label)
        if label.hemi != 'both':
            if label.hemi == 'lh':
                verts = stc_label.vertices[0]
            else:  # label.hemi == 'rh':
                verts = stc_label.vertices[1]
            n_vertices_used = len(label.get_vertices_used(verts))
            assert_equal(len(stc_label.data), n_vertices_used)
    stc_lh = stc.in_label(label_lh)
    assert_raises(ValueError, stc_lh.in_label, label_rh)
    label_lh.subject = 'foo'
    assert_raises(RuntimeError, stc.in_label, label_lh)

    stc_new = deepcopy(stc)
    o_sfreq = 1.0 / stc.tstep
    # note that using no padding for this STC reduces edge ringing...
    stc_new.resample(2 * o_sfreq, npad=0, n_jobs=2)
    assert_true(stc_new.data.shape[1] == 2 * stc.data.shape[1])
    assert_true(stc_new.tstep == stc.tstep / 2)
    stc_new.resample(o_sfreq, npad=0)
    assert_true(stc_new.data.shape[1] == stc.data.shape[1])
    assert_true(stc_new.tstep == stc.tstep)
    assert_array_almost_equal(stc_new.data, stc.data, 5)
def test_stc_methods():
    """Test stc methods lh_data, rh_data, bin(), resample()."""
    stc_ = read_source_estimate(fname_stc)

    # Make a vector version of the above source estimate
    x = stc_.data[:, np.newaxis, :]
    yz = np.zeros((x.shape[0], 2, x.shape[2]))
    vec_stc_ = VectorSourceEstimate(
        np.concatenate((x, yz), 1),
        stc_.vertices, stc_.tmin, stc_.tstep, stc_.subject
    )

    for stc in [stc_, vec_stc_]:
        # lh_data / rh_data
        assert_array_equal(stc.lh_data, stc.data[:len(stc.lh_vertno)])
        assert_array_equal(stc.rh_data, stc.data[len(stc.lh_vertno):])

        # bin
        binned = stc.bin(.12)
        a = np.mean(stc.data[..., :np.searchsorted(stc.times, .12)], axis=-1)
        assert_array_equal(a, binned.data[..., 0])

        stc = read_source_estimate(fname_stc)
        stc.subject = 'sample'
        label_lh = read_labels_from_annot('sample', 'aparc', 'lh',
                                          subjects_dir=subjects_dir)[0]
        label_rh = read_labels_from_annot('sample', 'aparc', 'rh',
                                          subjects_dir=subjects_dir)[0]
        label_both = label_lh + label_rh
        for label in (label_lh, label_rh, label_both):
            assert (isinstance(stc.shape, tuple) and len(stc.shape) == 2)
            stc_label = stc.in_label(label)
            if label.hemi != 'both':
                if label.hemi == 'lh':
                    verts = stc_label.vertices[0]
                else:  # label.hemi == 'rh':
                    verts = stc_label.vertices[1]
                n_vertices_used = len(label.get_vertices_used(verts))
                assert_equal(len(stc_label.data), n_vertices_used)
        stc_lh = stc.in_label(label_lh)
        pytest.raises(ValueError, stc_lh.in_label, label_rh)
        label_lh.subject = 'foo'
        pytest.raises(RuntimeError, stc.in_label, label_lh)

        stc_new = deepcopy(stc)
        o_sfreq = 1.0 / stc.tstep
        # note that using no padding for this STC reduces edge ringing...
        stc_new.resample(2 * o_sfreq, npad=0, n_jobs=2)
        assert (stc_new.data.shape[1] == 2 * stc.data.shape[1])
        assert (stc_new.tstep == stc.tstep / 2)
        stc_new.resample(o_sfreq, npad=0)
        assert (stc_new.data.shape[1] == stc.data.shape[1])
        assert (stc_new.tstep == stc.tstep)
        assert_array_almost_equal(stc_new.data, stc.data, 5)
def test_morph_data():
    """Test morphing of data
    """
    subject_from = 'sample'
    subject_to = 'fsaverage'
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc_from = read_source_estimate(fname)
    stc_from.crop(0.09, 0.1)  # for faster computation
    # After running this:
    #    stc_from.save('%s_audvis-meg-cropped' % subject_from)
    # this was run from a command line:
    #    mne_make_movie --stcin sample_audvis-meg-cropped-lh.stc
    #        --subject sample --morph fsaverage --smooth 12 --morphgrade 3
    #        --stc fsaverage_audvis-meg-cropped
    # XXX These files should eventually be moved to the sample dataset and
    # removed from mne/fiff/tests/data/
    fname = op.join(op.dirname(__file__), '..', 'fiff', 'tests', 'data',
                    'fsaverage_audvis-meg-cropped')
    stc_to = read_source_estimate(fname)
    stc_to1 = morph_data(subject_from, subject_to, stc_from,
                         grade=3, smooth=12, buffer_size=1000)
    stc_to1.save('%s_audvis-meg' % subject_to)
    stc_to2 = morph_data(subject_from, subject_to, stc_from,
                         grade=3, smooth=12, buffer_size=3)
    # indexing silliness here due to mne_make_movie's indexing oddities
    assert_array_almost_equal(stc_to.data, stc_to1.data[:, 0][:, None], 5)
    assert_array_almost_equal(stc_to1.data, stc_to2.data)
    # make sure precomputed morph matrices work
    vertices_to = grade_to_vertices(subject_to, grade=3)
    morph_mat = compute_morph_matrix(subject_from, subject_to,
                                     stc_from.vertno, vertices_to,
                                     smooth=12)
    stc_to3 = morph_data_precomputed(subject_from, subject_to,
                                     stc_from, vertices_to, morph_mat)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert_true(np.corrcoef(mean_to, mean_from).min() > 0.999)

    # test two types of morphing:
    # 1) make sure we can fill by morphing
    stc_to5 = morph_data(subject_from, subject_to, stc_from,
                         grade=None, smooth=12, buffer_size=3)
    assert_true(stc_to5.data.shape[0] == 163842 + 163842)

    # 2) make sure we can specify vertices
    vertices_to = [np.arange(10242), np.arange(10242)]
    stc_to3 = morph_data(subject_from, subject_to, stc_from,
                         grade=vertices_to, smooth=12, buffer_size=3)
    stc_to4 = morph_data(subject_from, subject_to, stc_from,
                         grade=5, smooth=12, buffer_size=3)
    assert_array_almost_equal(stc_to3.data, stc_to4.data)
def test_io_w():
    """Test IO for w files
    """
    stc = _fake_stc(n_time=1)
    w_fname = op.join(tempdir, 'fake')
    stc.save(w_fname, ftype='w')
    src = read_source_estimate(w_fname)
    src.save(op.join(tempdir, 'tmp'), ftype='w')
    src2 = read_source_estimate(op.join(tempdir, 'tmp-lh.w'))
    assert_array_almost_equal(src.data, src2.data)
    assert_array_almost_equal(src.lh_vertno, src2.lh_vertno)
    assert_array_almost_equal(src.rh_vertno, src2.rh_vertno)
Example #14
def Ara_contr(evt_list, tmin, tmax, conf_type, out_path, n_subjects=14,
              template='fsaverage', subjects_dir=None):

    '''Prepare arrays for the contrasts of conflict perception
       and conflict response.

       Parameters
       ----------
       evt_list : list
           The list of events.
       tmin, tmax : float (s)
           The time window of the data.
       conf_type : str
           The type of contrast, 'conf_per' or 'conf_res'.
       out_path : str
           The path where the arranged arrays are stored.
       n_subjects : int
           The number of subjects.
       subjects_dir : str
           The root path containing all subjects.
    '''
    con_stcs = []
    for evt in evt_list[:2]:
        fn_stc_list1 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.st-lh.stc' % evt)
        for fn_stc1 in fn_stc_list1[:n_subjects]:
            stc1 = mne.read_source_estimate(fn_stc1, subject=template)
            stc1.crop(tmin, tmax)
            con_stcs.append(stc1.data)
    cons = np.array(con_stcs).transpose(1, 2, 0)

    # tmin = stc1.tmin
    tstep = stc1.tstep
    fsave_vertices = stc1.vertices
    del stc1

    incon_stcs = []
    for evt in evt_list[2:]:
        fn_stc_list2 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.st-lh.stc' % evt)
        for fn_stc2 in fn_stc_list2[:n_subjects]:
            stc2 = mne.read_source_estimate(fn_stc2, subject=template)
            stc2.crop(tmin, tmax)
            incon_stcs.append(stc2.data)
    incons = np.array(incon_stcs).transpose(1, 2, 0)
    del stc2
    X = [cons, incons]
    # save data matrix
    X = np.array(X).transpose(1, 2, 3, 0)
    X = np.abs(X)  # only magnitude
    np.savez(out_path + '%s.npz' % conf_type, X=X, tstep=tstep,
             fsave_vertices=fsave_vertices)
    return tstep, fsave_vertices, X
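The saved .npz can be reloaded later, e.g. for a cluster permutation test. A minimal sketch, assuming a hypothetical out_path of '/tmp/out_' and conf_type='conf_per':

import numpy as np

npz = np.load('/tmp/out_conf_per.npz')  # hypothetical out_path + conf_type
X = npz['X']  # (n_vertices, n_times, n_observations, 2)
tstep = float(npz['tstep'])
contrast = X[..., 0] - X[..., 1]  # per-observation condition difference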
def test_morph_data():
    """Test morphing of data
    """
    subject_from = 'sample'
    subject_to = 'fsaverage'
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc_from = read_source_estimate(fname, subject='sample')
    fname = op.join(data_path, 'MEG', 'sample', 'fsaverage_audvis-meg')
    stc_to = read_source_estimate(fname)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    stc_to1 = stc_from.morph(subject_to, grade=3, smooth=12, buffer_size=1000,
                             subjects_dir=subjects_dir)
    stc_to1.save(op.join(tempdir, '%s_audvis-meg' % subject_to))
    # make sure we can specify vertices
    vertices_to = grade_to_vertices(subject_to, grade=3)
    stc_to2 = morph_data(subject_from, subject_to, stc_from,
                         grade=vertices_to, smooth=12, buffer_size=1000,
                         subjects_dir=subjects_dir)
    # make sure we can use different buffer_size
    stc_to3 = morph_data(subject_from, subject_to, stc_from,
                         grade=vertices_to, smooth=12, buffer_size=3,
                         subjects_dir=subjects_dir)

    assert_array_almost_equal(stc_to.data, stc_to1.data, 5)
    assert_array_almost_equal(stc_to1.data, stc_to2.data)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)
    # make sure precomputed morph matrices work
    morph_mat = compute_morph_matrix(subject_from, subject_to,
                                     stc_from.vertno, vertices_to,
                                     smooth=12, subjects_dir=subjects_dir)
    stc_to3 = stc_from.morph_precomputed(subject_to, vertices_to, morph_mat)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert_true(np.corrcoef(mean_to, mean_from).min() > 0.999)

    # make sure we can fill by morphing
    stc_to5 = morph_data(subject_from, subject_to, stc_from, grade=None,
                         smooth=12, buffer_size=3, subjects_dir=subjects_dir)
    assert_true(stc_to5.data.shape[0] == 163842 + 163842)

    # test morphing to the same subject
    stc_to6 = stc_from.morph(subject_from, grade=stc_from.vertno, smooth=1,
                             subjects_dir=subjects_dir)
    mask = np.ones(stc_from.data.shape[0], dtype=bool)
    # XXX: there is a bug somewhere that causes a difference at 2 vertices..
    mask[6799] = False
    mask[6800] = False
    assert_array_almost_equal(stc_from.data[mask], stc_to6.data[mask], 5)
def test_io_stc():
    """Test IO for STC files
    """
    stc = read_source_estimate(fname)
    stc.save(op.join(tempdir, "tmp.stc"))
    stc2 = read_source_estimate(op.join(tempdir, "tmp.stc"))

    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.tmin, stc2.tmin)
    assert_true(len(stc.vertno) == len(stc2.vertno))
    for v1, v2 in zip(stc.vertno, stc2.vertno):
        assert_array_almost_equal(v1, v2)
    assert_array_almost_equal(stc.tstep, stc2.tstep)
Example #17
def Ara_contr_base(evt_list, tmin, tmax, conf_type, out_path, n_subjects=14,
                   template='fsaverage', subjects_dir=None):

    '''Prepare arrays for the data contrasts of prestimulus and post-stimulus.

       Parameters
       ----------
       evt_list : list
           The list of events.
       tmin, tmax : float (s)
           The time window of the data.
       conf_type : str
           The type of contrast, 'sti' or 'res'.
       out_path : str
           The path where the arranged arrays are stored.
       n_subjects : int
           The number of subjects.
       subjects_dir : str
           The root path containing all subjects.
    '''
    for evt in evt_list:
        stcs = []
        bs_stcs = []
        fn_stc_list1 = glob.glob(subjects_dir + '/fsaverage/dSPM_ROIs/*[0-9]/*fibp1-45,evtW_%s_bc-lh.st-lh.stc' % evt)
        for fn_stc1 in fn_stc_list1[:n_subjects]:
            name = os.path.basename(fn_stc1)
            fn_path = os.path.split(fn_stc1)[0]
            subject = name.split('_')[0]
            fn_stc2 = fn_path + '/%s_%s_baseline-lh.stc' % (subject, evt[:2])
            stc1 = mne.read_source_estimate(fn_stc1, subject=template)
            stc1.crop(tmin, tmax)
            stcs.append(stc1.data)
            stc2 = mne.read_source_estimate(fn_stc2, subject=template)
            bs_stcs.append(stc2.data)
        stcs = np.array(stcs).transpose(1, 2, 0)
        bs_stcs = np.array(bs_stcs).transpose(1, 2, 0)
        # tmin = stc1.tmin
        tstep = stc1.tstep
        fsave_vertices = stc1.vertices
        if stcs.shape[1] > bs_stcs.shape[1]:
            X = [stcs[:, :bs_stcs.shape[1], :], bs_stcs[:, :, :]]
        else:
            X = [stcs[:, :, :], bs_stcs[:, :stcs.shape[1], :]]
        del stcs, bs_stcs
        # save data matrix
        X = np.array(X).transpose(1, 2, 3, 0)
        X = np.abs(X)  # only magnitude
        np.savez(out_path + '%s_%s.npz' % (conf_type, evt), X=X, tstep=tstep,
                 fsave_vertices=fsave_vertices)
        del X
Example #18
def test_surface_vector_source_morph():
    """Test surface and vector source estimate morph."""
    tempdir = _TempDir()

    inverse_operator_surf = read_inverse_operator(fname_inv_surf)

    stc_surf = read_source_estimate(fname_smorph, subject='sample')
    stc_surf.crop(0.09, 0.1)  # for faster computation

    stc_vec = _real_vec_stc()

    source_morph_surf = compute_source_morph(
        inverse_operator_surf['src'], subjects_dir=subjects_dir,
        smooth=1, warn=False)  # smooth 1 for speed
    assert source_morph_surf.subject_from == 'sample'
    assert source_morph_surf.subject_to == 'fsaverage'
    assert source_morph_surf.kind == 'surface'
    assert isinstance(source_morph_surf.src_data, dict)
    assert isinstance(source_morph_surf.src_data['vertices_from'], list)
    assert isinstance(source_morph_surf, SourceMorph)
    stc_surf_morphed = source_morph_surf.apply(stc_surf)
    assert isinstance(stc_surf_morphed, SourceEstimate)
    stc_vec_morphed = source_morph_surf.apply(stc_vec)
    with pytest.raises(ValueError, match='Only volume source estimates'):
        source_morph_surf.apply(stc_surf, output='nifti1')

    # check if correct class after morphing
    assert isinstance(stc_surf_morphed, SourceEstimate)
    assert isinstance(stc_vec_morphed, VectorSourceEstimate)

    # check __repr__
    assert 'surface' in repr(source_morph_surf)

    # check loading and saving for surf
    source_morph_surf.save(op.join(tempdir, '42.h5'))

    source_morph_surf_r = read_source_morph(op.join(tempdir, '42.h5'))

    assert (all([read == saved for read, saved in
                 zip(sorted(source_morph_surf_r.__dict__),
                     sorted(source_morph_surf.__dict__))]))

    # check wrong subject correction
    stc_surf.subject = None
    assert isinstance(source_morph_surf.apply(stc_surf), SourceEstimate)

    # degenerate
    stc_vol = read_source_estimate(fname_vol, 'sample')
    with pytest.raises(ValueError, match='stc_from was type'):
        source_morph_surf.apply(stc_vol)
def test_io_w():
    """Test IO for w files
    """
    w_fname = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis-meg-oct-6-fwd-sensmap')

    src = read_source_estimate(w_fname)

    src.save(op.join(tempdir, 'tmp'), ftype='w')

    src2 = read_source_estimate(op.join(tempdir, 'tmp-lh.w'))

    assert_array_almost_equal(src.data, src2.data)
    assert_array_almost_equal(src.lh_vertno, src2.lh_vertno)
    assert_array_almost_equal(src.rh_vertno, src2.rh_vertno)
Example #20
def test_sensitivity_maps():
    """Test sensitivity map computation."""
    fwd = mne.read_forward_solution(fwd_fname)
    fwd = mne.convert_forward_solution(fwd, surf_ori=True)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        projs = read_proj(eog_fname)
        projs.extend(read_proj(ecg_fname))
    decim = 6
    for ch_type in ['eeg', 'grad', 'mag']:
        w = read_source_estimate(sensmap_fname % (ch_type, 'lh')).data
        stc = sensitivity_map(fwd, projs=None, ch_type=ch_type,
                              mode='free', exclude='bads')
        assert_array_almost_equal(stc.data, w, decim)
        assert_true(stc.subject == 'sample')
        # let's just make sure the others run
        if ch_type == 'grad':
            # fixed (2)
            w = read_source_estimate(sensmap_fname % (ch_type, '2-lh')).data
            stc = sensitivity_map(fwd, projs=None, mode='fixed',
                                  ch_type=ch_type, exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'mag':
            # ratio (3)
            w = read_source_estimate(sensmap_fname % (ch_type, '3-lh')).data
            stc = sensitivity_map(fwd, projs=None, mode='ratio',
                                  ch_type=ch_type, exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'eeg':
            # radiality (4), angle (5), remaining (6), and dampening (7)
            modes = ['radiality', 'angle', 'remaining', 'dampening']
            ends = ['4-lh', '5-lh', '6-lh', '7-lh']
            for mode, end in zip(modes, ends):
                w = read_source_estimate(sensmap_fname % (ch_type, end)).data
                stc = sensitivity_map(fwd, projs=projs, mode=mode,
                                      ch_type=ch_type, exclude='bads')
                assert_array_almost_equal(stc.data, w, decim)

    # test corner case for EEG
    stc = sensitivity_map(fwd, projs=[make_eeg_average_ref_proj(fwd['info'])],
                          ch_type='eeg', exclude='bads')
    # test corner case for projs being passed but no valid ones (#3135)
    assert_raises(ValueError, sensitivity_map, fwd, projs=None, mode='angle')
    assert_raises(RuntimeError, sensitivity_map, fwd, projs=[], mode='angle')
    # test volume source space
    fname = op.join(sample_path, 'sample_audvis_trunc-meg-vol-7-fwd.fif')
    fwd = mne.read_forward_solution(fname)
    sensitivity_map(fwd)
def test_mixed_stc():
    """Test source estimate from mixed source space."""
    N = 90  # number of sources
    T = 2  # number of time points
    S = 3  # number of source spaces

    data = rng.randn(N, T)
    vertno = S * [np.arange(N // S)]

    # make sure error is raised if vertices are not a list of length >= 2
    pytest.raises(ValueError, MixedSourceEstimate, data=data,
                  vertices=[np.arange(N)])

    stc = MixedSourceEstimate(data, vertno, 0, 1)

    vol = read_source_spaces(fname_vsrc)

    # make sure error is raised for plotting surface with volume source
    pytest.raises(ValueError, stc.plot_surface, src=vol)

    tempdir = _TempDir()
    fname = op.join(tempdir, 'mixed-stc.h5')
    stc.save(fname)
    stc_out = read_source_estimate(fname)
    assert_array_equal(stc_out.vertices, vertno)
    assert_array_equal(stc_out.data, data)
    assert stc_out.tmin == 0
    assert stc_out.tstep == 1
    assert isinstance(stc_out, MixedSourceEstimate)
# 'mne', 'os', 'nb' (nibabel), 'freesurfer', 'path', 'mesh_edges' and
# 'smoothing_matrix' are assumed to be imported by the surrounding module
# (mesh_edges and smoothing_matrix as in PySurfer's surfer.utils).
def upsample_stc_map(stc_file_prefix, subject_id, smoothing_steps=10,
                     subjects_dir=None):

    if subjects_dir is None:
        subjects_dir = os.environ["SUBJECTS_DIR"]

    stc = mne.read_source_estimate(stc_file_prefix)

    fs_sub_name = "Sub%02d" % subject_id

    lh_vert, lh_triag = freesurfer.load(path(subjects_dir) / fs_sub_name /
                                        "lh.orig")
    rh_vert, rh_triag = freesurfer.load(path(subjects_dir) / fs_sub_name /
                                        "rh.orig")

    lh_adj = mesh_edges(lh_triag)
    rh_adj = mesh_edges(rh_triag)

    lh_smoothing_mat = smoothing_matrix(stc.lh_vertno, lh_adj,
                                        smoothing_steps=smoothing_steps)
    rh_smoothing_mat = smoothing_matrix(stc.rh_vertno, rh_adj,
                                        smoothing_steps=smoothing_steps)

    lh_upsampled = lh_smoothing_mat.dot(stc.lh_data).ravel()
    rh_upsampled = rh_smoothing_mat.dot(stc.rh_data).ravel()

    lh_out_file = stc_file_prefix + "-upsampled-lh.mgz"
    rh_out_file = stc_file_prefix + "-upsampled-rh.mgz"

    freesurfer.save(nb.Nifti1Image(lh_upsampled[:, np.newaxis, np.newaxis],
                    affine=np.eye(4)), lh_out_file)

    freesurfer.save(nb.Nifti1Image(rh_upsampled[:, np.newaxis, np.newaxis],
                    affine=np.eye(4)), rh_out_file)
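A usage sketch under the same assumptions (paths hypothetical); this writes '<prefix>-upsampled-lh.mgz' and '<prefix>-upsampled-rh.mgz':

upsample_stc_map('/data/stcs/contrast_map', subject_id=3, smoothing_steps=10,
                 subjects_dir='/data/freesurfer')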
def test_stc_arithmetic():
    """Test arithmetic for STC files
    """
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc = read_source_estimate(fname)
    data = stc.data.copy()

    out = list()
    for a in [data, stc]:
        a = a + a * 3 + 3 * a - a ** 2 / 2

        a += a
        a -= a
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('always')
            a /= 2 * a
        a *= -a

        a += 2
        a -= 1
        a *= -1
        a /= 2
        a **= 3
        out.append(a)

    assert_array_equal(out[0], out[1].data)
    assert_array_equal(stc.sqrt().data, np.sqrt(stc.data))

    stc_mean = stc.mean()
    assert_array_equal(stc_mean.data, np.mean(stc.data, 1)[:, None])
Example #24
def test_stc_to_label():
    """Test stc_to_label
    """
    src = read_source_spaces(src_fname)
    src_bad = read_source_spaces(src_bad_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = stc_to_label(stc, src='sample', smooth=3)
    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels2 = stc_to_label(stc, src=src, smooth=3)
    assert_true(len(w) == 1)
    assert_true(len(labels1) == len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_lh, labels_rh = stc_to_label(stc, src=src, smooth=3,
                                            connected=True)
    assert_true(len(w) == 1)
    assert_raises(ValueError, stc_to_label, stc, 'sample', smooth=3,
                  connected=True)
    assert_raises(RuntimeError, stc_to_label, stc, src=src_bad, connected=True)
    assert_true(len(labels_lh) == 1)
    assert_true(len(labels_rh) == 1)
Example #25
def apply_stcs(method='dSPM', event='LLst'):
    '''Normalize the individual STCs and average them across subjects.

       Parameters
       ----------
       method : str
           'dSPM' or 'MNE'.
       event : str
           The event name in the experimental conditions.
    '''
    import glob
    from scipy.signal import detrend
    from scipy.stats.mstats import zscore

    # mne, numpy (np) and a module-level 'subjects_dir' are assumed.
    fn_list = glob.glob(subjects_dir + '/fsaverage/%s_ROIs/*/*,evtW_%s_bc-lh.stc' % (method, event))
    stcs = []
    for fname in fn_list:
        stc = mne.read_source_estimate(fname)
        cal_data = stc.data
        dt_data = detrend(cal_data, axis=-1)
        zc_data = zscore(dt_data, axis=-1)
        stc.data.setfield(zc_data, np.float32)
        stcs.append(stc)
    stcs = np.array(stcs)
    stc_avg = np.sum(stcs, axis=0) / stcs.shape[0]
    fn_avg = subjects_dir + '/fsaverage/%s_ROIs/%s' % (method, event)
    stc_avg.save(fn_avg, ftype='stc')
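A usage sketch, assuming subjects_dir is set and the per-subject STC files exist:

# Detrend and z-score each subject's STC, then save the group average as
# '<subjects_dir>/fsaverage/dSPM_ROIs/LLst-lh.stc'.
apply_stcs(method='dSPM', event='LLst')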
def test_stc_arithmetic():
    """Test arithmetic for STC files
    """
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc = read_source_estimate(fname)
    data = stc.data.copy()

    out = list()
    for a in [data, stc]:
        a = a + a * 3 + 3 * a - a ** 2 / 2

        a += a
        a -= a
        a /= 2 * a
        a *= -a

        a += 2
        a -= 1
        a *= -1
        a /= 2
        a **= 3
        out.append(a)

    assert_array_equal(out[0], out[1].data)
    assert_array_equal(stc.sqrt().data, np.sqrt(stc.data))
def apply_rois(fn_stc, tmin, tmax, thr, min_subject='fsaverage'):
    # 'subjects_dir' and 'reset_directory' are assumed to come from the
    # surrounding module.
    stc_avg = mne.read_source_estimate(fn_stc)
    stc_avg = stc_avg.crop(tmin, tmax)
    src_pow = np.sum(stc_avg.data ** 2, axis=1)
    stc_avg.data[src_pow < np.percentile(src_pow, thr)] = 0.
    fn_src = subjects_dir + '/%s/bem/fsaverage-ico-5-src.fif' % min_subject
    src_inv = mne.read_source_spaces(fn_src)
    func_labels_lh, func_labels_rh = mne.stc_to_label(
        stc_avg, src=src_inv, smooth=True,
        subjects_dir=subjects_dir,
        connected=True)
    labels_path = fn_stc[:fn_stc.rfind('-')] + '/ini'
    reset_directory(labels_path)
    # left-hemisphere definition
    for i, func_label in enumerate(func_labels_lh):
        func_label.save(labels_path + '/ROI_%d' % i)
    # right-hemisphere definition
    for j, func_label in enumerate(func_labels_rh):
        func_label.save(labels_path + '/ROI_%d' % j)
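A hypothetical call; vertices below the 85th power percentile are zeroed and the connected labels are saved under '<fn_stc minus suffix>/ini':

apply_rois('/data/fsaverage/dSPM_ROIs/LLst-lh.stc', tmin=0.0, tmax=0.3, thr=85)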
Example #28
def test_label_io_and_time_course_estimates():
    """Test IO for label + stc files."""
    stc = read_source_estimate(stc_fname)
    label = read_label(real_label_fname)
    stc_label = stc.in_label(label)

    assert (len(stc_label.times) == stc_label.data.shape[1])
    assert (len(stc_label.vertices[0]) == stc_label.data.shape[0])
Example #29
def stc_ndvar(stc, subject, src, subjects_dir=None, name=None, check=True,
              parc='aparc'):
    """
    Convert one or more :class:`mne.SourceEstimate` objects to an :class:`NDVar`.

    Parameters
    ----------
    stc : SourceEstimate | list of SourceEstimates | str
        The source estimate object(s) or a path to an stc file.
    subject : str
        MRI subject (used for loading MRI in PySurfer plotting)
    src : str
        The kind of source space used (e.g., 'ico-4').
    subjects_dir : None | str
        The path to the subjects_dir (needed to locate the source space
        file).
    name : str | None
        Ndvar name.
    check : bool
        If multiple stcs are provided, check if all stcs have the same times
        and vertices.
    parc : None | str
        Parcellation to add to the source space.
    """
    subjects_dir = mne.utils.get_subjects_dir(subjects_dir)

    if isinstance(stc, str):
        stc = mne.read_source_estimate(stc)

    # construct data array
    if isinstance(stc, _BaseSourceEstimate):
        case = False
        x = stc.data
    else:
        case = True
        stcs = stc
        stc = stcs[0]
        if check:
            times = stc.times
            vertno = stc.vertno
            for stc_ in stcs[1:]:
                assert np.array_equal(stc_.times, times)
                assert np.array_equal(stc_.vertno, vertno)
        x = np.array([s.data for s in stcs])

    # Construct NDVar Dimensions
    time = UTS(stc.tmin, stc.tstep, stc.shape[1])
    if isinstance(stc, mne.VolSourceEstimate):
        ss = SourceSpace([stc.vertno], subject, src, subjects_dir, parc)
    else:
        ss = SourceSpace(stc.vertno, subject, src, subjects_dir, parc)

    if case:
        dims = ('case', ss, time)
    else:
        dims = (ss, time)

    return NDVar(x, dims, name=name)
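A usage sketch (this function comes from an Eelbrain context; the stc path is hypothetical):

# Convert one saved source estimate to an NDVar for further Eelbrain analysis.
ndvar = stc_ndvar('/data/stcs/sub01_cond1-lh.stc', subject='fsaverage',
                  src='ico-4', subjects_dir='/data/mri', name='cond1')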
Example #30
def test_stc_to_label():
    """Test stc_to_label
    """
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        src = read_source_spaces(fwd_fname)
    src_bad = read_source_spaces(src_bad_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = _stc_to_label(stc, src='sample', smooth=3)
    labels2 = _stc_to_label(stc, src=src, smooth=3)
    assert_equal(len(labels1), len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_lh, labels_rh = stc_to_label(stc, src=src, smooth=True,
                                            connected=True)

    assert_true(len(w) > 0)
    assert_raises(ValueError, stc_to_label, stc, 'sample', smooth=True,
                  connected=True)
    assert_raises(RuntimeError, stc_to_label, stc, smooth=True, src=src_bad,
                  connected=True)
    assert_equal(len(labels_lh), 1)
    assert_equal(len(labels_rh), 1)

    # test getting tris
    tris = labels_lh[0].get_tris(src[0]['use_tris'], vertices=stc.vertices[0])
    assert_raises(ValueError, spatial_tris_connectivity, tris,
                  remap_vertices=False)
    connectivity = spatial_tris_connectivity(tris, remap_vertices=True)
    assert_true(connectivity.shape[0] == len(stc.vertices[0]))

    # "src" as a subject name
    assert_raises(TypeError, stc_to_label, stc, src=1, smooth=False,
                  connected=False, subjects_dir=subjects_dir)
    assert_raises(ValueError, stc_to_label, stc, src=SourceSpaces([src[0]]),
                  smooth=False, connected=False, subjects_dir=subjects_dir)
    assert_raises(ValueError, stc_to_label, stc, src='sample', smooth=False,
                  connected=True, subjects_dir=subjects_dir)
    assert_raises(ValueError, stc_to_label, stc, src='sample', smooth=True,
                  connected=False, subjects_dir=subjects_dir)
    labels_lh, labels_rh = stc_to_label(stc, src='sample', smooth=False,
                                        connected=False,
                                        subjects_dir=subjects_dir)
    assert_true(len(labels_lh) > 1)
    assert_true(len(labels_rh) > 1)

    # with smooth='patch'
    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_patch = stc_to_label(stc, src=src, smooth=True)
    assert_equal(len(w), 1)
    assert_equal(len(labels_patch), len(labels1))
    for l1, l2 in zip(labels_patch, labels1):
        assert_labels_equal(l1, l2, decimal=4)
Example #31
def test_morph_data():
    """Test morphing of data
    """
    subject_from = 'sample'
    subject_to = 'fsaverage'
    fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg')
    stc_from = read_source_estimate(fname, subject='sample')
    fname = op.join(data_path, 'MEG', 'sample', 'fsaverage_audvis-meg')
    stc_to = read_source_estimate(fname)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    stc_to1 = stc_from.morph(subject_to,
                             grade=3,
                             smooth=12,
                             buffer_size=1000,
                             subjects_dir=subjects_dir)
    stc_to1.save(op.join(tempdir, '%s_audvis-meg' % subject_to))
    # make sure we can specify vertices
    vertices_to = grade_to_vertices(subject_to,
                                    grade=3,
                                    subjects_dir=subjects_dir)
    stc_to2 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=vertices_to,
                         smooth=12,
                         buffer_size=1000,
                         subjects_dir=subjects_dir)
    # make sure we can use different buffer_size
    stc_to3 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=vertices_to,
                         smooth=12,
                         buffer_size=3,
                         subjects_dir=subjects_dir)

    assert_array_almost_equal(stc_to.data, stc_to1.data, 5)
    assert_array_almost_equal(stc_to1.data, stc_to2.data)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)
    # make sure precomputed morph matrices work
    morph_mat = compute_morph_matrix(subject_from,
                                     subject_to,
                                     stc_from.vertno,
                                     vertices_to,
                                     smooth=12,
                                     subjects_dir=subjects_dir)
    stc_to3 = stc_from.morph_precomputed(subject_to, vertices_to, morph_mat)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert_true(np.corrcoef(mean_to, mean_from).min() > 0.999)

    # make sure we can fill by morphing
    stc_to5 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=None,
                         smooth=12,
                         buffer_size=3,
                         subjects_dir=subjects_dir)
    assert_true(stc_to5.data.shape[0] == 163842 + 163842)

    # test morphing to the same subject
    stc_to6 = stc_from.morph(subject_from,
                             grade=stc_from.vertno,
                             smooth=1,
                             subjects_dir=subjects_dir)
    mask = np.ones(stc_from.data.shape[0], dtype=bool)
    # XXX: there is a bug somewhere that causes a difference at 2 vertices..
    mask[6799] = False
    mask[6800] = False
    assert_array_almost_equal(stc_from.data[mask], stc_to6.data[mask], 5)

    # Morph sparse data
    # Make a sparse stc
    stc_from.vertno[0] = stc_from.vertno[0][[100, 500]]
    stc_from.vertno[1] = stc_from.vertno[1][[200]]
    stc_from._data = stc_from._data[:3]

    assert_raises(RuntimeError,
                  stc_from.morph,
                  subject_to,
                  sparse=True,
                  grade=5,
                  subjects_dir=subjects_dir)

    stc_to_sparse = stc_from.morph(subject_to,
                                   grade=None,
                                   sparse=True,
                                   subjects_dir=subjects_dir)
    assert_array_almost_equal(np.sort(stc_from.data.sum(axis=1)),
                              np.sort(stc_to_sparse.data.sum(axis=1)))
    assert_equal(len(stc_from.rh_vertno), len(stc_to_sparse.rh_vertno))
    assert_equal(len(stc_from.lh_vertno), len(stc_to_sparse.lh_vertno))
    assert_equal(stc_to_sparse.subject, subject_to)
    assert_equal(stc_to_sparse.tmin, stc_from.tmin)
    assert_equal(stc_to_sparse.tstep, stc_from.tstep)
Example #32
def test_morph_stc_sparse():
    """Test morphing stc with sparse=True."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    # Morph sparse data
    # Make a sparse stc
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_from.vertices[0] = stc_from.vertices[0][[100, 500]]
    stc_from.vertices[1] = stc_from.vertices[1][[200]]
    stc_from._data = stc_from._data[:3]

    stc_to_sparse = compute_source_morph(
        stc_from,
        subject_from=subject_from,
        subject_to=subject_to,
        spacing=None,
        sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    stc_from.vertices[0] = np.array([], dtype=np.int64)
    stc_from._data = stc_from._data[:1]

    stc_to_sparse = compute_source_morph(
        stc_from,
        subject_from,
        subject_to,
        spacing=None,
        sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    # Degenerate cases
    with pytest.raises(ValueError, match='spacing must be set to None'):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             subject_to=subject_to,
                             spacing=5,
                             sparse=True,
                             subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='xhemi=True can only be used with'):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             subject_to=subject_to,
                             spacing=None,
                             sparse=True,
                             xhemi=True,
                             subjects_dir=subjects_dir)
# That's why I collected the stc.data.T of every subject into an X_freq_diff
# array to do the cluster stats on; however, I only saved that X_....npy for
# the gamma_high freq band and for all the label analyses. The good thing is
# that I can re-order the subjects first here, then collect the X together,
# and save X in that order. :)
# 'proc_dir', 'meg_dir', 'subjs' and 'freqs' are assumed to be defined earlier
# in this script; pandas (pd), numpy (np), scipy.stats and mne are assumed to
# be imported.

# get the behavioral data array ready & choose the variable
N_behav = pd.read_csv('{}NEMO_behav.csv'.format(proc_dir))
Behav = np.array(N_behav['Ton_Ang'])

for freq, vals in freqs.items():

    # prepare the data arrays / objects needed
    all_diff_plot = []  # list for averaging and plotting group STC
    X_diff = []  #  list for collecting data for cluster stat analyses
    for sub in subjs:
        # load the STC data
        stc_fsavg_diff = mne.read_source_estimate("{dir}nc_{sub}_stc_fsavg_diff_{freq}".format(dir=meg_dir,sub=sub,freq=freq), subject='fsaverage')
        # collect the individual stcs into lists
        all_diff_plot.append(stc_fsavg_diff)
        X_diff.append(stc_fsavg_diff.data.T)
    # create group average stc for plotting later
    stc_sum = all_diff_plot.pop()
    for stc in all_diff_plot:
        stc_sum = stc_sum + stc
    NEM_all_stc_diff = stc_sum / len(subjs)
    # make data array for cluster permutation stats N-P stc vals
    X_diff = np.array(X_diff).squeeze()
    # calculate Pearson's r for each vertex to Behavioral variable of the subject
    X_Rval = np.empty(X_diff.shape[1])
    X_R_Tval = np.empty(X_diff.shape[1])
    for vert_idx in range(X_diff.shape[1]):
        X_Rval[vert_idx], p = stats.pearsonr(X_diff[:, vert_idx], Behav)
Example #34
def test_stc_to_label():
    """Test stc_to_label
    """
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        src = read_source_spaces(fwd_fname)
    src_bad = read_source_spaces(src_bad_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = _stc_to_label(stc, src='sample', smooth=3)
    labels2 = _stc_to_label(stc, src=src, smooth=3)
    assert_equal(len(labels1), len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_lh, labels_rh = stc_to_label(stc,
                                            src=src,
                                            smooth=True,
                                            connected=True)

    assert_true(len(w) > 0)
    assert_raises(ValueError,
                  stc_to_label,
                  stc,
                  'sample',
                  smooth=True,
                  connected=True)
    assert_raises(RuntimeError,
                  stc_to_label,
                  stc,
                  smooth=True,
                  src=src_bad,
                  connected=True)
    assert_equal(len(labels_lh), 1)
    assert_equal(len(labels_rh), 1)

    # test getting tris
    tris = labels_lh[0].get_tris(src[0]['use_tris'], vertices=stc.vertices[0])
    assert_raises(ValueError,
                  spatial_tris_connectivity,
                  tris,
                  remap_vertices=False)
    connectivity = spatial_tris_connectivity(tris, remap_vertices=True)
    assert_true(connectivity.shape[0] == len(stc.vertices[0]))

    # "src" as a subject name
    assert_raises(TypeError,
                  stc_to_label,
                  stc,
                  src=1,
                  smooth=False,
                  connected=False,
                  subjects_dir=subjects_dir)
    assert_raises(ValueError,
                  stc_to_label,
                  stc,
                  src=SourceSpaces([src[0]]),
                  smooth=False,
                  connected=False,
                  subjects_dir=subjects_dir)
    assert_raises(ValueError,
                  stc_to_label,
                  stc,
                  src='sample',
                  smooth=False,
                  connected=True,
                  subjects_dir=subjects_dir)
    assert_raises(ValueError,
                  stc_to_label,
                  stc,
                  src='sample',
                  smooth=True,
                  connected=False,
                  subjects_dir=subjects_dir)
    labels_lh, labels_rh = stc_to_label(stc,
                                        src='sample',
                                        smooth=False,
                                        connected=False,
                                        subjects_dir=subjects_dir)
    assert_true(len(labels_lh) > 1)
    assert_true(len(labels_rh) > 1)

    # with smooth='patch'
    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_patch = stc_to_label(stc, src=src, smooth=True)
    assert_equal(len(w), 1)
    assert_equal(len(labels_patch), len(labels1))
    for l1, l2 in zip(labels_patch, labels1):
        assert_labels_equal(l1, l2, decimal=4)
Example #35
0
]

SUBJECTS = SUBJ_ASD + SUBJ_NT
PATHfrom = '/net/server/data/Archive/aut_gamma/orekhova/KI/'
myPATH = '/net/server/data/Archive/aut_gamma/orekhova/KI/Scripts_bkp/Shishkina/KI/'
subjects_dir = PATHfrom + 'freesurfersubjects'

# for the NT group
X_sum = []
for subject in SUBJ_NT:

    subjpath = PATHfrom + 'SUBJECTS/' + subject + '/ICA_nonotch_crop/epochs/'
    savepath = myPATH + 'Results_Alpha_and_Gamma/'

    # load stcs
    sum_csp = mne.read_source_estimate(savepath + '1_results/CSP_sum/' +
                                       subject + 'sum_CSP_1_6_diff_V3-V1_old')

    # set up SourceMorph for SourceEstimate
    morph = mne.read_source_morph(savepath + '1_results/morph_CSP/' + subject +
                                  'CSP-morph.h5')

    # apply morph to SourceEstimate
    sum_csp_fsaverage = morph.apply(sum_csp)
    X_sum.append(sum_csp_fsaverage.data)

# average across freqs and subjects
X = np.asarray(X_sum)
X_avg_freq = np.mean(X, axis=2)
X_avg_group = np.mean(X_avg_freq, axis=0)
X_avg = X_avg_group[:, np.newaxis]
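
# Hypothetical follow-up (not part of the original snippet): the averaged
# column vector can be wrapped into a SourceEstimate for plotting; the ico-5
# fsaverage vertex lists below are an assumption.
stc_avg = mne.SourceEstimate(X_avg, [np.arange(10242), np.arange(10242)],
                             tmin=0, tstep=1, subject='fsaverage')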
Example #36
0
import mne
from mne.time_frequency import csd_morlet
from mne.beamformer import make_dics, apply_dics_csd
import numpy as np
from itertools import product
import pandas as pd

import config
from config import fname
from utils import make_dipole, evaluate_stc

# Read in the simulated data
stc_signal = mne.read_source_estimate(
    fname.stc_signal(noise=config.noise, vertex=config.vertex))
epochs = mne.read_epochs(
    fname.simulated_epochs(noise=config.noise, vertex=config.vertex))
fwd = mne.read_forward_solution(fname.fwd)

# For pick_ori='normal', the fwd needs to be in surface orientation
fwd = mne.convert_forward_solution(fwd, surf_ori=True)

# The DICS beamformer currently only uses one sensor type
epochs_grad = epochs.copy().pick_types(meg='grad')
epochs_mag = epochs.copy().pick_types(meg='mag')

# Make CSD matrix
csd = csd_morlet(epochs, [config.signal_freq])

# Compute the settings grid
regs = [0.05, 0.1, 0.5]
sensor_types = ['grad', 'mag']
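
# Hypothetical continuation (the snippet is cut off here): the itertools
# import above suggests the settings grid enumerates every (reg, sensor_type)
# combination, e.g.:
settings = list(product(regs, sensor_types))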
Example #37
0
def grouplevel_spatial_stats(ListSubj,condition,method,mod,twin,clust_p):
    
    wdir = "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG"
        
    # load a specific STC morphed on fsaverage to get shape info
    stc0_path = (wdir + '/' + ListSubj[0] + '/mne_python/STCS/IcaCorr_' + mod + 
               '_' + ListSubj[0] + '_' + condition[0] + '_pick_oriNone_' + 
                method + '_ico-5-fwd-fsaverage.fif-rh.stc')
    stc0      = mne.read_source_estimate(stc0_path)
    stc0.crop(-0.2,2.5)
    ncond     = len(condition)
    nsub      = len(ListSubj)
    ntimes    = len(stc0.times)    
    nvertices = stc0.data.shape[0]        
            
    # average individual STCs morphed on fsaverage for each cond
    AVG_STC_cond  = np.empty([nvertices, ntimes,  nsub, ncond])
    
    for s,subj in enumerate(ListSubj):            
        for c,cond in enumerate(condition):
            stc_path = (wdir + '/' + subj + '/mne_python/STCS/IcaCorr_' + mod +
                        '_' + subj + '_' + cond + '_pick_oriNone_' + 
                        method + '_ico-5-fwd-fsaverage.fif-rh.stc')
            stc = mne.read_source_estimate(stc_path) 
            stc.crop(-0.2,2.5)
            AVG_STC_cond[:,:,s,c] = stc.data

    # optional: restrict computation to temporal window of interest
    lower_bound = np.where(stc0.times >= twin[0])[0][0]
    upper_bound = np.where(stc0.times >= twin[1])[0][0]
    
    con = mne.spatial_tris_connectivity(grade_to_tris(5))
    # array of shapes (obs, time, vertices)
    X = []
    for c,cond in enumerate(condition):
        X.append(np.mean(np.transpose(AVG_STC_cond[:,lower_bound: upper_bound,:,c], [2, 1, 0]),1))
 
    effects = 'A'
    factor_levels = [3]
   
    # get f-values only.
    def mystat_fun(*args):
        return f_mway_rm(np.swapaxes(args, 1, 0), factor_levels=factor_levels,
                         effects=effects, return_pvals=False)[0]

    p_threshold = clust_p
    f_threshold = f_threshold_mway_rm(nsub, factor_levels=factor_levels,
                                      effects=effects, pvalue=p_threshold)
    
    F_obs, clu, clu_p_val, H0 = mne.stats.permutation_cluster_test(
        X, threshold=f_threshold, stat_fun=mystat_fun, connectivity=con,
        n_jobs=1, verbose=True, seed=666)
    
    wdir = "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/"
    save_path = (wdir+'GROUP/mne_python/Plot_STATS/' + "_vs_".join(condition))  
    
    if not os.path.exists(save_path):
        os.makedirs(save_path)  
    
    # save cluster stats
    spatial_clust_F = np.array((F_obs, clu, clu_p_val, H0))
    np.save((save_path+'/' + mod + '_' +'cluster_stats_f_'+ "_vs_".join(condition)),
            spatial_clust_F)        
    
    # save F-Map                                                
    tmp = F_obs[:, np.newaxis]
    fsave_vertices = [np.arange(10242), np.arange(10242)]
    stc_Ftest = mne.SourceEstimate(tmp, fsave_vertices, 0, stc.tstep)
    stc_Ftest.save((save_path + '/fmap'  + mod + '_' + "_vs_".join(condition)))
    
    return F_obs, clu, clu_p_val, H0,stc0   
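
# Hypothetical invocation of the function above (the argument values are
# illustrative placeholders, not from the original script; factor_levels=[3]
# implies three conditions):
# F_obs, clu, clu_p_val, H0, stc0 = grouplevel_spatial_stats(
#     ListSubj, ['cond1', 'cond2', 'cond3'], 'dSPM', 'meg', (0.0, 0.5), 0.05)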
Example #38
0
        else:
            inv = read_inverse_operator(fn_inv)

        evoked = epochs.average()

        # Compute inverse solution for evoked data
        fn_stc = fn_epo.rsplit('-epo.fif')[0] + ',ave'

        if not op.isfile(fn_stc + '-lh.stc'):
            stc = apply_inverse(evoked, inv, lambda2, method, pick_ori=None)
            stc.save(fn_stc)
            print('Saved...', fn_stc + '-lh.stc')

        if plot_stc:

            stc = mne.read_source_estimate(fn_stc + '-lh.stc')
            pos, t_peak = stc.get_peak(hemi=None, tmin=0., tmax=0.5,
                                       mode='abs')
            brain = stc.plot(subject=subj, surface='inflated', hemi='both',
                             colormap='auto', time_label='auto',
                             subjects_dir=subjects_dir, figure=None,
                             colorbar=True, clim='auto', initial_time=t_peak,
                             time_viewer=time_viewer)
            stc_plot_fname = op.join(basedir, 'plots', op.basename(fn_stc) + ',plot.png')
            time.sleep(1)

            if not time_viewer:
                # works only if time viewer is disabled
                brain.save_montage(stc_plot_fname, order=['lat', 'dor', 'med'])
                brain.close()
                time.sleep(1)
Example #39
0
    # stc_SD_words_all = C.stc_SD_words_all
    # stc_LD_words_all = C.stc_LD_words_all
    # stc_SD_LD_words_all = C.stc_SD_LD_words_all

    for i in range(len(subjects)):

        print('participant : ', i, C.signal_mode[n])
        subject_from = subjects[i]
        meg = subjects[i]


        fname_SD_words_fsaverage = C.data_path + meg + 'block_SD_words_'+\
                                   C.signal_mode[n]+'_fsaverage'
        fname_LD_words_fsaverage = C.data_path + meg + 'block_LD_words_'+\
                                   C.signal_mode[n]+'_fsaverage'
        stc_SD_words = mne.read_source_estimate(fname_SD_words_fsaverage)
        stc_LD_words = mne.read_source_estimate(fname_LD_words_fsaverage)
        if (i == 0):
            stc_SD_all_words = stc_SD_words
            stc_LD_all_words = stc_LD_words
        else:
            stc_SD_all_words = stc_SD_all_words + stc_SD_words
            stc_LD_all_words = stc_LD_all_words + stc_LD_words

# a=stc_SD_all_words.copy().crop(0.100,0.500).mean().data.squeeze()
# brain_SD = a.plot(surface='inflated', hemi='lh',subject = 'fsaverage',
#           subjects_dir=data_path,time_viewer =False,title=
#           'SD_words_all',colorbar=False, size=(500, 400))

stc_SD_all_words = stc_SD_all_words / len(subjects)
stc_LD_all_words = stc_LD_all_words / len(subjects)
Example #40
0
                        loo_str = 'loo%s' % str(int(100 * loose))

                        if depth is None:
                            depth = 0
                        dep_str = 'dep%s' % str(int(100 * depth))

                    stctext1 = '%s_%s_%s' % (function, metric, method1)

                    stctext2 = '%s_%s_%s' % (function, metric, method2)

                    fname_stc1 = C.fname_STC(C, C.resolution_subdir, subject,
                                             stctext1)
                    fname_stc2 = C.fname_STC(C, C.resolution_subdir, subject,
                                             stctext2)

                    stc1 = mne.read_source_estimate(fname_stc1)
                    stc2 = mne.read_source_estimate(fname_stc2)

                    # Compute difference distributions for metrics
                    stc_diff = stc1 - stc2

                    contr_str = '%s-%s' % (method1, method2)
                    stctext3 = '%s_%s_%s' % (function, metric, contr_str)

                    fname_stc3 = C.fname_STC(C, C.resolution_subdir, subject,
                                             stctext3)

                    # save STC with difference distribution
                    print('Writing difference distribution to %s.' %
                          fname_stc3)
                    stc_diff.save(fname_stc3)
Example #41
0
    lbl_cur_inds = get_label_active_indices(lbl_path + label)
    lbl_cur_inds = np.array(lbl_cur_inds)
    print(len(lbl_cur_inds))
    print(lbl_cur_inds)

    lbl_name = label.replace('-lh.label', '')

    for subject_idx, subject in enumerate(subjects):
        #print("\n({:2}/{:2}) reading {}\n".format(subject_idx+1, len(subjects), subject))

        for t, timing in enumerate(timings):
            #print("\n({:2}/{:2}) reading {}\n".format(t+1, len(timings), timing))

            for d, difference_type in enumerate(difference):
                stc = mne.read_source_estimate(
                    TFCE_data_path.format(subject, difference_type, timing))
                # zero out everything outside the label's active vertices
                masked_data = np.zeros(stc.data.shape)
                masked_data[lbl_cur_inds, :] = stc.data[lbl_cur_inds, :]

                stc_mask = mne.SourceEstimate(data=masked_data,
                                              vertices=stc.vertices,
                                              tmin=stc.tmin,
                                              tstep=stc.tstep)
                stc_mask.save(
                    target_files_format.format(subject, difference_type,
                                               timing, lbl_name))
Example #42
0
def test_sensitivity_maps():
    """Test sensitivity map computation."""
    fwd = mne.read_forward_solution(fwd_fname, surf_ori=True)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        projs = read_proj(eog_fname)
        projs.extend(read_proj(ecg_fname))
    decim = 6
    for ch_type in ['eeg', 'grad', 'mag']:
        w = read_source_estimate(sensmap_fname % (ch_type, 'lh')).data
        stc = sensitivity_map(fwd,
                              projs=None,
                              ch_type=ch_type,
                              mode='free',
                              exclude='bads')
        assert_array_almost_equal(stc.data, w, decim)
        assert_true(stc.subject == 'sample')
        # let's just make sure the others run
        if ch_type == 'grad':
            # fixed (2)
            w = read_source_estimate(sensmap_fname % (ch_type, '2-lh')).data
            stc = sensitivity_map(fwd,
                                  projs=None,
                                  mode='fixed',
                                  ch_type=ch_type,
                                  exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'mag':
            # ratio (3)
            w = read_source_estimate(sensmap_fname % (ch_type, '3-lh')).data
            stc = sensitivity_map(fwd,
                                  projs=None,
                                  mode='ratio',
                                  ch_type=ch_type,
                                  exclude='bads')
            assert_array_almost_equal(stc.data, w, decim)
        if ch_type == 'eeg':
            # radiality (4), angle (5), remaining (6), and  dampening (7)
            modes = ['radiality', 'angle', 'remaining', 'dampening']
            ends = ['4-lh', '5-lh', '6-lh', '7-lh']
            for mode, end in zip(modes, ends):
                w = read_source_estimate(sensmap_fname % (ch_type, end)).data
                stc = sensitivity_map(fwd,
                                      projs=projs,
                                      mode=mode,
                                      ch_type=ch_type,
                                      exclude='bads')
                assert_array_almost_equal(stc.data, w, decim)

    # test corner case for EEG
    stc = sensitivity_map(fwd,
                          projs=[make_eeg_average_ref_proj(fwd['info'])],
                          ch_type='eeg',
                          exclude='bads')
    # test corner case for projs being passed but no valid ones (#3135)
    assert_raises(ValueError, sensitivity_map, fwd, projs=None, mode='angle')
    assert_raises(RuntimeError, sensitivity_map, fwd, projs=[], mode='angle')
    # test volume source space
    fname = op.join(sample_path, 'sample_audvis_trunc-meg-vol-7-fwd.fif')
    fwd = mne.read_forward_solution(fname)
    sensitivity_map(fwd)
Example #43
0
def test_morph_stc_dense():
    """Test morphing stc."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_to = read_source_estimate(fname_fmorph)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    assert_array_equal(stc_to.time_as_index([0.09, 0.1], use_rounding=True),
                       [0, len(stc_to.times) - 1])

    # After the deprecation period, change this to:
    morph = compute_source_morph(subject_to=subject_to,
                                 spacing=3,
                                 smooth=12,
                                 src=stc_from,
                                 subjects_dir=subjects_dir,
                                 precompute=True)
    assert morph.vol_morph_mat is None  # a no-op for surface
    stc_to1 = morph.apply(stc_from)
    assert_allclose(stc_to.data, stc_to1.data, atol=1e-5)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert np.corrcoef(mean_to, mean_from).min() > 0.999

    vertices_to = grade_to_vertices(subject_to,
                                    grade=3,
                                    subjects_dir=subjects_dir)

    # make sure we can fill by morphing
    with pytest.warns(RuntimeWarning, match='consider increasing'):
        morph = compute_source_morph(stc_from,
                                     subject_from,
                                     subject_to,
                                     spacing=None,
                                     smooth=1,
                                     subjects_dir=subjects_dir)
    stc_to5 = morph.apply(stc_from)
    assert stc_to5.data.shape[0] == 163842 + 163842

    # Morph vector data
    stc_vec = _real_vec_stc()
    stc_vec_to1 = compute_source_morph(stc_vec,
                                       subject_from,
                                       subject_to,
                                       subjects_dir=subjects_dir,
                                       spacing=vertices_to,
                                       smooth=1,
                                       warn=False).apply(stc_vec)
    assert stc_vec_to1.subject == subject_to
    assert stc_vec_to1.tmin == stc_vec.tmin
    assert stc_vec_to1.tstep == stc_vec.tstep
    assert len(stc_vec_to1.lh_vertno) == 642
    assert len(stc_vec_to1.rh_vertno) == 642

    # Degenerate conditions

    # Morphing to a density that is too high should raise an informative error
    # (here we need to push to grade=6, but for some subjects even grade=5
    # will break)
    with pytest.raises(ValueError, match='Cannot use icosahedral grade 6 '):
        compute_source_morph(stc_to1,
                             subject_from=subject_to,
                             subject_to=subject_from,
                             spacing=6,
                             subjects_dir=subjects_dir)
    del stc_to1

    with pytest.raises(ValueError, match='smooth.* has to be at least 0'):
        compute_source_morph(stc_from,
                             subject_from,
                             subject_to,
                             spacing=5,
                             smooth=-1,
                             subjects_dir=subjects_dir)

    # subject from mismatch
    with pytest.raises(ValueError, match="subject_from does not match"):
        compute_source_morph(stc_from,
                             subject_from='foo',
                             subjects_dir=subjects_dir)

    # only one set of vertices
    with pytest.raises(ValueError, match="grade.*list must have two elements"):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             spacing=[vertices_to[0]],
                             subjects_dir=subjects_dir)
Example #44
0
print(__doc__)

###############################################################################
# Setup paths

sample_dir_raw = sample.data_path()
sample_dir = os.path.join(sample_dir_raw, 'MEG', 'sample')
subjects_dir = os.path.join(sample_dir_raw, 'subjects')

fname_stc = os.path.join(sample_dir, 'sample_audvis-meg')

###############################################################################
# Load example data

# Read stc from file
stc = mne.read_source_estimate(fname_stc, subject='sample')

###############################################################################
# Setting up SourceMorph for SourceEstimate
# -----------------------------------------
#
# In MNE surface source estimates represent the source space simply as
# lists of vertices (see
# :ref:`tut-source-estimate-class`).
# This list can either be obtained from
# :class:`mne.SourceSpaces` (src) or from the ``stc`` itself.
#
# Since the default ``spacing`` (resolution of surface mesh) is ``5`` and
# ``subject_to`` is set to 'fsaverage', :class:`mne.SourceMorph` will use
# default ico-5 ``fsaverage`` vertices to morph, which are the special
# values ``[np.arange(10242)] * 2``.
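
# A minimal sketch of the morph just described (an illustration added here,
# not part of the original tutorial): morph the 'sample' estimate read above
# to fsaverage using the default ico-5 target vertices.
morph = mne.compute_source_morph(stc, subject_from='sample',
                                 subject_to='fsaverage',
                                 subjects_dir=subjects_dir)
stc_fsaverage = morph.apply(stc)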
Example #45
0
def run_report(subject, session=None):
    bids_path = BIDSPath(subject=subject,
                         session=session,
                         task=config.get_task(),
                         acquisition=config.acq,
                         run=None,
                         recording=config.rec,
                         space=config.space,
                         extension='.fif',
                         datatype=config.get_datatype(),
                         root=config.deriv_root,
                         check=False)

    fname_ave = bids_path.copy().update(suffix='ave')
    fname_trans = bids_path.copy().update(suffix='trans')
    fname_epo = bids_path.copy().update(suffix='epo')
    fname_ica = bids_path.copy().update(suffix='ica')
    fname_decoding = fname_epo.copy().update(suffix='decoding',
                                             extension='.mat')

    fs_subject = config.get_fs_subject(subject)
    fs_subjects_dir = config.get_fs_subjects_dir()

    params: Dict[str, Any] = dict(info_fname=fname_ave,
                                  raw_psd=True,
                                  subject=fs_subject)
    if op.exists(fname_trans):
        params['subjects_dir'] = fs_subjects_dir

    rep = mne.Report(**params)
    rep_kwargs: Dict[str, Any] = dict(data_path=fname_ave.fpath.parent,
                                      verbose=False)
    if not op.exists(fname_trans):
        rep_kwargs['render_bem'] = False

    task = config.get_task()
    if task is not None:
        rep_kwargs['pattern'] = f'*_task-{task}*'
    if mne.viz.get_3d_backend() is not None:
        with mne.viz.use_3d_backend('pyvista'):
            rep.parse_folder(**rep_kwargs)
    else:
        rep.parse_folder(**rep_kwargs)

    # Visualize automated noisy channel detection.
    if config.find_noisy_channels_meg:
        figs, captions = plot_auto_scores(subject=subject, session=session)
        rep.add_figs_to_section(figs=figs,
                                captions=captions,
                                section='Data Quality')

    # Visualize events.
    events_fig = plot_events(subject=subject, session=session)
    rep.add_figs_to_section(figs=events_fig,
                            captions='Events in filtered continuous data',
                            section='Events')

    ###########################################################################
    #
    # Visualize effect of ICA artifact rejection.
    #
    if config.use_ica:
        epochs = mne.read_epochs(fname_epo)
        ica = mne.preprocessing.read_ica(fname_ica)
        fig = ica.plot_overlay(epochs.average(), show=False)
        rep.add_figs_to_section(
            fig,
            captions=f'Evoked response (across all epochs) '
            f'before and after ICA '
            f'({len(ica.exclude)} ICs removed)',
            section='ICA')

    ###########################################################################
    #
    # Visualize evoked responses.
    #
    if isinstance(config.conditions, dict):
        conditions = list(config.conditions.keys())
    else:
        conditions = config.conditions.copy()

    conditions.extend(config.contrasts)
    evokeds = mne.read_evokeds(fname_ave)
    if config.analyze_channels:
        for evoked in evokeds:
            evoked.pick(config.analyze_channels)

    for condition, evoked in zip(conditions, evokeds):
        if condition in config.conditions:
            caption = f'Condition: {condition}'
            section = 'Evoked'
        else:  # It's a contrast of two conditions.
            caption = f'Contrast: {condition[0]} – {condition[1]}'
            section = 'Contrast'

        fig = evoked.plot(spatial_colors=True, gfp=True, show=False)
        rep.add_figs_to_section(figs=fig,
                                captions=caption,
                                comments=evoked.comment,
                                section=section)

    ###########################################################################
    #
    # Visualize decoding results.
    #
    if config.decode:
        epochs = mne.read_epochs(fname_epo)

        for contrast in config.contrasts:
            cond_1, cond_2 = contrast
            a_vs_b = f'{cond_1}+{cond_2}'.replace(op.sep, '')
            processing = f'{a_vs_b}+{config.decoding_metric}'
            processing = processing.replace('_', '-').replace('-', '')
            fname_decoding_ = (fname_decoding.copy().update(
                processing=processing))
            decoding_data = loadmat(fname_decoding_)
            del fname_decoding_, processing, a_vs_b

            fig = plot_decoding_scores(
                times=epochs.times,
                cross_val_scores=decoding_data['scores'],
                metric=config.decoding_metric)

            caption = f'Time-by-time Decoding: {cond_1} ./. {cond_2}'
            comment = (f'{len(epochs[cond_1])} × {cond_1} ./. '
                       f'{len(epochs[cond_2])} × {cond_2}')
            rep.add_figs_to_section(figs=fig,
                                    captions=caption,
                                    comments=comment,
                                    section='Decoding')
            del decoding_data, cond_1, cond_2, caption, comment

        del epochs

    ###########################################################################
    #
    # Visualize the coregistration & inverse solutions.
    #
    evokeds = mne.read_evokeds(fname_ave)

    if op.exists(fname_trans):
        # We can only plot the coregistration if we have a valid 3d backend.
        if mne.viz.get_3d_backend() is not None:
            fig = mne.viz.plot_alignment(evoked.info,
                                         fname_trans,
                                         subject=fs_subject,
                                         subjects_dir=fs_subjects_dir,
                                         meg=True,
                                         dig=True,
                                         eeg=True)
            rep.add_figs_to_section(figs=fig,
                                    captions='Coregistration',
                                    section='Coregistration')
        else:
            msg = ('Cannot render sensor alignment (coregistration) because '
                   'no usable 3d backend was found.')
            logger.warning(
                gen_log_message(message=msg,
                                step=99,
                                subject=subject,
                                session=session))

        for condition, evoked in zip(conditions, evokeds):
            msg = f'Rendering inverse solution for {evoked.comment} …'
            logger.info(
                gen_log_message(message=msg,
                                step=99,
                                subject=subject,
                                session=session))

            if condition in config.conditions:
                full_condition = config.sanitize_cond_name(evoked.comment)
                caption = f'Condition: {full_condition}'
                del full_condition
            else:  # It's a contrast of two conditions.
                # XXX Will change once we process contrasts here too
                continue

            method = config.inverse_method
            cond_str = config.sanitize_cond_name(condition)
            inverse_str = method
            hemi_str = 'hemi'  # MNE will auto-append '-lh' and '-rh'.

            fname_stc = bids_path.copy().update(
                suffix=f'{cond_str}+{inverse_str}+{hemi_str}', extension=None)

            if op.exists(str(fname_stc) + "-lh.stc"):
                stc = mne.read_source_estimate(fname_stc, subject=fs_subject)
                _, peak_time = stc.get_peak()

                # Plot using 3d backend if available, and use Matplotlib
                # otherwise.
                import matplotlib.pyplot as plt

                if mne.viz.get_3d_backend() is not None:
                    brain = stc.plot(views=['lat'],
                                     hemi='split',
                                     initial_time=peak_time,
                                     backend='pyvista',
                                     time_viewer=True,
                                     subjects_dir=fs_subjects_dir)
                    brain.toggle_interface()
                    brain._renderer.plotter.reset_camera()
                    brain._renderer.plotter.subplot(0, 0)
                    brain._renderer.plotter.reset_camera()
                    figs, ax = plt.subplots(figsize=(15, 10))
                    ax.imshow(brain.screenshot(time_viewer=True))
                    ax.axis('off')
                    comments = evoked.comment
                    captions = caption
                else:
                    fig_lh = plt.figure()
                    fig_rh = plt.figure()

                    brain_lh = stc.plot(views='lat',
                                        hemi='lh',
                                        initial_time=peak_time,
                                        backend='matplotlib',
                                        subjects_dir=fs_subjects_dir,
                                        figure=fig_lh)
                    brain_rh = stc.plot(views='lat',
                                        hemi='rh',
                                        initial_time=peak_time,
                                        subjects_dir=fs_subjects_dir,
                                        backend='matplotlib',
                                        figure=fig_rh)
                    figs = [brain_lh, brain_rh]
                    comments = [
                        f'{evoked.comment} - left hemisphere',
                        f'{evoked.comment} - right hemisphere'
                    ]
                    captions = [f'{caption} - left', f'{caption} - right']

                rep.add_figs_to_section(figs=figs,
                                        captions=captions,
                                        comments=comments,
                                        section='Sources')
                del peak_time

    if config.process_er:
        fig_er_psd = plot_er_psd(subject=subject, session=session)
        rep.add_figs_to_section(figs=fig_er_psd,
                                captions='Empty-Room Power Spectral Density '
                                '(after filtering)',
                                section='Empty-Room')

    fname_report = bids_path.copy().update(suffix='report', extension='.html')
    rep.save(fname=fname_report, open_browser=False, overwrite=True)
    import matplotlib.pyplot as plt  # nested import to help joblib
    plt.close('all')  # close all figures to save memory
Example #46
0
sub = ["ATT_10"]
subjects_dir = "/home/jeff/freesurfer/subjects/"
proc_dir = "../proc/"
all_conds = [["audio", "rest"], ["visual", "rest"], ["audio", "visual"],
             ["visselten", "audio"], ["zaehlen", "rest"]]
all_conds = [["visselten", "audio"]]
threshold = 0.99
lower = 3e-27
upper = 3e-26
avg_clim = {"kind": "value", "lims": [lower, (upper - lower) / 2, upper]}
avg_clim = "auto"

src = mne.read_source_spaces(proc_dir + sub + "-src.fif")
filename = proc_dir + "stcs/" + sub
for conds in all_conds:
    stc_a = mne.read_source_estimate("{dir}stcs/nc_{a}_{b}_mean-lh.stc".format(
        dir=proc_dir, a=sub, b=conds[0]))
    stc_b = mne.read_source_estimate("{dir}stcs/nc_{a}_{b}_mean-lh.stc".format(
        dir=proc_dir, a=sub, b=conds[1]))
    stc_c = stc_a - stc_b
    favg = mlab.figure()
    stc_c.plot(sub_key[sub], hemi="both", figure=favg, clim=avg_clim)
    cnx = mne.spatial_src_connectivity(src)
    X = [
        np.load("{dir}stcs/nc_{a}_{b}_stc.npy".format(dir=proc_dir,
                                                      a=sub,
                                                      b=conds[0])),
        np.load("{dir}stcs/nc_{a}_{b}_stc.npy".format(dir=proc_dir,
                                                      a=sub,
                                                      b=conds[1]))
    ]
    X = [x.transpose(0, 2, 1) for x in X]
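
    # Hypothetical continuation (the snippet is cut off here): with the
    # source adjacency cnx and the two condition arrays in X, a cluster-level
    # permutation test along these lines could follow:
    # t_obs, clusters, cluster_pv, H0 = mne.stats.spatio_temporal_cluster_test(
    #     X, connectivity=cnx, n_permutations=1024, n_jobs=1)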
Example #47
0
def test_brain(renderer):
    """Test initialization of the _Brain instance."""
    from mne.label import read_label
    hemi = 'lh'
    surf = 'inflated'
    cortex = 'low_contrast'
    title = 'test'
    size = (300, 300)

    with pytest.raises(ValueError, match='"size" parameter must be'):
        _Brain(subject_id=subject_id, hemi=hemi, surf=surf, size=[1, 2, 3])
    with pytest.raises(TypeError, match='figure'):
        _Brain(subject_id=subject_id, hemi=hemi, surf=surf, figure='foo')
    with pytest.raises(ValueError, match='interaction'):
        _Brain(subject_id=subject_id, hemi=hemi, surf=surf, interaction=0)
    with pytest.raises(KeyError):
        _Brain(subject_id=subject_id, hemi='foo', surf=surf)

    brain = _Brain(subject_id,
                   hemi=hemi,
                   surf=surf,
                   size=size,
                   subjects_dir=subjects_dir,
                   title=title,
                   cortex=cortex)
    # add_data
    stc = read_source_estimate(fname_stc)
    fmin = stc.data.min()
    fmax = stc.data.max()
    for h in brain._hemis:
        if h == 'lh':
            hi = 0
        else:
            hi = 1
        hemi_data = stc.data[:len(stc.vertices[hi]), 10]
        hemi_vertices = stc.vertices[hi]

        with pytest.raises(TypeError, match='scale_factor'):
            brain.add_data(hemi_data, hemi=h, scale_factor='foo')
        with pytest.raises(TypeError, match='vector_alpha'):
            brain.add_data(hemi_data, hemi=h, vector_alpha='foo')
        with pytest.raises(ValueError, match='thresh'):
            brain.add_data(hemi_data, hemi=h, thresh=-1)
        with pytest.raises(ValueError, match='remove_existing'):
            brain.add_data(hemi_data, hemi=h, remove_existing=-1)
        with pytest.raises(ValueError, match='time_label_size'):
            brain.add_data(hemi_data, hemi=h, time_label_size=-1)
        with pytest.raises(ValueError, match='is positive'):
            brain.add_data(hemi_data, hemi=h, smoothing_steps=-1)
        with pytest.raises(TypeError, match='int or NoneType'):
            brain.add_data(hemi_data, hemi=h, smoothing_steps='foo')
        with pytest.raises(ValueError):
            brain.add_data(array=np.array([0, 1, 2]), hemi=h)
        with pytest.raises(ValueError):
            brain.add_data(hemi_data,
                           fmin=fmin,
                           hemi=hemi,
                           fmax=fmax,
                           vertices=None)

        brain.add_data(hemi_data,
                       fmin=fmin,
                       hemi=h,
                       fmax=fmax,
                       colormap='hot',
                       vertices=hemi_vertices,
                       smoothing_steps='nearest',
                       colorbar=False,
                       time=None)
        brain.add_data(hemi_data,
                       fmin=fmin,
                       hemi=h,
                       fmax=fmax,
                       colormap='hot',
                       vertices=hemi_vertices,
                       smoothing_steps=1,
                       initial_time=0.,
                       colorbar=False,
                       time=None)

    # add label
    label = read_label(fname_label)
    brain.add_label(label, scalar_thresh=0.)
    brain.remove_labels()

    # add foci
    brain.add_foci([0], coords_as_verts=True, hemi=hemi, color='blue')

    # add text
    brain.add_text(x=0, y=0, text='foo')

    # screenshot
    brain.show_view(view=dict(azimuth=180., elevation=90.))
    img = brain.screenshot(mode='rgb')
    assert_allclose(img.shape, (size[0], size[1], 3),
                    atol=70)  # XXX undo once size is fixed

    # add annotation
    annots = ['aparc', 'PALS_B12_Lobes']
    borders = [True, 2]
    alphas = [1, 0.5]
    brain = _Brain(subject_id='fsaverage',
                   hemi=hemi,
                   size=size,
                   surf='inflated',
                   subjects_dir=subjects_dir)
    for a, b, p in zip(annots, borders, alphas):
        brain.add_annotation(a, b, p)

    brain.close()
Example #48
0
def test_morph_data():
    """Test morphing of data
    """
    tempdir = _TempDir()
    subject_from = 'sample'
    subject_to = 'fsaverage'
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_to = read_source_estimate(fname_fmorph)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    assert_raises(ValueError,
                  stc_from.morph,
                  subject_to,
                  grade=3,
                  smooth=-1,
                  subjects_dir=subjects_dir)
    stc_to1 = stc_from.morph(subject_to,
                             grade=3,
                             smooth=12,
                             buffer_size=1000,
                             subjects_dir=subjects_dir)
    stc_to1.save(op.join(tempdir, '%s_audvis-meg' % subject_to))
    # make sure we can specify vertices
    vertices_to = grade_to_vertices(subject_to,
                                    grade=3,
                                    subjects_dir=subjects_dir)
    stc_to2 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=vertices_to,
                         smooth=12,
                         buffer_size=1000,
                         subjects_dir=subjects_dir)
    # make sure we can use different buffer_size
    stc_to3 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=vertices_to,
                         smooth=12,
                         buffer_size=3,
                         subjects_dir=subjects_dir)
    # make sure we get a warning about # of steps
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        morph_data(subject_from,
                   subject_to,
                   stc_from,
                   grade=vertices_to,
                   smooth=1,
                   buffer_size=3,
                   subjects_dir=subjects_dir)
    assert_equal(len(w), 2)

    assert_array_almost_equal(stc_to.data, stc_to1.data, 5)
    assert_array_almost_equal(stc_to1.data, stc_to2.data)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)
    # make sure precomputed morph matrices work
    morph_mat = compute_morph_matrix(subject_from,
                                     subject_to,
                                     stc_from.vertices,
                                     vertices_to,
                                     smooth=12,
                                     subjects_dir=subjects_dir)
    stc_to3 = stc_from.morph_precomputed(subject_to, vertices_to, morph_mat)
    assert_array_almost_equal(stc_to1.data, stc_to3.data)
    assert_raises(ValueError, stc_from.morph_precomputed, subject_to,
                  vertices_to, 'foo')
    assert_raises(ValueError, stc_from.morph_precomputed, subject_to,
                  [vertices_to[0]], morph_mat)
    assert_raises(ValueError, stc_from.morph_precomputed, subject_to,
                  [vertices_to[0][:-1], vertices_to[1]], morph_mat)
    assert_raises(ValueError,
                  stc_from.morph_precomputed,
                  subject_to,
                  vertices_to,
                  morph_mat,
                  subject_from='foo')

    # steps warning
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        compute_morph_matrix(subject_from,
                             subject_to,
                             stc_from.vertices,
                             vertices_to,
                             smooth=1,
                             subjects_dir=subjects_dir)
    assert_equal(len(w), 2)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert_true(np.corrcoef(mean_to, mean_from).min() > 0.999)

    # make sure we can fill by morphing
    stc_to5 = morph_data(subject_from,
                         subject_to,
                         stc_from,
                         grade=None,
                         smooth=12,
                         buffer_size=3,
                         subjects_dir=subjects_dir)
    assert_true(stc_to5.data.shape[0] == 163842 + 163842)

    # Morph sparse data
    # Make a sparse stc
    stc_from.vertices[0] = stc_from.vertices[0][[100, 500]]
    stc_from.vertices[1] = stc_from.vertices[1][[200]]
    stc_from._data = stc_from._data[:3]

    assert_raises(RuntimeError,
                  stc_from.morph,
                  subject_to,
                  sparse=True,
                  grade=5,
                  subjects_dir=subjects_dir)

    stc_to_sparse = stc_from.morph(subject_to,
                                   grade=None,
                                   sparse=True,
                                   subjects_dir=subjects_dir)
    assert_array_almost_equal(np.sort(stc_from.data.sum(axis=1)),
                              np.sort(stc_to_sparse.data.sum(axis=1)))
    assert_equal(len(stc_from.rh_vertno), len(stc_to_sparse.rh_vertno))
    assert_equal(len(stc_from.lh_vertno), len(stc_to_sparse.lh_vertno))
    assert_equal(stc_to_sparse.subject, subject_to)
    assert_equal(stc_to_sparse.tmin, stc_from.tmin)
    assert_equal(stc_to_sparse.tstep, stc_from.tstep)

    stc_from.vertices[0] = np.array([], dtype=np.int64)
    stc_from._data = stc_from._data[:1]

    stc_to_sparse = stc_from.morph(subject_to,
                                   grade=None,
                                   sparse=True,
                                   subjects_dir=subjects_dir)
    assert_array_almost_equal(np.sort(stc_from.data.sum(axis=1)),
                              np.sort(stc_to_sparse.data.sum(axis=1)))
    assert_equal(len(stc_from.rh_vertno), len(stc_to_sparse.rh_vertno))
    assert_equal(len(stc_from.lh_vertno), len(stc_to_sparse.lh_vertno))
    assert_equal(stc_to_sparse.subject, subject_to)
    assert_equal(stc_to_sparse.tmin, stc_from.tmin)
    assert_equal(stc_to_sparse.tstep, stc_from.tstep)
Example #49
0
save_result_format = "/net/server/data/programs/razoral/platon_pmwords/target/data_for_TFCE/cos/{}_{}_{}_integ5"

### params

#time_lbls = ["144_362ms", "144_217ms", "226_362ms"]
#time_intervals = np.array([[145, 362], [145, 217], [226, 362]])

time_lbls = ["144_362ms"]
time_intervals = np.array([[145, 362]])

#time_lbls = ["144_217ms", "226_362ms"]
#time_intervals = np.array([[145, 217], [226, 362]])

### code

stc_test = mne.read_source_estimate(
    data_path.format(subjects[0], "passive1", words[0], integ_ms))

time_intervals_inds = calc_time_indices(
    time_intervals,
    data_path.format(subjects[0], "passive1", words[0], integ_ms), integ_ms)

for t, time_interval in enumerate(time_intervals):

    print('calculations performed in', time_lbls[t])

    sub_cos_sw = np.zeros((len(subjects), n_voxels))
    sub_cos_sd = np.zeros((len(subjects), n_voxels))

    for subject_idx, subject in enumerate(subjects):

        print("\n\t({:2}/{:2}) processing {}\n".format(subject_idx + 1,
Example #50
0
def test_volume_stc():
    """Test volume STCs
    """
    tempdir = _TempDir()
    N = 100
    data = np.arange(N)[:, np.newaxis]
    datas = [data, data, np.arange(2)[:, np.newaxis]]
    vertno = np.arange(N)
    vertnos = [vertno, vertno[:, np.newaxis], np.arange(2)[:, np.newaxis]]
    vertno_reads = [vertno, vertno, np.arange(2)]
    for data, vertno, vertno_read in zip(datas, vertnos, vertno_reads):
        stc = VolSourceEstimate(data, vertno, 0, 1)
        fname_temp = op.join(tempdir, 'temp-vl.stc')
        stc_new = stc
        for _ in range(2):
            stc_new.save(fname_temp)
            stc_new = read_source_estimate(fname_temp)
            assert_true(isinstance(stc_new, VolSourceEstimate))
            assert_array_equal(vertno_read, stc_new.vertices)
            assert_array_almost_equal(stc.data, stc_new.data)

    # now let's actually read a MNE-C processed file
    stc = read_source_estimate(fname_vol, 'sample')
    assert_true(isinstance(stc, VolSourceEstimate))

    assert_true('sample' in repr(stc))
    stc_new = stc
    assert_raises(ValueError, stc.save, fname_vol, ftype='whatever')
    for _ in range(2):
        fname_temp = op.join(tempdir, 'temp-vol.w')
        stc_new.save(fname_temp, ftype='w')
        stc_new = read_source_estimate(fname_temp)
        assert_true(isinstance(stc_new, VolSourceEstimate))
        assert_array_equal(stc.vertices, stc_new.vertices)
        assert_array_almost_equal(stc.data, stc_new.data)

    # save the stc as a nifti file and export
    try:
        import nibabel as nib
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('always')
            src = read_source_spaces(fname_vsrc)
        vol_fname = op.join(tempdir, 'stc.nii.gz')
        stc.save_as_volume(vol_fname, src, dest='surf', mri_resolution=False)
        with warnings.catch_warnings(record=True):  # nib<->numpy
            img = nib.load(vol_fname)
        assert_true(img.shape == src[0]['shape'] + (len(stc.times), ))

        with warnings.catch_warnings(record=True):  # nib<->numpy
            t1_img = nib.load(fname_t1)
        stc.save_as_volume(op.join(tempdir, 'stc.nii.gz'),
                           src,
                           dest='mri',
                           mri_resolution=True)
        with warnings.catch_warnings(record=True):  # nib<->numpy
            img = nib.load(vol_fname)
        assert_true(img.shape == t1_img.shape + (len(stc.times), ))
        assert_array_almost_equal(img.get_affine(),
                                  t1_img.get_affine(),
                                  decimal=5)

        # export without saving
        img = stc.as_volume(src, dest='mri', mri_resolution=True)
        assert_true(img.shape == t1_img.shape + (len(stc.times), ))
        assert_array_almost_equal(img.get_affine(),
                                  t1_img.get_affine(),
                                  decimal=5)

    except ImportError:
        print('Save as nifti test skipped, needs NiBabel')
Example #51
0
conditions = ["pos_ton"]
freqs = {"beta_high":list(np.arange(26,35)),"gamma":(np.arange(35,56)),"gamma_high":(np.arange(65,96))}

for cond in conditions:

    for freq,vals in freqs.items():

        print("Running analyses for {}, band '{}'\n".format(cond,freq))
        # list for collecting stcs for group average for plotting
        all_diff = []
        # list for data arrays for permutation t-test on source
        X_diff = []

        for meg,mri in sub_dict.items():
            # load and prepare the STC data
            stc_fsavg_diff = mne.read_source_estimate("{d}nc_{s}_stc_fs_{c}_diff_F_{f}".format(d=meg_dir,s=meg,c=cond,f=freq), subject='fsaverage')  ## works without file ending like this (loads both lh and rh)
            # collect the individual stcs into lists for averaging later
            all_diff.append(stc_fsavg_diff)
            X_diff.append(stc_fsavg_diff.data.T)

        # create STC average over all subjects for plotting
        stc_sum = all_diff.pop()
        for stc in all_diff:
            stc_sum = stc_sum + stc
        NEM_all_stc_diff = stc_sum / len(sub_dict)
        # plot difference N or P vs. Ton on fsaverage
        NEM_all_stc_diff.plot(subjects_dir=mri_dir,subject='fsaverage',surface='white',hemi='both',time_viewer=True)

        # now do cluster permutation analysis
        X_diff = np.array(X_diff)
        t_obs, clusters, cluster_pv, H0 = clu = mne.stats.spatio_temporal_cluster_1samp_test(X_diff, n_permutations=1024, threshold = threshold, tail=0, connectivity=connectivity, n_jobs=4, step_down_p=0.05, t_power=1, out_type='indices')
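
        # The call above assumes `threshold` and `connectivity` were defined
        # earlier in the (truncated) script; a typical setup would be:
        #   connectivity = mne.spatial_src_connectivity(src)
        #   threshold = -scipy.stats.distributions.t.ppf(0.05 / 2., len(sub_dict) - 1)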
Example #52
0
def test_label_center_of_mass():
    """Test computing the center of mass of a label."""
    stc = read_source_estimate(stc_fname)
    stc.lh_data[:] = 0
    vertex_stc = stc.center_of_mass('sample', subjects_dir=subjects_dir)[0]
    assert_equal(vertex_stc, 124791)
    label = Label(stc.vertices[1],
                  pos=None,
                  values=stc.rh_data.mean(axis=1),
                  hemi='rh',
                  subject='sample')
    vertex_label = label.center_of_mass(subjects_dir=subjects_dir)
    assert_equal(vertex_label, vertex_stc)

    labels = read_labels_from_annot('sample',
                                    parc='aparc.a2009s',
                                    subjects_dir=subjects_dir)
    src = read_source_spaces(src_fname)
    # Try a couple of random ones, one from left and one from right
    # Visually verified in about the right place using mne_analyze
    for label, expected in zip([labels[2], labels[3], labels[-5]],
                               [141162, 145221, 55979]):
        label.values[:] = -1
        pytest.raises(ValueError,
                      label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 0
        pytest.raises(ValueError,
                      label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 1
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir), expected)
        assert_equal(
            label.center_of_mass(subjects_dir=subjects_dir,
                                 restrict_vertices=label.vertices), expected)
        # restrict to source space
        idx = 0 if label.hemi == 'lh' else 1
        # this simple nearest version is not equivalent, but is probably
        # close enough for many labels (including the test ones):
        pos = label.pos[np.where(label.vertices == expected)[0][0]]
        pos = (src[idx]['rr'][src[idx]['vertno']] - pos)
        pos = np.argmin(np.sum(pos * pos, axis=1))
        src_expected = src[idx]['vertno'][pos]
        # see if we actually get the same one
        src_restrict = np.intersect1d(label.vertices, src[idx]['vertno'])
        assert_equal(
            label.center_of_mass(subjects_dir=subjects_dir,
                                 restrict_vertices=src_restrict), src_expected)
        assert_equal(
            label.center_of_mass(subjects_dir=subjects_dir,
                                 restrict_vertices=src), src_expected)
    # degenerate cases
    pytest.raises(ValueError,
                  label.center_of_mass,
                  subjects_dir=subjects_dir,
                  restrict_vertices='foo')
    pytest.raises(TypeError,
                  label.center_of_mass,
                  subjects_dir=subjects_dir,
                  surf=1)
    pytest.raises(IOError,
                  label.center_of_mass,
                  subjects_dir=subjects_dir,
                  surf='foo')
Example #53
0
os.environ['ETS_TOOLKIT'] = 'qt4'
os.environ['QT_API'] = 'pyqt5'

fig_path = op.join(main_path, 'figures')
if not os.path.isdir(fig_path):
    os.mkdir(fig_path)
# Plot
stc_condition = list()
for cond in conditions:
    stcs = list()
    for subject_id in range(1, 20):
        if subject_id not in exclude_subjects:
            subject = "sub%03d" % subject_id
            out_path = op.join(data_path, subject, 'MEG')
            stc = mne.read_source_estimate(
                op.join(out_path, 'mne_dSPM_inverse_morph-%s' % (cond)))
            stcs.append(stc)

    data = np.average([np.abs(s.data) for s in stcs], axis=0)
    stc = mne.SourceEstimate(data, stcs[0].vertices, stcs[0].tmin,
                             stcs[0].tstep, 'fsaverage')
    del stcs
    stc_condition.append(stc)

data = stc_condition[0].data/np.max(stc_condition[0].data) + \
        stc_condition[2].data/np.max(stc_condition[2].data) - \
        stc_condition[1].data/np.max(stc_condition[1].data)
data = np.abs(data)
stc_contrast = mne.SourceEstimate(data, stc_condition[0].vertices,
                                  stc_condition[0].tmin,
                                  stc_condition[0].tstep, 'fsaverage')
Example #54
0
def test_stc_to_label():
    """Test stc_to_label."""
    src = read_source_spaces(fwd_fname)
    src_bad = read_source_spaces(src_bad_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = _stc_to_label(stc, src='sample', smooth=3)
    labels2 = _stc_to_label(stc, src=src, smooth=3)
    assert_equal(len(labels1), len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with pytest.warns(RuntimeWarning, match='have holes'):
        labels_lh, labels_rh = stc_to_label(stc,
                                            src=src,
                                            smooth=True,
                                            connected=True)

    pytest.raises(ValueError,
                  stc_to_label,
                  stc,
                  'sample',
                  smooth=True,
                  connected=True)
    pytest.raises(RuntimeError,
                  stc_to_label,
                  stc,
                  smooth=True,
                  src=src_bad,
                  connected=True)
    assert_equal(len(labels_lh), 1)
    assert_equal(len(labels_rh), 1)

    # test getting tris
    tris = labels_lh[0].get_tris(src[0]['use_tris'], vertices=stc.vertices[0])
    pytest.raises(ValueError,
                  spatial_tris_adjacency,
                  tris,
                  remap_vertices=False)
    adjacency = spatial_tris_adjacency(tris, remap_vertices=True)
    assert (adjacency.shape[0] == len(stc.vertices[0]))

    # "src" as a subject name
    pytest.raises(TypeError,
                  stc_to_label,
                  stc,
                  src=1,
                  smooth=False,
                  connected=False,
                  subjects_dir=subjects_dir)
    pytest.raises(ValueError,
                  stc_to_label,
                  stc,
                  src=SourceSpaces([src[0]]),
                  smooth=False,
                  connected=False,
                  subjects_dir=subjects_dir)
    pytest.raises(ValueError,
                  stc_to_label,
                  stc,
                  src='sample',
                  smooth=False,
                  connected=True,
                  subjects_dir=subjects_dir)
    pytest.raises(ValueError,
                  stc_to_label,
                  stc,
                  src='sample',
                  smooth=True,
                  connected=False,
                  subjects_dir=subjects_dir)
    labels_lh, labels_rh = stc_to_label(stc,
                                        src='sample',
                                        smooth=False,
                                        connected=False,
                                        subjects_dir=subjects_dir)
    assert (len(labels_lh) > 1)
    assert (len(labels_rh) > 1)

    # with smooth='patch'
    with pytest.warns(RuntimeWarning, match='have holes'):
        labels_patch = stc_to_label(stc, src=src, smooth=True)
    assert len(labels_patch) == len(labels1)
    for l1, l2 in zip(labels_patch, labels1):
        assert_labels_equal(l1, l2, decimal=4)
Example #55
0
''' Generates correlations for a full matrix of all sources in fsaverage. Saves one matrix per subject/band so it's easy to access later. '''

import mne
import numpy as np

bands = [[1, 4], [4, 8], [8, 13], [13, 30], [30, 50]]
subjs_fname = '/Users/sudregp/data/meg/aligned_subjs.txt'
dir_out = '/mnt/neuro/MEG_data/correlations/'
data_dir = '/Users/sudregp/data/meg/'
nsources = 20484

fid = open(subjs_fname, 'r')
subjs = [line.rstrip() for line in fid]

iu = np.triu_indices(nsources, k=1)
# for each band, compute subject-based correlation map
for l_freq, h_freq in bands:
    subj_corrs = []
    print('Band %d to %d Hz' % (l_freq, h_freq))
    for cnt, s in enumerate(subjs):
        print(cnt + 1, '/', len(subjs), ':', s)
        fname = data_dir + 'morphed-lcmv-%dto%d-'%(l_freq,h_freq) + s
        stc = mne.read_source_estimate(fname)
        corr = np.float16(np.corrcoef(stc.data))[iu]
        np.save(dir_out + 'all2allCorr-%dto%d-'%(l_freq,h_freq) + s, corr)
Example #56
0
def test_xhemi_morph():
    """Test cross-hemisphere morphing."""
    stc = read_source_estimate(fname_stc, subject='sample')
    # smooth 1 for speed where possible
    smooth = 4
    spacing = 4
    n_grade_verts = 2562
    stc = compute_source_morph(stc,
                               'sample',
                               'fsaverage_sym',
                               smooth=smooth,
                               warn=False,
                               spacing=spacing,
                               subjects_dir=subjects_dir).apply(stc)
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=1,
                                 xhemi=True,
                                 warn=False,
                                 spacing=[stc.vertices[0], []],
                                 subjects_dir=subjects_dir)
    stc_xhemi = morph.apply(stc)
    assert stc_xhemi.data.shape[0] == n_grade_verts
    assert stc_xhemi.rh_data.shape[0] == 0
    assert len(stc_xhemi.vertices[1]) == 0
    assert stc_xhemi.lh_data.shape[0] == n_grade_verts
    assert len(stc_xhemi.vertices[0]) == n_grade_verts
    # complete reversal mapping
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=stc.vertices,
                                 subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts * 2, ) * 2
    assert mm.size > n_grade_verts * 2
    assert mm[:n_grade_verts, :n_grade_verts].size == 0  # L to L
    assert mm[n_grade_verts:, n_grade_verts:].size == 0  # R to R
    assert mm[n_grade_verts:, :n_grade_verts].size > n_grade_verts  # L to R
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts  # R to L
    # more complicated reversal mapping
    vertices_use = [stc.vertices[0], np.arange(10242)]
    n_src_verts = len(vertices_use[1])
    assert vertices_use[0].shape == (n_grade_verts, )
    assert vertices_use[1].shape == (n_src_verts, )
    # ensure it's sufficiently different to manifest round-trip errors
    assert np.in1d(vertices_use[1], stc.vertices[1]).mean() < 0.3
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=vertices_use,
                                 subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts + n_src_verts, n_grade_verts * 2)
    assert mm[:n_grade_verts, :n_grade_verts].size == 0
    assert mm[n_grade_verts:, n_grade_verts:].size == 0
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts
    assert mm[n_grade_verts:, :n_grade_verts].size > n_src_verts
    # morph forward then back
    stc_xhemi = morph.apply(stc)
    morph = compute_source_morph(stc_xhemi,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=stc.vertices,
                                 subjects_dir=subjects_dir)
    stc_return = morph.apply(stc_xhemi)
    for hi in range(2):
        assert_array_equal(stc_return.vertices[hi], stc.vertices[hi])
    correlation = np.corrcoef(stc.data.ravel(), stc_return.data.ravel())[0, 1]
    assert correlation > 0.9  # not great b/c of sparse grade + small smooth
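
# A minimal sketch, mirroring MNE's cross-hemisphere comparison
# example, of how xhemi morphing is used in practice; it assumes an
# `stc` already on fsaverage_sym, as produced in the test above.
def xhemi_diff(stc, subjects_dir):
    """Return the left-minus-right difference map on fsaverage_sym."""
    stc_mirror = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', spacing=stc.vertices,
        xhemi=True, warn=False, subjects_dir=subjects_dir).apply(stc)
    return stc - stc_mirror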
Beispiel #57
0
import mne
from mayavi import mlab
import numpy as np
import matplotlib.pyplot as plt
plt.ion()

proc_dir = "/home/jeff/ATT_dat/proc/"
subjects_dir = "/home/jeff/hdd/jeff/freesurfer/subjects/"
subject = "fsaverage"
stc_file = "stc_f_60-89Hz_rest_audio_tfce_A-lh.stc"
initial_time = 0

stc = mne.read_source_estimate("{proc}{file}".format(proc=proc_dir,
                                                     file=stc_file))
# choose color limits: symmetric limits for signed data, MNE's "auto"
# for all-binary (mask) data, the plain data range otherwise
if stc.data.min() < 0:
    s_max = np.abs(stc.data).max()
    #s_max = 45
    s_mid = s_max / 2
    clim = {"kind": "value", "pos_lims": [0, s_mid, s_max]}
    colorbar = True
elif stc.data.max() == 1:
    clim = "auto"
    colorbar = False
else:
    s_max = stc.data.max()
    s_min = stc.data.min()
    s_mid = (s_max - s_min) / 2 + s_min
    clim = {"kind": "value", "lims": [s_min, s_mid, s_max]}
    colorbar = True

fig, axes = plt.subplots(2, 2)
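
# A plausible continuation, offered only as a sketch since the snippet
# above is truncated: the clim/colorbar settings computed above are
# typically passed straight to SourceEstimate.plot.
brain = stc.plot(subject=subject, subjects_dir=subjects_dir, hemi='both',
                 clim=clim, colorbar=colorbar, initial_time=initial_time)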
Beispiel #58
0
def test_volume_source_morph_basic(tmpdir):
    """Test volume source estimate morph, special cases and exceptions."""
    import nibabel as nib
    inverse_operator_vol = read_inverse_operator(fname_inv_vol)
    stc_vol = read_source_estimate(fname_vol_w, 'sample')

    # check for invalid input type
    with pytest.raises(TypeError, match='src must be'):
        compute_source_morph(src=42)

    # check for raising an error if neither
    # inverse_operator_vol['src'][0]['subject_his_id'] nor subject_from is set,
    # but attempting to perform a volume morph
    src = inverse_operator_vol['src']
    assert src._subject is None  # already None on disk (old!)

    with pytest.raises(ValueError, match='subject_from could not be inferred'):
        with pytest.warns(RuntimeWarning, match='recommend regenerating'):
            compute_source_morph(src=src, subjects_dir=subjects_dir)

    # check infer subject_from from src[0]['subject_his_id']
    src[0]['subject_his_id'] = 'sample'

    with pytest.raises(ValueError, match='Inter-hemispheric morphing'):
        compute_source_morph(src=src, subjects_dir=subjects_dir, xhemi=True)

    with pytest.raises(ValueError, match='Only surface.*sparse morph'):
        compute_source_morph(src=src, sparse=True, subjects_dir=subjects_dir)

    # terrible quality but fast
    zooms = 20
    kwargs = dict(zooms=zooms, niter_sdr=(1, ), niter_affine=(1, ))
    source_morph_vol = compute_source_morph(subjects_dir=subjects_dir,
                                            src=fname_inv_vol,
                                            subject_from='sample',
                                            **kwargs)
    shape = (13, ) * 3  # for the given zooms

    assert source_morph_vol.subject_from == 'sample'

    # the brain used in sample data has shape (255, 255, 255)
    assert tuple(source_morph_vol.sdr_morph.domain_shape) == shape

    assert tuple(source_morph_vol.pre_affine.domain_shape) == shape

    # proves the above
    assert_array_equal(source_morph_vol.zooms, (zooms, ) * 3)

    # ensure proper src shape
    mri_size = (src[0]['mri_height'], src[0]['mri_depth'], src[0]['mri_width'])
    assert source_morph_vol.src_data['src_shape_full'] == mri_size

    fwd = read_forward_solution(fname_fwd_vol)
    fwd['src'][0]['subject_his_id'] = 'sample'  # avoid further warnings
    source_morph_vol = compute_source_morph(fwd['src'],
                                            'sample',
                                            'sample',
                                            subjects_dir=subjects_dir,
                                            **kwargs)

    # check wrong subject_to
    with pytest.raises(IOError, match='cannot read file'):
        compute_source_morph(fwd['src'],
                             'sample',
                             '42',
                             subjects_dir=subjects_dir)

    # save the morph ('-morph.h5' is appended to the file name)
    source_morph_vol.save(tmpdir.join('vol'))

    # check loading
    source_morph_vol_r = read_source_morph(tmpdir.join('vol-morph.h5'))

    # check invalid file name handling
    with pytest.raises(IOError, match='not found'):
        read_source_morph(tmpdir.join('42'))

    # check morph
    stc_vol_morphed = source_morph_vol.apply(stc_vol)
    # old way, verts do not match
    assert not np.array_equal(stc_vol_morphed.vertices[0], stc_vol.vertices[0])

    # vector
    stc_vol_vec = VolVectorSourceEstimate(
        np.tile(stc_vol.data[:, np.newaxis], (1, 3, 1)), stc_vol.vertices, 0,
        1)
    stc_vol_vec_morphed = source_morph_vol.apply(stc_vol_vec)
    assert isinstance(stc_vol_vec_morphed, VolVectorSourceEstimate)
    for ii in range(3):
        assert_allclose(stc_vol_vec_morphed.data[:, ii], stc_vol_morphed.data)

    # check output as NIfTI
    assert isinstance(source_morph_vol.apply(stc_vol_vec, output='nifti2'),
                      nib.Nifti2Image)

    # check for subject_from mismatch
    source_morph_vol_r.subject_from = '42'
    with pytest.raises(ValueError, match='subject_from must match'):
        source_morph_vol_r.apply(stc_vol_morphed)

    # check if nifti is in grid morph space with voxel_size == spacing
    img_morph_res = source_morph_vol.apply(stc_vol, output='nifti1')

    # ensure morph spacing
    assert isinstance(img_morph_res, nib.Nifti1Image)
    assert img_morph_res.header.get_zooms()[:3] == (zooms, ) * 3

    # ensure src shape
    img_mri_res = source_morph_vol.apply(stc_vol,
                                         output='nifti1',
                                         mri_resolution=True)
    assert isinstance(img_mri_res, nib.Nifti1Image)
    assert (img_mri_res.shape == (src[0]['mri_height'], src[0]['mri_depth'],
                                  src[0]['mri_width']) +
            (img_mri_res.shape[3], ))

    # check if nifti is defined resolution with voxel_size == (5., 5., 5.)
    img_any_res = source_morph_vol.apply(stc_vol,
                                         output='nifti1',
                                         mri_resolution=(5., 5., 5.))
    assert isinstance(img_any_res, nib.Nifti1Image)
    assert img_any_res.header.get_zooms()[:3] == (5., 5., 5.)

    # check if morph outputs correct data
    assert isinstance(stc_vol_morphed, VolSourceEstimate)

    # check that loaded and saved objects expose the same attribute names
    # (comparing full sorted key lists also catches a missing attribute,
    # which zip() would silently truncate away)
    assert (sorted(source_morph_vol_r.__dict__) ==
            sorted(source_morph_vol.__dict__))

    # check __repr__
    assert 'volume' in repr(source_morph_vol)

    # check Nifti2Image
    assert isinstance(
        source_morph_vol.apply(stc_vol,
                               mri_resolution=True,
                               mri_space=True,
                               output='nifti2'), nib.Nifti2Image)

    # Degenerate conditions
    with pytest.raises(TypeError, match='output must be'):
        source_morph_vol.apply(stc_vol, output=1)
    with pytest.raises(ValueError, match='subject_from does not match'):
        compute_source_morph(src=src, subject_from='42')
    with pytest.raises(ValueError, match='output'):
        source_morph_vol.apply(stc_vol, output='42')
    with pytest.raises(ValueError, match='subject_to cannot be None'):
        compute_source_morph(src, 'sample', None, subjects_dir=subjects_dir)
    # Check if not morphed, but voxel size not boolean, raise ValueError.
    # Note that this check requires dipy to not raise the dipy ImportError
    # before checking if the actual voxel size error will raise.
    with pytest.raises(ValueError, match='Cannot infer original voxel size'):
        stc_vol.as_volume(inverse_operator_vol['src'], mri_resolution=4)

    stc_surf = read_source_estimate(fname_stc, 'sample')
    with pytest.raises(TypeError, match='stc_from must be an instance'):
        source_morph_vol.apply(stc_surf)

    # src_to
    source_morph_vol = compute_source_morph(fwd['src'],
                                            subject_from='sample',
                                            src_to=fwd['src'],
                                            subject_to='sample',
                                            subjects_dir=subjects_dir,
                                            **kwargs)
    stc_vol_2 = source_morph_vol.apply(stc_vol)
    # new way, verts match
    assert_array_equal(stc_vol.vertices[0], stc_vol_2.vertices[0])
    stc_vol_bad = VolSourceEstimate(stc_vol.data[:-1],
                                    [stc_vol.vertices[0][:-1]], stc_vol.tmin,
                                    stc_vol.tstep)
    match = (
        'vertices do not match between morph \\(4157\\) and stc \\(4156\\).*'
        '\n.*\n.*\n.*Vertices were likely excluded during forward computatio.*'
    )
    with pytest.raises(ValueError, match=match):
        source_morph_vol.apply(stc_vol_bad)

    # nifti outputs and stc equiv
    img_vol = source_morph_vol.apply(stc_vol, output='nifti1')
    img_vol_2 = stc_vol_2.as_volume(src=fwd['src'], mri_resolution=False)
    assert_allclose(img_vol.affine, img_vol_2.affine)
    img_vol = img_vol.get_fdata()
    img_vol_2 = img_vol_2.get_fdata()
    assert img_vol.shape == img_vol_2.shape
    assert_allclose(img_vol, img_vol_2)
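
# A condensed recap, as a sketch only, of the src_to happy path the
# test above exercises piecewise (same fixtures: fname_fwd_vol,
# subjects_dir, stc_vol):
def _morph_vol_to_self(stc_vol):
    fwd = read_forward_solution(fname_fwd_vol)
    fwd['src'][0]['subject_his_id'] = 'sample'
    morph = compute_source_morph(fwd['src'], subject_from='sample',
                                 src_to=fwd['src'], subject_to='sample',
                                 subjects_dir=subjects_dir)
    stc_morphed = morph.apply(stc_vol)           # VolSourceEstimate out
    img = morph.apply(stc_vol, output='nifti1')  # same data as NIfTI
    return stc_morphed, img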
            morph = compute_source_morph(stcs, subject_from=subject,
                                         subject_to='fsaverage', spacing=4)
            stc_fsaverage = morph.apply(stcs)
            print("Saving!")
            fname = meg_dir + '_STCS/%s/%s/%s/%s_%s_%s_morphed' % (
                tonetype, key, freq, subject, tonetype, freq)
            print(fname)
            stc_fsaverage.save(fname)


# average stcs for each frequency and get center of mass
from functools import reduce  # Python 3: reduce lives in functools
from operator import add

for tonetype in tonetypes:
    print("COM for %s"%(tonetype))
    for freq in frequencies:
        print('freq=%s'%(freq))
        stcs = []
        for subject in subjects:
            stc = read_source_estimate(meg_dir+'_STCS/%s/%s/%s/%s_%s_%s_morphed'%(tonetype,key,freq,subject,tonetype,freq))
            stcs.append(stc)
        stc_avg = reduce(add, stcs)
        stc_avg /= len(stcs)

        stc_avg._data = np.abs(stc_avg._data)
        stc_avg_cropped = stc_avg.copy().crop(0,0.3)

        print("Computing COM for %s %sHz"%(tonetype,freq))
        for hem, h in zip(['lh', 'rh'], [0, 1]):
            # center_of_mass returns (vertex, hemisphere index, time)
            vtx, hemi_idx, t = stc_avg_cropped.center_of_mass(
                subject='fsaverage', hemi=h, restrict_vertices=False)
            print(vtx)
            np.save(meg_dir + '_STCS/%s/%s/%s/_CM_%s_%s_%s_%s_cropped.npy'
                    % (tonetype, key, freq, tonetype, key, freq, hem),
                    (vtx, hemi_idx, t))

tonetype = 'pure'
# load labels of interest
HCPlabels = mne.read_labels_from_annot(MRIsubject, parc='HCPMMP1', hemi='both', 
                                       surf_name='inflated', subjects_dir=subjects_dir)

HCPlabellist = []
ROIname = ['VVC','PIT','FFC','V8','V4_']
for roi in ROIname:
    for r in [i for i in HCPlabels if roi in i.name and i.hemi=='rh']:
        HCPlabellist.append(r)

CombinedLabel = HCPlabellist[0] + HCPlabellist[1] + HCPlabellist[2] + HCPlabellist[3] + HCPlabellist[4]
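
# Equivalent and robust to the number of ROIs matched (mne.Label
# supports '+', so sum() with a Label start value works):
#   CombinedLabel = sum(HCPlabellist[1:], HCPlabellist[0])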

if useFsaveModel != 'ico5':
    # for making SourceEstimate instance of fsaverage
    dirname = filedir+'/Subject1/'+ExpID+'/Datafiles/SourceEstimate/'+srcdir1+'/Fsaverage_%s' % useFsaveModel
    templateSTC = mne.read_source_estimate(dirname+'/SrcEst_MeanTC_FearF_BSF_fsaverage%s' % useFsaveModel.capitalize())
    del dirname

# collect vertex indices per ROI in a dict rather than exec-created
# variables (access as vertIdx['VVC'] instead of vertIdx_VVC)
vertIdx = {}
for signiROI in HCPlabellist:
    labelname = signiROI.name.split('_')[1]
    if useFsaveModel == 'ico5':
        vertIdx[labelname] = signiROI.get_vertices_used()
    else:
        labelSTC = templateSTC.in_label(signiROI)
        if useFsaveModel == 'oct6':
            vertIdx[labelname] = np.array(
                [i for i, n in enumerate(templateSTC.rh_vertno)
                 if n in labelSTC.rh_vertno])
        else:
            vertIdx[labelname] = labelSTC.rh_vertno
        del labelSTC