Example #1
def test_xhemi_morph():
    """Test cross-hemisphere morphing."""
    stc = read_source_estimate(fname_stc, subject='sample')
    # smooth 1 for speed where possible
    smooth = 4
    spacing = 4
    n_grade_verts = 2562
    stc = compute_source_morph(
        stc, 'sample', 'fsaverage_sym', smooth=smooth, warn=False,
        spacing=spacing, subjects_dir=subjects_dir).apply(stc)
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=1, xhemi=True,
        warn=False, spacing=[stc.vertices[0], []],
        subjects_dir=subjects_dir)
    stc_xhemi = morph.apply(stc)
    assert stc_xhemi.data.shape[0] == n_grade_verts
    assert stc_xhemi.rh_data.shape[0] == 0
    assert len(stc_xhemi.vertices[1]) == 0
    assert stc_xhemi.lh_data.shape[0] == n_grade_verts
    assert len(stc_xhemi.vertices[0]) == n_grade_verts
    # complete reversal mapping
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth, xhemi=True,
        warn=False, spacing=stc.vertices, subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts * 2,) * 2
    assert mm.size > n_grade_verts * 2
    assert mm[:n_grade_verts, :n_grade_verts].size == 0  # L to L
    assert mm[n_grade_verts:, n_grade_verts:].size == 0  # R to R
    assert mm[n_grade_verts:, :n_grade_verts].size > n_grade_verts  # L to R
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts  # R to L
    # more complicated reversal mapping
    vertices_use = [stc.vertices[0], np.arange(10242)]
    n_src_verts = len(vertices_use[1])
    assert vertices_use[0].shape == (n_grade_verts,)
    assert vertices_use[1].shape == (n_src_verts,)
    # ensure it's sufficiently different to manifest round-trip errors
    assert np.in1d(vertices_use[1], stc.vertices[1]).mean() < 0.3
    morph = compute_source_morph(
        stc, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth, xhemi=True,
        warn=False, spacing=vertices_use, subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts + n_src_verts, n_grade_verts * 2)
    assert mm[:n_grade_verts, :n_grade_verts].size == 0
    assert mm[n_grade_verts:, n_grade_verts:].size == 0
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts
    assert mm[n_grade_verts:, :n_grade_verts].size > n_src_verts
    # morph forward then back
    stc_xhemi = morph.apply(stc)
    morph = compute_source_morph(
        stc_xhemi, 'fsaverage_sym', 'fsaverage_sym', smooth=smooth,
        xhemi=True, warn=False, spacing=stc.vertices,
        subjects_dir=subjects_dir)
    stc_return = morph.apply(stc_xhemi)
    for hi in range(2):
        assert_array_equal(stc_return.vertices[hi], stc.vertices[hi])
    correlation = np.corrcoef(stc.data.ravel(), stc_return.data.ravel())[0, 1]
    assert correlation > 0.9  # not great b/c of sparse grade + small smooth
Example #2
def test_sparse_morph():
    """Test sparse morphing."""
    rng = np.random.RandomState(0)
    vertices_fs = [np.sort(rng.permutation(np.arange(10242))[:4]),
                   np.sort(rng.permutation(np.arange(10242))[:6])]
    data = rng.randn(10, 1)
    stc_fs = SourceEstimate(data, vertices_fs, 1, 1, 'fsaverage')
    spheres_fs = [mne.read_surface(op.join(
        subjects_dir, 'fsaverage', 'surf', '%s.sphere.reg' % hemi))[0]
        for hemi in ('lh', 'rh')]
    spheres_sample = [mne.read_surface(op.join(
        subjects_dir, 'sample', 'surf', '%s.sphere.reg' % hemi))[0]
        for hemi in ('lh', 'rh')]
    morph_fs_sample = compute_source_morph(
        stc_fs, 'fsaverage', 'sample', sparse=True, spacing=None,
        subjects_dir=subjects_dir)
    stc_sample = morph_fs_sample.apply(stc_fs)
    offset = 0
    orders = list()
    for v1, s1, v2, s2 in zip(stc_fs.vertices, spheres_fs,
                              stc_sample.vertices, spheres_sample):
        dists = cdist(s1[v1], s2[v2])
        order = np.argmin(dists, axis=-1)
        assert_array_less(dists[np.arange(len(order)), order], 1.5)  # mm
        orders.append(order + offset)
        offset += len(order)
    assert_allclose(stc_fs.data, stc_sample.data[np.concatenate(orders)])
    # Return
    morph_sample_fs = compute_source_morph(
        stc_sample, 'sample', 'fsaverage', sparse=True, spacing=None,
        subjects_dir=subjects_dir)
    stc_fs_return = morph_sample_fs.apply(stc_sample)
    offset = 0
    orders = list()
    for v1, s, v2 in zip(stc_fs.vertices, spheres_fs, stc_fs_return.vertices):
        dists = cdist(s[v1], s[v2])
        order = np.argmin(dists, axis=-1)
        assert_array_less(dists[np.arange(len(order)), order], 1.5)  # mm
        orders.append(order + offset)
        offset += len(order)
    assert_allclose(stc_fs.data, stc_fs_return.data[np.concatenate(orders)])
Example #3
def test_surface_vector_source_morph():
    """Test surface and vector source estimate morph."""
    tempdir = _TempDir()

    inverse_operator_surf = read_inverse_operator(fname_inv_surf)

    stc_surf = read_source_estimate(fname_smorph, subject='sample')
    stc_surf.crop(0.09, 0.1)  # for faster computation

    stc_vec = _real_vec_stc()

    source_morph_surf = compute_source_morph(
        inverse_operator_surf['src'], subjects_dir=subjects_dir,
        smooth=1, warn=False)  # smooth 1 for speed
    assert source_morph_surf.subject_from == 'sample'
    assert source_morph_surf.subject_to == 'fsaverage'
    assert source_morph_surf.kind == 'surface'
    assert isinstance(source_morph_surf.src_data, dict)
    assert isinstance(source_morph_surf.src_data['vertices_from'], list)
    assert isinstance(source_morph_surf, SourceMorph)
    stc_surf_morphed = source_morph_surf.apply(stc_surf)
    assert isinstance(stc_surf_morphed, SourceEstimate)
    stc_vec_morphed = source_morph_surf.apply(stc_vec)
    with pytest.raises(ValueError, match='Only volume source estimates'):
        source_morph_surf.apply(stc_surf, output='nifti1')

    # check if correct class after morphing
    assert isinstance(stc_surf_morphed, SourceEstimate)
    assert isinstance(stc_vec_morphed, VectorSourceEstimate)

    # check __repr__
    assert 'surface' in repr(source_morph_surf)

    # check loading and saving for surf
    source_morph_surf.save(op.join(tempdir, '42.h5'))

    source_morph_surf_r = read_source_morph(op.join(tempdir, '42.h5'))

    assert (all([read == saved for read, saved in
                 zip(sorted(source_morph_surf_r.__dict__),
                     sorted(source_morph_surf.__dict__))]))

    # check wrong subject correction
    stc_surf.subject = None
    assert isinstance(source_morph_surf.apply(stc_surf), SourceEstimate)

    # degenerate
    stc_vol = read_source_estimate(fname_vol, 'sample')
    with pytest.raises(ValueError, match='stc_from was type'):
        source_morph_surf.apply(stc_vol)
Example #4
def get_mne_stc(ndvar=False, vol=False, subject='sample'):
    """MNE-Python SourceEstimate

    Parameters
    ----------
    ndvar : bool
        Convert to NDVar (default False; src="ico-4" is false, but it works as
        long as the source space is not accessed).
    vol : bool
        Volume source estimate.
    subject : str
        Subject name; 'sample' (default) or 'fsaverage_scaled'.
    """
    data_path = Path(mne.datasets.testing.data_path())
    meg_sdir = data_path / 'MEG/sample'
    subjects_dir = data_path / 'subjects'
    # scaled subject
    if subject == 'fsaverage_scaled':
        subject_dir = os.path.join(subjects_dir, subject)
        if not os.path.exists(subject_dir):
            mne.scale_mri('fsaverage', subject, .9, subjects_dir=subjects_dir, skip_fiducials=True, labels=False, annot=True)
        data_subject = 'fsaverage'
    else:
        data_subject = subject

    if vol:
        inv = mne.minimum_norm.read_inverse_operator(str(meg_sdir / 'sample_audvis_trunc-meg-vol-7-meg-inv.fif'))
        evoked = mne.read_evokeds(str(meg_sdir / 'sample_audvis_trunc-ave.fif'), 'Left Auditory')
        stc = mne.minimum_norm.apply_inverse(evoked, inv, method='MNE', pick_ori='vector')
        if data_subject == 'fsaverage':
            m = mne.compute_source_morph(stc, 'sample', data_subject, subjects_dir)
            stc = m.apply(stc)
            stc.subject = subject
        elif subject != 'sample':
            raise ValueError(f"subject={subject!r}")
        if ndvar:
            return load.fiff.stc_ndvar(stc, subject, 'vol-7', subjects_dir, 'MNE', sss_filename='{subject}-volume-7mm-src.fif')
        else:
            return stc
    stc_path = meg_sdir / f'{data_subject}_audvis_trunc-meg'
    if ndvar:
        return load.fiff.stc_ndvar(stc_path, subject, 'ico-5', subjects_dir)
    else:
        return mne.read_source_estimate(str(stc_path), subject)
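
# A usage sketch for the helper above (hedged: it assumes the eelbrain
# ``load`` module and the MNE testing dataset are available, as in the
# function body):
#     stc = get_mne_stc()                      # surface SourceEstimate for 'sample'
#     vol = get_mne_stc(ndvar=True, vol=True)  # volume estimate converted to an NDVar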
Example #5
def test_morph_stc_sparse():
    """Test morphing stc with sparse=True."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    # Morph sparse data
    # Make a sparse stc
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_from.vertices[0] = stc_from.vertices[0][[100, 500]]
    stc_from.vertices[1] = stc_from.vertices[1][[200]]
    stc_from._data = stc_from._data[:3]

    stc_to_sparse = compute_source_morph(
        stc_from, subject_from=subject_from, subject_to=subject_to,
        spacing=None, sparse=True, subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    stc_from.vertices[0] = np.array([], dtype=np.int64)
    stc_from._data = stc_from._data[:1]

    stc_to_sparse = compute_source_morph(
        stc_from, subject_from, subject_to, spacing=None, sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    # Degenerate cases
    with pytest.raises(ValueError, match='spacing must be set to None'):
        compute_source_morph(
            stc_from, subject_from=subject_from, subject_to=subject_to,
            spacing=5, sparse=True, subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='xhemi=True can only be used with'):
        compute_source_morph(
            stc_from, subject_from=subject_from, subject_to=subject_to,
            spacing=None, sparse=True, xhemi=True, subjects_dir=subjects_dir)
Example #6
def test_morphing():
    stc = datasets.get_mne_stc()
    y = load.fiff.stc_ndvar(stc, 'sample', 'ico-5', subjects_dir, 'dSPM', name='src')

    # sample to fsaverage
    m = mne.compute_source_morph(stc, 'sample', 'fsaverage', subjects_dir)
    stc_fsa = m.apply(stc)
    y_fsa = morph_source_space(y, 'fsaverage')
    assert_array_equal(y_fsa.x, stc_fsa.data)
    stc_fsa_ndvar = load.fiff.stc_ndvar(stc_fsa, 'fsaverage', 'ico-5', subjects_dir, 'dSPM', False, 'src', parc=None)
    assert_dataobj_equal(stc_fsa_ndvar, y_fsa)

    # scaled to fsaverage
    y_scaled = datasets.get_mne_stc(True, subject='fsaverage_scaled')
    y_scaled_m = morph_source_space(y_scaled, 'fsaverage')
    assert y_scaled_m.source.subject == 'fsaverage'
    assert_array_equal(y_scaled_m.x, y_scaled.x)

    # scaled to fsaverage [masked]
    y_sub = y_scaled.sub(source='superiortemporal-lh')
    y_sub_m = morph_source_space(y_sub, 'fsaverage')
    assert y_sub_m.source.subject == 'fsaverage'
    assert_array_equal(y_sub_m.x, y_sub.x)
Example #7
def test_morph_stc_sparse():
    """Test morphing stc with sparse=True."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    # Morph sparse data
    # Make a sparse stc
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_from.vertices[0] = stc_from.vertices[0][[100, 500]]
    stc_from.vertices[1] = stc_from.vertices[1][[200]]
    stc_from._data = stc_from._data[:3]

    stc_to_sparse = compute_source_morph(
        stc_from,
        subject_from=subject_from,
        subject_to=subject_to,
        spacing=None,
        sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    stc_from.vertices[0] = np.array([], dtype=np.int64)
    stc_from._data = stc_from._data[:1]

    stc_to_sparse = compute_source_morph(
        stc_from,
        subject_from,
        subject_to,
        spacing=None,
        sparse=True,
        subjects_dir=subjects_dir).apply(stc_from)

    assert_allclose(np.sort(stc_from.data.sum(axis=1)),
                    np.sort(stc_to_sparse.data.sum(axis=1)))
    assert len(stc_from.rh_vertno) == len(stc_to_sparse.rh_vertno)
    assert len(stc_from.lh_vertno) == len(stc_to_sparse.lh_vertno)
    assert stc_to_sparse.subject == subject_to
    assert stc_to_sparse.tmin == stc_from.tmin
    assert stc_to_sparse.tstep == stc_from.tstep

    # Degenerate cases
    with pytest.raises(ValueError, match='spacing must be set to None'):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             subject_to=subject_to,
                             spacing=5,
                             sparse=True,
                             subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='xhemi=True can only be used with'):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             subject_to=subject_to,
                             spacing=None,
                             sparse=True,
                             xhemi=True,
                             subjects_dir=subjects_dir)
Example #8
def test_volume_source_morph(tmpdir):
    """Test volume source estimate morph, special cases and exceptions."""
    import nibabel as nib
    inverse_operator_vol = read_inverse_operator(fname_inv_vol)
    stc_vol = read_source_estimate(fname_vol, 'sample')

    # check for invalid input type
    with pytest.raises(ValueError, match='src must be a string or instance'):
        compute_source_morph(src=42)

    # check for raising an error if neither
    # inverse_operator_vol['src'][0]['subject_his_id'] nor subject_from is set,
    # but attempting to perform a volume morph
    src = inverse_operator_vol['src']
    assert src._subject is None  # already None on disk (old!)

    with pytest.raises(ValueError, match='subject_from could not be inferred'):
        with pytest.warns(RuntimeWarning, match='recommend regenerating'):
            compute_source_morph(src=src, subjects_dir=subjects_dir)

    # check infer subject_from from src[0]['subject_his_id']
    src[0]['subject_his_id'] = 'sample'

    with pytest.raises(ValueError, match='Inter-hemispheric morphing'):
        compute_source_morph(src=src, subjects_dir=subjects_dir, xhemi=True)

    with pytest.raises(ValueError, match='Only surface.*sparse morph'):
        compute_source_morph(src=src, sparse=True, subjects_dir=subjects_dir)

    # terrible quality but fast
    zooms = 20
    kwargs = dict(zooms=zooms, niter_sdr=(1, ), niter_affine=(1, ))
    with pytest.warns(RuntimeWarning, match='recommend regenerating'):
        source_morph_vol = compute_source_morph(subjects_dir=subjects_dir,
                                                src=fname_inv_vol,
                                                subject_from='sample',
                                                **kwargs)
    shape = (13, ) * 3  # for the given zooms

    assert source_morph_vol.subject_from == 'sample'

    # the brain used in sample data has shape (255, 255, 255)
    assert tuple(source_morph_vol.sdr_morph.domain_shape) == shape

    assert tuple(source_morph_vol.pre_affine.domain_shape) == shape

    # proves the above
    assert_array_equal(source_morph_vol.zooms, (zooms, ) * 3)

    # assure proper src shape
    mri_size = (src[0]['mri_height'], src[0]['mri_depth'], src[0]['mri_width'])
    assert source_morph_vol.src_data['src_shape_full'] == mri_size

    fwd = read_forward_solution(fname_fwd_vol)
    fwd['src'][0]['subject_his_id'] = 'sample'  # avoid further warnings
    source_morph_vol = compute_source_morph(fwd['src'],
                                            'sample',
                                            'sample',
                                            subjects_dir=subjects_dir,
                                            **kwargs)

    # check wrong subject_to
    with pytest.raises(IOError, match='cannot read file'):
        compute_source_morph(fwd['src'],
                             'sample',
                             '42',
                             subjects_dir=subjects_dir)

    # two different ways of saving
    source_morph_vol.save(tmpdir.join('vol'))

    # check loading
    source_morph_vol_r = read_source_morph(tmpdir.join('vol-morph.h5'))

    # check for invalid file name handling
    with pytest.raises(IOError, match='not found'):
        read_source_morph(tmpdir.join('42'))

    # check morph
    stc_vol_morphed = source_morph_vol.apply(stc_vol)
    # old way, verts do not match
    assert not np.array_equal(stc_vol_morphed.vertices, stc_vol.vertices)

    # vector
    stc_vol_vec = VolVectorSourceEstimate(
        np.tile(stc_vol.data[:, np.newaxis], (1, 3, 1)), stc_vol.vertices, 0,
        1)
    stc_vol_vec_morphed = source_morph_vol.apply(stc_vol_vec)
    assert isinstance(stc_vol_vec_morphed, VolVectorSourceEstimate)
    for ii in range(3):
        assert_allclose(stc_vol_vec_morphed.data[:, ii], stc_vol_morphed.data)

    # check output as NIfTI
    assert isinstance(source_morph_vol.apply(stc_vol_vec, output='nifti2'),
                      nib.Nifti2Image)

    # check for subject_from mismatch
    source_morph_vol_r.subject_from = '42'
    with pytest.raises(ValueError, match='subject_from must match'):
        source_morph_vol_r.apply(stc_vol_morphed)

    # check if nifti is in grid morph space with voxel_size == spacing
    img_morph_res = source_morph_vol.apply(stc_vol, output='nifti1')

    # assure morph spacing
    assert isinstance(img_morph_res, nib.Nifti1Image)
    assert img_morph_res.header.get_zooms()[:3] == (zooms, ) * 3

    # assure src shape
    img_mri_res = source_morph_vol.apply(stc_vol,
                                         output='nifti1',
                                         mri_resolution=True)
    assert isinstance(img_mri_res, nib.Nifti1Image)
    assert (img_mri_res.shape == (src[0]['mri_height'], src[0]['mri_depth'],
                                  src[0]['mri_width']) +
            (img_mri_res.shape[3], ))

    # check if nifti is defined resolution with voxel_size == (5., 5., 5.)
    img_any_res = source_morph_vol.apply(stc_vol,
                                         output='nifti1',
                                         mri_resolution=(5., 5., 5.))
    assert isinstance(img_any_res, nib.Nifti1Image)
    assert img_any_res.header.get_zooms()[:3] == (5., 5., 5.)

    # check if morph outputs correct data
    assert isinstance(stc_vol_morphed, VolSourceEstimate)

    # check if loaded and saved objects contain the same
    assert (all([
        read == saved
        for read, saved in zip(sorted(source_morph_vol_r.__dict__),
                               sorted(source_morph_vol.__dict__))
    ]))

    # check __repr__
    assert 'volume' in repr(source_morph_vol)

    # check Nifti2Image
    assert isinstance(
        source_morph_vol.apply(stc_vol,
                               mri_resolution=True,
                               mri_space=True,
                               output='nifti2'), nib.Nifti2Image)

    # Degenerate conditions
    with pytest.raises(TypeError, match='output must be'):
        source_morph_vol.apply(stc_vol, output=1)
    with pytest.raises(ValueError, match='subject_from does not match'):
        compute_source_morph(src=src, subject_from='42')
    with pytest.raises(ValueError, match='output'):
        source_morph_vol.apply(stc_vol, output='42')
    with pytest.raises(ValueError, match='subject_to cannot be None'):
        compute_source_morph(src, 'sample', None, subjects_dir=subjects_dir)
    # Check if not morphed, but voxel size not boolean, raise ValueError.
    # Note that this check requires dipy to not raise the dipy ImportError
    # before checking if the actual voxel size error will raise.
    with pytest.raises(ValueError, match='Cannot infer original voxel size'):
        stc_vol.as_volume(inverse_operator_vol['src'], mri_resolution=4)

    stc_surf = read_source_estimate(fname_stc, 'sample')
    with pytest.raises(ValueError, match='stc_from was type'):
        source_morph_vol.apply(stc_surf)

    # src_to
    # zooms=20 does not match src_to zooms (7)
    with pytest.raises(ValueError, match='If src_to is provided, zooms shoul'):
        source_morph_vol = compute_source_morph(fwd['src'],
                                                subject_from='sample',
                                                src_to=fwd['src'],
                                                subject_to='sample',
                                                subjects_dir=subjects_dir,
                                                **kwargs)
    # hack the src_to "zooms" to make it seem like a pos=20. source space
    fwd['src'][0]['src_mri_t']['trans'][:3, :3] = 0.02 * np.eye(3)
    source_morph_vol = compute_source_morph(fwd['src'],
                                            subject_from='sample',
                                            src_to=fwd['src'],
                                            subject_to='sample',
                                            subjects_dir=subjects_dir,
                                            **kwargs)
    stc_vol_2 = source_morph_vol.apply(stc_vol)
    # new way, verts match
    assert_array_equal(stc_vol.vertices, stc_vol_2.vertices)
    stc_vol_bad = VolSourceEstimate(stc_vol.data[:-1], stc_vol.vertices[:-1],
                                    stc_vol.tmin, stc_vol.tstep)
    with pytest.raises(ValueError, match='vertices do not match between morp'):
        source_morph_vol.apply(stc_vol_bad)
Example #9
def test_morphed_source_space_return():
    """Test returning a morphed source space to the original subject."""
    # let's create some random data on fsaverage
    data = rng.randn(20484, 1)
    tmin, tstep = 0, 1.
    src_fs = read_source_spaces(fname_fs)
    stc_fs = SourceEstimate(data, [s['vertno'] for s in src_fs], tmin, tstep,
                            'fsaverage')
    n_verts_fs = sum(len(s['vertno']) for s in src_fs)

    # Create our morph source space
    src_morph = morph_source_spaces(src_fs,
                                    'sample',
                                    subjects_dir=subjects_dir)
    n_verts_sample = sum(len(s['vertno']) for s in src_morph)
    assert n_verts_fs == n_verts_sample

    # Morph the data over using standard methods
    stc_morph = compute_source_morph(src_fs,
                                     'fsaverage',
                                     'sample',
                                     spacing=[s['vertno'] for s in src_morph],
                                     smooth=1,
                                     subjects_dir=subjects_dir,
                                     warn=False).apply(stc_fs)
    assert stc_morph.data.shape[0] == n_verts_sample

    # We can now pretend like this was real data we got e.g. from an inverse.
    # To be complete, let's remove some vertices
    keeps = [
        np.sort(rng.permutation(np.arange(len(v)))[:len(v) - 10])
        for v in stc_morph.vertices
    ]
    stc_morph = SourceEstimate(
        np.concatenate([
            stc_morph.lh_data[keeps[0]], stc_morph.rh_data[keeps[1]]
        ]), [v[k] for v, k in zip(stc_morph.vertices, keeps)], tmin, tstep,
        'sample')

    # Return it to the original subject
    stc_morph_return = stc_morph.to_original_src(src_fs,
                                                 subjects_dir=subjects_dir)

    # This should fail (has too many verts in SourceMorph)
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        morph = compute_source_morph(src_morph,
                                     subject_from='sample',
                                     spacing=stc_morph_return.vertices,
                                     smooth=1,
                                     subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='vertices do not match'):
        morph.apply(stc_morph)

    # Compare to the original data
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        stc_morph_morph = compute_source_morph(
            src=stc_morph,
            subject_from='sample',
            spacing=stc_morph_return.vertices,
            smooth=1,
            subjects_dir=subjects_dir).apply(stc_morph)

    assert_equal(stc_morph_return.subject, stc_morph_morph.subject)
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])
    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Explicitly test having two vertices map to the same target vertex. We
    # simulate this by having two vertices be at the same position.
    src_fs2 = src_fs.copy()
    vert1, vert2 = src_fs2[0]['vertno'][:2]
    src_fs2[0]['rr'][vert1] = src_fs2[0]['rr'][vert2]
    stc_morph_return = stc_morph.to_original_src(src_fs2,
                                                 subjects_dir=subjects_dir)

    # test to_original_src method result equality
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])

    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Degenerate cases
    stc_morph.subject = None  # no .subject provided
    pytest.raises(ValueError,
                  stc_morph.to_original_src,
                  src_fs,
                  subject_orig='fsaverage',
                  subjects_dir=subjects_dir)
    stc_morph.subject = 'sample'
    del src_fs[0]['subject_his_id']  # no name in src_fsaverage
    pytest.raises(ValueError,
                  stc_morph.to_original_src,
                  src_fs,
                  subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'fsaverage'  # name mismatch
    pytest.raises(ValueError,
                  stc_morph.to_original_src,
                  src_fs,
                  subject_orig='foo',
                  subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'sample'
    src = read_source_spaces(fname)  # wrong source space
    pytest.raises(RuntimeError,
                  stc_morph.to_original_src,
                  src,
                  subjects_dir=subjects_dir)
Example #10
trans_dir = "G:/TSM_test/NEM_proc/" # enter your special trans file folder here
meg_dir = "G:/TSM_test/NEM_proc/"
mri_dir = "G:/freesurfer/subjects/"
sub_dict = {"NEM_14":"FIN23"}
# sub_dict = {"NEM_26":"ENR41"}

# load the fsaverage source space for computing and saving source morph from subjects
fs_src = mne.read_source_spaces("{}fsaverage_oct6_mix-src.fif".format(meg_dir))

for meg,mri in sub_dict.items():
    # read source space and BEM solution (conductor model) that have been saved
    trans = "{dir}{mri}_{meg}-trans.fif".format(dir=trans_dir,mri=mri,meg=meg)
    src = mne.read_source_spaces("{dir}{meg}-oct6-src.fif".format(dir=meg_dir,meg=meg))
    bem = mne.read_bem_solution("{dir}{meg}-bem.fif".format(dir=meg_dir,meg=meg))
    # load and prepare the MEG data
    epo_info = mne.io.read_info("{dir}{sub}_3-raw.fif".format(dir=preproc_dir,sub=meg))     # use your -epo.fif file
    # build forward model from MRI and BEM  - for each experimental block
    fwd = mne.make_forward_solution(epo_info, trans=trans, src=src, bem=bem, meg=True, eeg=False, mindist=3.0, n_jobs=8)
    # # build averaged forward model for all blocks/conditions
    # fwd = mne.average_forward_solutions([fwd_2,fwd_4], weights=None)     # in case you need to average forwards from several runs
    mne.write_forward_solution("{dir}{meg}-fwd.fif".format(dir=meg_dir,meg=meg),fwd,overwrite=True)

    # get info on dipoles and plot (optional)
    leadfield = fwd['sol']['data']
    print("Leadfield size : %d sensors x %d dipoles" % leadfield.shape)
    mne.viz.plot_alignment(epo_info, trans, subject=mri, dig=False, fwd=fwd, src=fwd['src'], eeg=False, subjects_dir=mri_dir, surfaces='white', bem=bem)

    # compute and save source morph to fsaverage for later group analyses
    morph = mne.compute_source_morph(fwd['src'],subject_from=mri,subject_to="fsaverage",subjects_dir=mri_dir,src_to=fs_src)  ## it's important to use fwd['src'] to account for discarded vertices
    morph.save("{}{}_fs-morph.h5".format(meg_dir,meg))
Example #11
    for regressor in var_names:
        print('Regressor: %s' % (regressor))

        # make directories if they don't exist
        save_dir = "%s/_RESULTS/%s" % (root_dir, regressor)
        if not os.path.isdir(save_dir):
            os.mkdir(save_dir)

        # put the data into an stc for convenience, and save
        stc_fname = '%s/morphed_stcs/%s_%s_%s_morphed' % (save_dir, subject,
                                                          regressor, data_type)
        stc_data = getattr(lm[regressor], data_type)  # equivalent to the original eval() lookup, but safer

        # morph the stc to average brain
        morph = mne.compute_source_morph(stc_data,
                                         subject_from=subject,
                                         subject_to='fsaverage',
                                         subjects_dir=subjects_dir,
                                         spacing=spacing)
        stc_fsaverage = morph.apply(stc_data)
        # stc_fsaverage = read_source_estimate(stc_fname)
        stc_fsaverage.save(stc_fname)
        betas.append(stc_fsaverage._data)

    all_betas.append(betas)

# convert to np
all_betas = np.array(all_betas, float)

# dimension order should be obs x time x source
all_betas = np.transpose(all_betas, [0, 1, 3, 2])
Example #12
# The source estimate can be visualized with
# :func:`stc.plot <mne.SourceEstimate.plot>` just as with other MNE
# objects. Note that for this visualization to work, you must have ``PyVista``
# installed on your machine.
initial_time = 0.1
brain = stc.plot(subjects_dir=subjects_dir,
                 initial_time=initial_time,
                 clim=dict(kind='value', lims=[3, 6, 9]),
                 smoothing_steps=7)

# %%
# You can also morph it to fsaverage and visualize it using a flatmap.

# sphinx_gallery_thumbnail_number = 3
stc_fs = mne.compute_source_morph(stc,
                                  'sample',
                                  'fsaverage',
                                  subjects_dir,
                                  smooth=5,
                                  verbose='error').apply(stc)
brain = stc_fs.plot(
    subjects_dir=subjects_dir,
    initial_time=initial_time,
    clim=dict(kind='value', lims=[3, 6, 9]),
    surface='flat',
    hemi='both',
    size=(1000, 500),
    smoothing_steps=5,
    time_viewer=False,
    add_data_kwargs=dict(colorbar_kwargs=dict(label_font_size=10)))

# to help orient us, let's add a parcellation (red=auditory, green=motor,
# blue=visual)
Example #13
# Author: Christian Brodbeck <*****@*****.**>
#
# License: BSD (3-clause)

import mne

data_dir = mne.datasets.sample.data_path()
subjects_dir = data_dir + '/subjects'
stc_path = data_dir + '/MEG/sample/sample_audvis-meg-eeg'

stc = mne.read_source_estimate(stc_path, 'sample')

# First, morph the data to fsaverage_sym, for which we have left_right
# registrations:
stc = mne.compute_source_morph(stc, 'sample', 'fsaverage_sym', smooth=5,
                               warn=False,
                               subjects_dir=subjects_dir).apply(stc)

# Compute a morph-matrix mapping the right to the left hemisphere,
# and vice-versa.
morph = mne.compute_source_morph(stc, 'fsaverage_sym', 'fsaverage_sym',
                                 spacing=stc.vertices, warn=False,
                                 subjects_dir=subjects_dir, xhemi=True)
stc_xhemi = morph.apply(stc)

# Now we can subtract them and plot the result:
diff = stc - stc_xhemi

diff.plot(hemi='lh', subjects_dir=subjects_dir, initial_time=0.07,
          size=(800, 600))
Example #14
# Generate stc from dipoles
stc = make_stc_from_dipoles(dipoles, forward['src'])

###############################################################################
# View in 2D and 3D ("glass" brain like 3D plot)
solver = "MxNE" if n_mxne_iter == 1 else "irMxNE"
plot_sparse_source_estimates(forward['src'],
                             stc,
                             bgcolor=(1, 1, 1),
                             fig_name="%s (cond %s)" % (solver, condition),
                             opacity=0.1)

###############################################################################
# Morph onto fsaverage brain and view
morph = mne.compute_source_morph(stc,
                                 subject_from='sample',
                                 subject_to='fsaverage',
                                 spacing=None,
                                 sparse=True,
                                 subjects_dir=subjects_dir)
stc_fsaverage = morph.apply(stc)
src_fsaverage_fname = subjects_dir + '/fsaverage/bem/fsaverage-ico-5-src.fif'
src_fsaverage = mne.read_source_spaces(src_fsaverage_fname)

plot_sparse_source_estimates(src_fsaverage,
                             stc_fsaverage,
                             bgcolor=(1, 1, 1),
                             fig_name="Morphed %s (cond %s)" %
                             (solver, condition),
                             opacity=0.1)
Example #15
# -----------------------------
#
# We can also use volumetric morphing to get the data to fsaverage space. This
# is for example necessary when comparing activity across subjects. Here, we
# will use the scalar beamformer example.
# We pass a :class:`mne.SourceMorph` as the ``src`` argument to
# `mne.VolSourceEstimate.plot`. To save some computational load when applying
# the morph, we will crop the ``stc``:

fetch_fsaverage(subjects_dir)  # ensure fsaverage src exists
fname_fs_src = subjects_dir + '/fsaverage/bem/fsaverage-vol-5-src.fif'

src_fs = mne.read_source_spaces(fname_fs_src)
morph = mne.compute_source_morph(
    forward['src'], subject_from='sample', src_to=src_fs,
    subjects_dir=subjects_dir,
    niter_sdr=[10, 10, 5], niter_affine=[10, 10, 5],  # just for speed
    verbose=True)
stc_fs = morph.apply(stc.crop(0.05, 0.15))

stc_fs.plot(
    src=src_fs, mode='stat_map', initial_time=0.085, subjects_dir=subjects_dir,
    clim=dict(kind='value', pos_lims=lims), verbose=True)

###############################################################################
# References
# ----------
#
# .. footbibliography::
#
#
Example #16
cnx = mne.spatial_src_connectivity(fs_src)
del fs_src
exclude = np.load("{}fsaverage_ico{}_exclude.npy".format(proc_dir, spacing))

for k, v in band_info.items():
    fr = v["freqs"]
    band = k
    X = [[] for wav in wavs for cond in conds]
    #X = [[] for cond in conds]
    for sub_idx, sub in enumerate(subjs):
        src = mne.read_source_spaces("{}{}_ico{}-src.fif".format(
            proc_dir, sub, spacing))
        vertnos = [s["vertno"] for s in src]
        morph = mne.compute_source_morph(src,
                                         subject_from=sub_key[sub],
                                         subject_to="fsaverage",
                                         spacing=spacing,
                                         subjects_dir=subjects_dir,
                                         smooth=None)

        idx = 0
        for cond_idx, cond in enumerate(conds):
            # comment either this out, or the wav loop
            # X_temp = []
            # stc_temp = mne.read_source_estimate(
            #   "{dir}stcs/nc_{a}_{b}_{f0}-{f1}Hz_ico{d}-lh.stc".format(
            #    dir=proc_dir,a=sub,b=cond,f0=fr[0],f1=fr[-1],
            #    d=spacing))
            # stc_temp = morph.apply(stc_temp)
            # X_temp.append(stc_temp.data.transpose(1,0))
            # X[idx].append(np.vstack(X_temp))
            # idx += 1
    stc_neg, freqs_neg = apply_dics_csd(csd_neg, filters_exp)
    del csd_neg
    stc_neg.save(fname=meg_dir + "nc_{}_stc_neg_1-90".format(meg))
    stc_pos, freqs_pos = apply_dics_csd(csd_pos, filters_exp)
    del csd_pos
    stc_pos.save(fname=meg_dir + "nc_{}_stc_pos_1-90".format(meg))
    # calculate the difference between conditions div. by baseline & save to file (for base and for exp)
    stc_diff_tonbas = (stc_tonbas - stc_rest) / stc_rest
    stc_diff_tonbas.save(fname=meg_dir +
                         "nc_{}_stc_diff_tonbas_1-90".format(meg))
    stc_diff_emo = (stc_neg - stc_pos) / stc_ton
    stc_diff_emo.save(fname=meg_dir + "nc_{}_stc_diff_emo_1-90".format(meg))
    # morph the resulting stcs to fsaverage & save  (to be loaded again and averaged)
    #src = mne.read_source_spaces("{}nc_{}-src.fif".format(meg_dir,meg))  ## as no. of vertices doesn't match between src and stc (got reduced by building forward model with mindist 5.0) - stc is used directly for morphing
    morph_bas = mne.compute_source_morph(stc_diff_tonbas,
                                         subject_from=mri,
                                         subject_to="fsaverage",
                                         subjects_dir=mri_dir)
    stc_fsavg_diff_tonbas = morph_bas.apply(stc_diff_tonbas)
    stc_fsavg_diff_tonbas.save(fname=meg_dir +
                               "nc_{}_stc_fsavg_diff_tonbas_1-90".format(meg))
    morph_exp = mne.compute_source_morph(stc_diff_emo,
                                         subject_from=mri,
                                         subject_to="fsaverage",
                                         subjects_dir=mri_dir)
    stc_fsavg_diff_emo = morph_exp.apply(stc_diff_emo)
    stc_fsavg_diff_emo.save(fname=meg_dir +
                            "nc_{}_stc_fsavg_diff_emo_1-90".format(meg))

# now do GROUP ANALYSES - with final subject sample

sub_dict = {
Example #18
def load_evoked_with_sources(subject, evoked_filter_name=None, evoked_filter_not=None, evoked_path='evoked_cleaned', apply_baseline=False, lowpass_evoked=True, morph_sources=True, fake_nave=False):
    """

    :param subject: subject name
    :param evoked_filter_name: element du nom de fichier à inclure
    :param evoked_filter_not: element du nom de fichier à exclure
    :param evoked_path: sous-dossier où récupérer les evoqués (sous 'subject/') : "evoked", "evoked_cleaned" ou "evoked_resid"
    :param apply_baseline: appliquer la baseline avant 0 aux évoqués
    :param morph_sources: normaliser les sources vers fsaverage
    :return:
    """

    # Load evoked
    if evoked_path == 'evoked':
        evoked, path_evo = evoked_funcs.load_evoked(subject=subject, filter_name=evoked_filter_name, filter_not=evoked_filter_not, cleaned=False, evoked_resid=False)
    elif evoked_path == 'evoked_cleaned':
        evoked, path_evo = evoked_funcs.load_evoked(subject=subject, filter_name=evoked_filter_name, filter_not=evoked_filter_not, cleaned=True, evoked_resid=False)
    elif evoked_path == 'evoked_resid':
        evoked, path_evo = evoked_funcs.load_evoked(subject=subject, filter_name=evoked_filter_name, filter_not=evoked_filter_not, cleaned=True, evoked_resid=True)
    evoked = evoked[list(evoked.keys())[0]][0]  # first key

    print('Subject ' + subject + ': evoked and sources from ' + path_evo)

    # Low-pass filter
    if lowpass_evoked:
        print('     Low pass filtering 30Hz')
        evoked = evoked.filter(l_freq=None, h_freq=30)  # default parameters (maybe should filter raw data instead of epochs...)

    # Apply baseline
    if apply_baseline:
        print('     Applying baseline to evoked')
        evoked.apply_baseline(baseline=(-0.050, 0.000))

    # Load inverse operator
    print('     Computing sources')
    meg_subject_dir = op.join(config.meg_dir, subject)
    extension = '_%s-inv' % (config.spacing)
    fname_inv = op.join(meg_subject_dir, subject + config.base_fname.format(**locals()))
    inverse_operator = mne.minimum_norm.read_inverse_operator(fname_inv)

    if fake_nave:
        evoked.nave = 100

    # Source estimates: apply inverse
    snr = 3.0
    lambda2 = 1.0 / snr ** 2
    stc = mne.minimum_norm.apply_inverse(evoked, inverse_operator, lambda2, "dSPM", pick_ori=None)

    # Morph to fsaverage
    if morph_sources:
        print('     Morph to fsaverage')
        morph = mne.compute_source_morph(stc, subject_from=subject, subject_to='fsaverage', subjects_dir=op.join(config.root_path, 'data', 'MRI', 'fs_converted'))
        stc_fsaverage = morph.apply(stc)
        stc = stc_fsaverage

    # Sanity check (high peak value)
    # m = np.round(np.max(abs(stc.data)), 0)
    # if m > 200:
    #     raise ValueError('/!\ Probable issue with sources ' + evoked_filter_name + ' for subject ' + subject + ': max value = ' + str(m))
    # if m > 80:
    #     import warnings
    #     warnings.warn('/!\ Probable issue with sources ' + evoked_filter_name + ' for subject ' + subject + ': max value = ' + str(m))

    return evoked, stc
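
# A usage sketch (hedged: the subject name and filter string are hypothetical,
# and the config paths must be set as assumed inside the function):
#     evoked, stc = load_evoked_with_sources('sub01', evoked_filter_name='items',
#                                            apply_baseline=True)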
Example #19
    '0384'
]

SUBJ_ASD = [
    '0106', '0107', '0139', '0141', '0159', '0160', '0161', '0164', '0253',
    '0254', '0256', '0273', '0274', '0275', '0276', '0346', '0347', '0351',
    '0358', '0380', '0381', '0382', '0383'
]

SUBJECTS = SUBJ_ASD + SUBJ_NT
PATHfrom = '/net/server/data/Archive/aut_gamma/orekhova/KI/'
myPATH = '/net/server/data/Archive/aut_gamma/orekhova/KI/Scripts_bkp/Shishkina/KI/'
subjects_dir = PATHfrom + 'freesurfersubjects'

for subject in SUBJECTS:

    subjpath = PATHfrom + 'SUBJECTS/' + subject + '/ICA_nonotch_crop/epochs/'
    savepath = myPATH + 'Results_Alpha_and_Gamma/'

    #load stcs
    sum_csp = mne.read_source_estimate(savepath + '1_results/CSP_sum/' +
                                       subject + 'sum_CSP_V3-V1_10_17Hz')

    #Setting up SourceMorph for SourceEstimate
    morph = mne.compute_source_morph(sum_csp,
                                     subject_from='Case' + subject,
                                     subject_to='fsaverage5',
                                     subjects_dir=subjects_dir)

    #save
    morph.save(savepath + '1_results/morph_CSP/' + subject + 'CSP')
Example #20
def test_volume_source_morph_round_trip(tmpdir, subject_from, subject_to,
                                        lower, upper, dtype, morph_mat,
                                        monkeypatch):
    """Test volume source estimate morph round-trips well."""
    import nibabel as nib
    from nibabel.processing import resample_from_to
    src = dict()
    if morph_mat:
        # ~1.5 minutes with pos=7. (4157 morphs!) for sample, so only test
        # morph_mat computation mode with a few labels
        label_names = sorted(get_volume_labels_from_aseg(fname_aseg))[1:2]
        if 'sample' in (subject_from, subject_to):
            src['sample'] = setup_volume_source_space(
                'sample',
                subjects_dir=subjects_dir,
                volume_label=label_names,
                mri=fname_aseg)
            assert sum(s['nuse'] for s in src['sample']) == 12
        if 'fsaverage' in (subject_from, subject_to):
            src['fsaverage'] = setup_volume_source_space(
                'fsaverage',
                subjects_dir=subjects_dir,
                volume_label=label_names[:3],
                mri=fname_aseg_fs)
            assert sum(s['nuse'] for s in src['fsaverage']) == 16
    else:
        assert not morph_mat
        if 'sample' in (subject_from, subject_to):
            src['sample'] = mne.read_source_spaces(fname_vol)
            src['sample'][0]['subject_his_id'] = 'sample'
            assert src['sample'][0]['nuse'] == 4157
        if 'fsaverage' in (subject_from, subject_to):
            # Created to save space with:
            #
            # bem = op.join(op.dirname(mne.__file__), 'data', 'fsaverage',
            #               'fsaverage-inner_skull-bem.fif')
            # src_fsaverage = mne.setup_volume_source_space(
            #     'fsaverage', pos=7., bem=bem, mindist=0,
            #     subjects_dir=subjects_dir, add_interpolator=False)
            # mne.write_source_spaces(fname_fs_vol, src_fsaverage,
            #                         overwrite=True)
            #
            # For speed we do it without the interpolator because it's huge.
            src['fsaverage'] = mne.read_source_spaces(fname_fs_vol)
            src['fsaverage'][0].update(vol_dims=np.array([23, 29, 25]),
                                       seg_name='brain')
            _add_interpolator(src['fsaverage'])
            assert src['fsaverage'][0]['nuse'] == 6379
    src_to, src_from = src[subject_to], src[subject_from]
    del src
    # No SDR just for speed once everything works
    kwargs = dict(niter_sdr=(),
                  niter_affine=(1, ),
                  subjects_dir=subjects_dir,
                  verbose=True)
    morph_from_to = compute_source_morph(src=src_from,
                                         src_to=src_to,
                                         subject_to=subject_to,
                                         **kwargs)
    morph_to_from = compute_source_morph(src=src_to,
                                         src_to=src_from,
                                         subject_to=subject_from,
                                         **kwargs)
    nuse = sum(s['nuse'] for s in src_from)
    assert nuse > 10
    use = np.linspace(0, nuse - 1, 10).round().astype(int)
    data = np.eye(nuse)[:, use]
    if dtype is complex:
        data = data * 1j
    vertices = [s['vertno'] for s in src_from]
    stc_from = VolSourceEstimate(data, vertices, 0, 1)
    with catch_logging() as log:
        stc_from_rt = morph_to_from.apply(
            morph_from_to.apply(stc_from, verbose='debug'))
    log = log.getvalue()
    assert 'individual volume morph' in log
    maxs = np.argmax(stc_from_rt.data, axis=0)
    src_rr = np.concatenate([s['rr'][s['vertno']] for s in src_from])
    dists = 1000 * np.linalg.norm(src_rr[use] - src_rr[maxs], axis=1)
    mu = np.mean(dists)
    # fsaverage=5.99; 7.97 without additional src_ras_t fix
    # fsaverage=7.97; 25.4 without src_ras_t fix
    assert lower <= mu < upper, f'round-trip distance {mu}'
    # check that pre_affine is close to identity when subject_to==subject_from
    if subject_to == subject_from:
        for morph in (morph_to_from, morph_from_to):
            assert_allclose(morph.pre_affine.affine, np.eye(4), atol=1e-2)
    # check that power is more or less preserved (labelizing messes with this)
    if morph_mat:
        if subject_to == 'fsaverage':
            limits = (18, 18.5)
        else:
            limits = (7, 7.5)
    else:
        limits = (1, 1.2)
    stc_from_unit = stc_from.copy().crop(0, 0)
    stc_from_unit._data.fill(1.)
    stc_from_unit_rt = morph_to_from.apply(morph_from_to.apply(stc_from_unit))
    assert_power_preserved(stc_from_unit, stc_from_unit_rt, limits=limits)
    if morph_mat:
        fname = tmpdir.join('temp-morph.h5')
        morph_to_from.save(fname)
        morph_to_from = read_source_morph(fname)
        assert morph_to_from.vol_morph_mat is None
        morph_to_from.compute_vol_morph_mat(verbose=True)
        morph_to_from.save(fname, overwrite=True)
        morph_to_from = read_source_morph(fname)
        assert isinstance(morph_to_from.vol_morph_mat, csr_matrix), 'csr'
        # equivalence (plus automatic calling)
        assert morph_from_to.vol_morph_mat is None
        monkeypatch.setattr(mne.morph, '_VOL_MAT_CHECK_RATIO', 0.)
        with catch_logging() as log:
            with pytest.warns(RuntimeWarning, match=r'calling morph\.compute'):
                stc_from_rt_lin = morph_to_from.apply(
                    morph_from_to.apply(stc_from, verbose='debug'))
        assert isinstance(morph_from_to.vol_morph_mat, csr_matrix), 'csr'
        log = log.getvalue()
        assert 'sparse volume morph matrix' in log
        assert_allclose(stc_from_rt.data, stc_from_rt_lin.data)
        del stc_from_rt_lin
        stc_from_unit_rt_lin = morph_to_from.apply(
            morph_from_to.apply(stc_from_unit))
        assert_allclose(stc_from_unit_rt.data, stc_from_unit_rt_lin.data)
        del stc_from_unit_rt_lin
    del stc_from, stc_from_rt
    # before and after morph, check the proportion of vertices
    # that are inside and outside the brainmask.mgz
    brain = nib.load(op.join(subjects_dir, subject_from, 'mri', 'brain.mgz'))
    mask = _get_img_fdata(brain) > 0
    if subject_from == subject_to == 'sample':
        for stc in [stc_from_unit, stc_from_unit_rt]:
            img = stc.as_volume(src_from, mri_resolution=True)
            img = nib.Nifti1Image(  # abs to convert complex
                np.abs(_get_img_fdata(img)[:, :, :, 0]), img.affine)
            img = _get_img_fdata(resample_from_to(img, brain, order=1))
            assert img.shape == mask.shape
            in_ = img[mask].astype(bool).mean()
            out = img[~mask].astype(bool).mean()
            if morph_mat:
                out_max = 0.001
                in_min, in_max = 0.005, 0.007
            else:
                out_max = 0.02
                in_min, in_max = 0.97, 0.98
            assert out < out_max, f'proportion out of volume {out}'
            assert in_min < in_ < in_max, f'proportion inside volume {in_}'
Example #21
# Plot residual
ylim = dict(eeg=[-10, 10], grad=[-400, 400], mag=[-600, 600])
evoked.pick_types(meg=True, eeg=True, exclude='bads')
evoked.plot(ylim=ylim, proj=True, time_unit='s')
residual.pick_types(meg=True, eeg=True, exclude='bads')
residual.plot(ylim=ylim, proj=True, time_unit='s')

###############################################################################
# Generate stc from dipoles
stc = make_stc_from_dipoles(dipoles, forward['src'])

###############################################################################
# View in 2D and 3D ("glass" brain like 3D plot)
solver = "MxNE" if n_mxne_iter == 1 else "irMxNE"
plot_sparse_source_estimates(forward['src'], stc, bgcolor=(1, 1, 1),
                             fig_name="%s (cond %s)" % (solver, condition),
                             opacity=0.1)

###############################################################################
# Morph onto fsaverage brain and view
morph = mne.compute_source_morph(stc, subject_from='sample',
                                 subject_to='fsaverage', spacing=None,
                                 sparse=True, subjects_dir=subjects_dir)
stc_fsaverage = morph.apply(stc)
src_fsaverage_fname = subjects_dir + '/fsaverage/bem/fsaverage-ico-5-src.fif'
src_fsaverage = mne.read_source_spaces(src_fsaverage_fname)

plot_sparse_source_estimates(src_fsaverage, stc_fsaverage, bgcolor=(1, 1, 1),
                             fig_name="Morphed %s (cond %s)" % (solver,
                             condition), opacity=0.1)
Example #22
# We can also use volumetric morphing to get the data to fsaverage space. This
# is for example necessary when comparing activity across subjects. Here, we
# will use the scalar beamformer example.
# We pass a :class:`mne.SourceMorph` as the ``src`` argument to
# `mne.VolSourceEstimate.plot`. To save some computational load when applying
# the morph, we will crop the ``stc``:

fetch_fsaverage(subjects_dir)  # ensure fsaverage src exists
fname_fs_src = subjects_dir + '/fsaverage/bem/fsaverage-vol-5-src.fif'

src_fs = mne.read_source_spaces(fname_fs_src)
morph = mne.compute_source_morph(
    src,
    subject_from='sample',
    src_to=src_fs,
    subjects_dir=subjects_dir,
    niter_sdr=[5, 5, 2],
    niter_affine=[5, 5, 2],
    zooms=7,  # just for speed
    verbose=True)
stc_fs = morph.apply(stc)
del stc

stc_fs.plot(src=src_fs,
            mode='stat_map',
            initial_time=0.085,
            subjects_dir=subjects_dir,
            clim=dict(kind='value', pos_lims=lims),
            verbose=True)

# %%
Example #23
    inv_fname_EMEG_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_EMEG_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_EMEG_SD)
    inv_op_LD = read_inverse_operator(inv_fname_EMEG_LD)

    stc_fname_SD_words = data_path + meg + 'block_SD_words_EMEG'
    stc_fname_LD_words = data_path + meg + 'block_LD_words_EMEG'

    stc_SD_words = mne.read_source_estimate(stc_fname_SD_words)
    stc_LD_words = mne.read_source_estimate(stc_fname_LD_words)

    # # setup source morph
    morph_SD_words = mne.compute_source_morph(
        src=inv_op_SD['src'],
        subject_from=stc_SD_words.subject,
        subject_to=C.subject_to,
        spacing=C.spacing_morph,
        subjects_dir=C.data_path)
    morph_LD_words = mne.compute_source_morph(
        src=inv_op_LD['src'],
        subject_from=stc_LD_words.subject,
        subject_to=C.subject_to,
        spacing=C.spacing_morph,
        subjects_dir=C.data_path)

    stc_SD_words_fsaverage = morph_SD_words.apply(stc_SD_words)
    stc_LD_words_fsaverage = morph_LD_words.apply(stc_LD_words)

    fname_SD_words_fsaverage = C.data_path + meg + 'block_SD_words_EMEG_fsaverage'
    fname_LD_words_fsaverage = C.data_path + meg + 'block_LD_words_EMEG_fsaverage'
Example #24
# ``subject_from`` can typically be inferred from
# :class:`src <mne.SourceSpaces>`,
# and ``subject_to`` is set to 'fsaverage' by default. ``subjects_dir`` can be
# None when set in the environment. In that case SourceMorph can be initialized
# taking ``src`` as only argument. See :class:`mne.SourceMorph` for more
# details.
#
# The default parameter setting for *spacing* will cause the reference volumes
# to be resliced before computing the transform. A value of '5' would cause
# the function to reslice to an isotropic voxel size of 5 mm. The higher this
# value the less accurate but faster the computation will be.
#
# A standard usage for volumetric data reads:

morph = mne.compute_source_morph(inverse_operator['src'],
                                 subject_from='sample', subject_to='fsaverage',
                                 subjects_dir=subjects_dir)
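
# As noted above, when ``subjects_dir`` is set in the environment and
# ``subject_from`` can be inferred from ``src``, the same morph can in
# principle be created with ``src`` as the only argument. A minimal sketch of
# that default-driven call (not executed here):
#     morph = mne.compute_source_morph(inverse_operator['src'])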

###############################################################################
# Apply morph to VolSourceEstimate
# --------------------------------
#
# The morph can be applied to the source estimate data, by giving it as the
# first argument to the :meth:`morph.apply() <mne.SourceMorph.apply>` method:

stc_fsaverage = morph.apply(stc)

###############################################################################
# Convert morphed VolSourceEstimate into NIfTI
# --------------------------------------------
#
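# A minimal sketch of that conversion (``output='nifti1'`` is the same option
# exercised in the volume-morph test in Example #8; this line is an
# illustration added here, not part of the original snippet):
img_fsaverage = morph.apply(stc, output='nifti1')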
Example #25
def SN_functional_connectivity_bands_runs_BL(i, method):
    s = time.time()
    meg = subjects[i]
    sub_to = MRI_sub[i][1:15]
    stc_F_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + 'BL_F_bands_sub' + str(
        i) + '.json'
    stc_O_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + 'BL_O_bands_sub' + str(
        i) + '.json'
    stc_M_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + 'BL_M_bands_sub' + str(
        i) + '.json'
    stc_SD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + 'BL_SD_bands_sub' + str(
        i) + '.json'
    stc_LD_file_name = os.path.expanduser(
        '~'
    ) + '/my_semnet/json_files/connectivity/stc_' + method + 'BL_LD_bands_sub' + str(
        i) + '.json'

    morphed_labels = mne.morph_labels(SN_ROI, subject_to=data_path + sub_to,
                                      subject_from='fsaverage',
                                      subjects_dir=data_path)

    # Reading epochs
    epo_name_LD = data_path + meg + 'block_LD_words_epochs-epo.fif'

    epochs_ld = mne.read_epochs(epo_name_LD, preload=True)

    epochs_LD = epochs_ld['words'].copy().resample(500).crop(-0.200, 0)

    epoch_fname_fruit = data_path + meg + 'block_fruit_epochs-epo.fif'
    epoch_fname_odour = data_path + meg + 'block_odour_epochs-epo.fif'
    epoch_fname_milk = data_path + meg + 'block_milk_epochs-epo.fif'

    epochs_fruit = mne.read_epochs(epoch_fname_fruit, preload=True)
    epochs_odour = mne.read_epochs(epoch_fname_odour, preload=True)
    epochs_milk = mne.read_epochs(epoch_fname_milk, preload=True)

    epochs_f = mne.epochs.combine_event_ids(
        epochs_fruit, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_o = mne.epochs.combine_event_ids(
        epochs_odour, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})
    epochs_m = mne.epochs.combine_event_ids(
        epochs_milk, ['visual', 'hear', 'hand', 'neutral', 'emotional'],
        {'words': 15})

    epochs_f = epochs_f['words'].copy().resample(500).crop(-0.200, 0)
    epochs_o = epochs_o['words'].copy().resample(500).crop(-0.200, 0)
    epochs_m = epochs_m['words'].copy().resample(500).crop(-0.200, 0)

    # Reading inverse operator
    inv_fname_SD = data_path + meg + 'InvOp_SD_EMEG-inv.fif'
    inv_fname_LD = data_path + meg + 'InvOp_LD_EMEG-inv.fif'

    inv_op_SD = read_inverse_operator(inv_fname_SD)
    inv_op_LD = read_inverse_operator(inv_fname_LD)

    stc_f = apply_inverse_epochs(epochs_f,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_o = apply_inverse_epochs(epochs_o,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_m = apply_inverse_epochs(epochs_m,
                                 inv_op_SD,
                                 lambda2,
                                 method='MNE',
                                 pick_ori="normal",
                                 return_generator=False)
    stc_ld = apply_inverse_epochs(epochs_LD,
                                  inv_op_LD,
                                  lambda2,
                                  method='MNE',
                                  pick_ori="normal",
                                  return_generator=False)

    src_SD = inv_op_SD['src']
    src_LD = inv_op_LD['src']

    # Construct indices to estimate connectivity between the label time course
    # and all source space time courses
    vertices_SD = [src_SD[j]['vertno'] for j in range(2)]
    n_signals_tot = 1 + len(vertices_SD[0]) + len(vertices_SD[1])
    indices = seed_target_indices([0], np.arange(1, n_signals_tot))

    morph_SD = mne.compute_source_morph(src=inv_op_SD['src'],\
                    subject_from=sub_to, subject_to=C.subject_to,\
                    spacing=C.spacing_morph, subjects_dir=C.data_path)
    morph_LD = mne.compute_source_morph(src=inv_op_LD['src'],\
                    subject_from=sub_to, subject_to=C.subject_to,\
                    spacing=C.spacing_morph, subjects_dir=C.data_path)

    for k in np.arange(0, 6):
        print('[i,k]: ', i, k)
        morphed_labels[k].name = C.rois_labels[k]

        seed_ts_f = mne.extract_label_time_course(stc_f, morphed_labels[k], \
                   src_SD, mode='mean_flip',return_generator=False)
        seed_ts_o = mne.extract_label_time_course(stc_o, morphed_labels[k], \
                   src_SD, mode='mean_flip',return_generator=False)
        seed_ts_m = mne.extract_label_time_course(stc_m, morphed_labels[k], \
                   src_SD, mode='mean_flip',return_generator=False)
        seed_ts_ld = mne.extract_label_time_course(stc_ld, morphed_labels[k], \
                   src_LD, mode='mean_flip',return_generator=False)

        for f in np.arange(0, len(C.con_freq_band) - 1):
            print('[i,k,f]: ', i, k, f)
            f_min = C.con_freq_band[f]
            f_max = C.con_freq_band[f + 1]
            print(f_min, f_max)

            comb_ts_f = zip(seed_ts_f, stc_f)
            comb_ts_o = zip(seed_ts_o, stc_o)
            comb_ts_m = zip(seed_ts_m, stc_m)
            comb_ts_ld = zip(seed_ts_ld, stc_ld)

            con_F, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                comb_ts_f,
                method=method,
                mode='fourier',
                indices=indices,
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)

            con_O, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                comb_ts_o,
                method=method,
                mode='fourier',
                indices=indices,
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)

            con_M, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                comb_ts_m,
                method=method,
                mode='fourier',
                indices=indices,
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)
            con_LD, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                comb_ts_ld,
                method=method,
                mode='fourier',
                indices=indices,
                sfreq=500,
                fmin=f_min,
                fmax=f_max,
                faverage=True,
                n_jobs=10)

            con_SD = (con_F + con_O + con_M) / 3

            con_stc_F = mne.SourceEstimate(con_F, vertices=vertices_SD,\
                          tmin=-.200, tstep=2e-3,subject=sub_to)
            con_stc_O = mne.SourceEstimate(con_O, vertices=vertices_SD,\
                          tmin=-.200, tstep=2e-3,subject=sub_to)
            con_stc_M = mne.SourceEstimate(con_M, vertices=vertices_SD,\
                          tmin=-.200, tstep=2e-3,subject=sub_to)
            con_stc_SD = mne.SourceEstimate(con_SD, vertices=vertices_SD,\
                         tmin=-.200, tstep=2e-3,subject=sub_to)

            con_stc_LD = mne.SourceEstimate(con_LD, vertices=vertices_SD,\
                         tmin=-.200, tstep=2e-3,subject=sub_to)

            stc_total_F[k][f] = morph_SD.apply(con_stc_F)
            stc_total_O[k][f] = morph_SD.apply(con_stc_O)
            stc_total_M[k][f] = morph_SD.apply(con_stc_M)
            stc_total_SD[k][f] = morph_SD.apply(con_stc_SD)
            stc_total_LD[k][f] = morph_LD.apply(con_stc_LD)

    with open(stc_F_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_F, fp)
    with open(stc_O_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_O, fp)
    with open(stc_M_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_M, fp)
    with open(stc_SD_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_SD, fp)
    with open(stc_LD_file_name, "wb") as fp:  #Pickling
        pickle.dump(stc_total_LD, fp)
    e = time.time()
    print(e - s)
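
# A hypothetical call for the first subject, using coherence as the
# connectivity measure (``method`` is passed straight to
# ``spectral_connectivity``, so values such as 'coh' or 'pli' apply). This
# assumes the script-level globals used above (``subjects``, ``MRI_sub``,
# ``C``, ``lambda2``, ``SN_ROI``, ``stc_total_*``) are defined:
# SN_functional_connectivity_bands_runs_BL(0, 'coh')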
Exemplo n.º 26
0
# details.
#
# The default parameter setting for *zooms* will cause the reference volumes
# to be resliced before computing the transform. A value of '5' would cause
# the function to reslice to an isotropic voxel size of 5 mm. The higher this
# value the less accurate but faster the computation will be.
#
# The recommended way to use this is to morph to a specific destination source
# space so that different ``subject_from`` morphs will go to the same space.
# A standard usage for volumetric data reads:

src_fs = mne.read_source_spaces(fname_src_fsaverage)
morph = mne.compute_source_morph(
    inverse_operator['src'],
    subject_from='sample',
    subjects_dir=subjects_dir,
    niter_affine=[10, 10, 5],
    niter_sdr=[10, 10, 5],  # just for speed
    src_to=src_fs,
    verbose=True)

###############################################################################
# Apply morph to VolSourceEstimate
# --------------------------------
#
# The morph can be applied to the source estimate data by giving it as the
# first argument to the :meth:`morph.apply() <mne.SourceMorph.apply>` method:

stc_fsaverage = morph.apply(stc)

###############################################################################
# Convert morphed VolSourceEstimate into NIfTI
Exemplo n.º 27
0
# Set parameters
# --------------
data_path = sample.data_path()
stc_fname = data_path + '/MEG/sample/sample_audvis-meg-lh.stc'
subjects_dir = data_path + '/subjects'
src_fname = subjects_dir + '/fsaverage/bem/fsaverage-ico-5-src.fif'

# Load stc in common cortical space (fsaverage)
stc = mne.read_source_estimate(stc_fname)
stc.resample(50, npad='auto')

# Read the source space we are morphing to
src = mne.read_source_spaces(src_fname)
fsave_vertices = [s['vertno'] for s in src]
morph = mne.compute_source_morph(stc, 'sample', 'fsaverage',
                                 spacing=fsave_vertices, smooth=20,
                                 subjects_dir=subjects_dir)
stc = morph.apply(stc)
n_vertices_fsave, n_times = stc.data.shape
tstep = stc.tstep

n_subjects1, n_subjects2 = 7, 9
print('Simulating data for %d and %d subjects.' % (n_subjects1, n_subjects2))

#    Let's make sure our results replicate, so set the seed.
np.random.seed(0)
X1 = np.random.randn(n_vertices_fsave, n_times, n_subjects1) * 10
X2 = np.random.randn(n_vertices_fsave, n_times, n_subjects2) * 10
X1[:, :, :] += stc.data[:, :, np.newaxis]
# make the activity bigger for the second set of subjects
X2[:, :, :] += 3 * stc.data[:, :, np.newaxis]
Exemplo n.º 28
0
    for stc_fname in fnmatch.filter(subject_files, 'sme*energy*_21-lh.stc'):
        final_path = subject_folder + stc_fname
        MORPHEDFILE = subject_folder + stc_fname[:-7] + '_MNE'

        if check_existing_morphs == 1:
            if not os.path.isfile(MORPHEDFILE + '-lh.stc'):
                print('Morphing file: ' + stc_fname)
                print('--------------------------------------------------')
                # Load stc in common cortical space (fsaverage)
                stc_from = mne.read_source_estimate(final_path)
                # Morph data to average brain
                morph = mne.compute_source_morph(stc_from,
                                                 subject_from=SUBJECT,
                                                 subject_to='fsaverage',
                                                 subjects_dir=SUBJECTS_DIR,
                                                 spacing=5,
                                                 smooth=None,
                                                 warn=True,
                                                 verbose=True)
                stc_fsaverage = morph.apply(stc_from)
                # save
                stc_fsaverage.save(MORPHEDFILE, ftype='stc', verbose=True)
                log.write(MORPHEDFILE + ' done\n')
            else:
                print(MORPHEDFILE + ' already morphed')
                log.write(MORPHEDFILE + ' exists already\n')
        else:
            print('Morphing file: ' + stc_fname)
            print('--------------------------------------------------')
            # Load stc in common cortical space (fsaverage)
            stc_from = mne.read_source_estimate(final_path)
Exemplo n.º 29
0
#
# License: BSD (3-clause)

import mne

data_dir = mne.datasets.sample.data_path()
subjects_dir = data_dir + '/subjects'
stc_path = data_dir + '/MEG/sample/sample_audvis-meg-eeg'

stc = mne.read_source_estimate(stc_path, 'sample')

# First, morph the data to fsaverage_sym, for which we have left-right
# registrations:
stc = mne.compute_source_morph(stc,
                               'sample',
                               'fsaverage_sym',
                               smooth=5,
                               warn=False,
                               subjects_dir=subjects_dir).apply(stc)

# Compute a morph-matrix mapping the right to the left hemisphere,
# and vice-versa.
morph = mne.compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 spacing=stc.vertices,
                                 warn=False,
                                 subjects_dir=subjects_dir,
                                 xhemi=True,
                                 verbose='error')  # creating morph map
stc_xhemi = morph.apply(stc)
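
# A typical next step (a sketch, not from the snippet above): since
# ``stc_xhemi`` lives on the same vertices as ``stc``, the hemispheres can be
# contrasted directly.
diff = stc.copy()
diff.data -= stc_xhemi.data  # cross-hemisphere difference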
Exemplo n.º 30
0
                                fwd_base,
                                csd_bas,
                                pick_ori='max-power',
                                rank=None,
                                inversion='single',
                                weight_norm=None,
                                normalize_fwd=True,
                                real_filter=False)
        # apply the DICS beamformers to get source estimates for rest and ton using common filters & save to file
        stc_rest, freqs_rest = apply_dics_csd(csd_rest, filters_bas)
        stc_ton, freqs_ton = apply_dics_csd(csd_ton, filters_bas)
        # calculate the difference of each condition to the tone baseline & save to file
        stc_tonbas_diff = (stc_ton - stc_rest) / stc_rest
        # morph the resulting stcs to fsaverage & save  (to be loaded again and averaged)
        morph = mne.compute_source_morph(stc_tonbas_diff,
                                         subject_from=mri,
                                         subject_to="fsaverage",
                                         subjects_dir=mri_dir)
        stc_fs_tonbas_diff = morph.apply(stc_tonbas_diff)
        stc_fs_tonbas_diff.save(
            fname=meg_dir + "nc_{}_stc_fs_tonbas_diff_F_{}".format(meg, freq))

# prepare for source diff permutation t-test
src = mne.read_source_spaces("{}fsaverage_ico5-src.fif".format(meg_dir))
connectivity = mne.spatial_src_connectivity(src)
threshold = 2.086

# DO GROUP PLOTS AND CLUSTER PERMUTATIONS

save_dir = "D:/NEMO_analyses/plots/ton_vs_rest/"
freqs = {
    "theta": list(np.arange(4, 7)),
Exemplo n.º 31
0
def test_xhemi_morph():
    """Test cross-hemisphere morphing."""
    stc = read_source_estimate(fname_stc, subject='sample')
    # smooth 1 for speed where possible
    smooth = 4
    spacing = 4
    n_grade_verts = 2562
    stc = compute_source_morph(stc,
                               'sample',
                               'fsaverage_sym',
                               smooth=smooth,
                               warn=False,
                               spacing=spacing,
                               subjects_dir=subjects_dir).apply(stc)
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=1,
                                 xhemi=True,
                                 warn=False,
                                 spacing=[stc.vertices[0], []],
                                 subjects_dir=subjects_dir)
    stc_xhemi = morph.apply(stc)
    assert stc_xhemi.data.shape[0] == n_grade_verts
    assert stc_xhemi.rh_data.shape[0] == 0
    assert len(stc_xhemi.vertices[1]) == 0
    assert stc_xhemi.lh_data.shape[0] == n_grade_verts
    assert len(stc_xhemi.vertices[0]) == n_grade_verts
    # complete reversal mapping
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=stc.vertices,
                                 subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts * 2, ) * 2
    assert mm.size > n_grade_verts * 2
    assert mm[:n_grade_verts, :n_grade_verts].size == 0  # L to L
    assert mm[n_grade_verts:, n_grade_verts:].size == 0  # R to R
    assert mm[n_grade_verts:, :n_grade_verts].size > n_grade_verts  # L to R
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts  # R to L
    # more complicated reversal mapping
    vertices_use = [stc.vertices[0], np.arange(10242)]
    n_src_verts = len(vertices_use[1])
    assert vertices_use[0].shape == (n_grade_verts, )
    assert vertices_use[1].shape == (n_src_verts, )
    # ensure it's sufficiently different to manifest round-trip errors
    assert np.in1d(vertices_use[1], stc.vertices[1]).mean() < 0.3
    morph = compute_source_morph(stc,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=vertices_use,
                                 subjects_dir=subjects_dir)
    mm = morph.morph_mat
    assert mm.shape == (n_grade_verts + n_src_verts, n_grade_verts * 2)
    assert mm[:n_grade_verts, :n_grade_verts].size == 0
    assert mm[n_grade_verts:, n_grade_verts:].size == 0
    assert mm[:n_grade_verts, n_grade_verts:].size > n_grade_verts
    assert mm[n_grade_verts:, :n_grade_verts].size > n_src_verts
    # morph forward then back
    stc_xhemi = morph.apply(stc)
    morph = compute_source_morph(stc_xhemi,
                                 'fsaverage_sym',
                                 'fsaverage_sym',
                                 smooth=smooth,
                                 xhemi=True,
                                 warn=False,
                                 spacing=stc.vertices,
                                 subjects_dir=subjects_dir)
    stc_return = morph.apply(stc_xhemi)
    for hi in range(2):
        assert_array_equal(stc_return.vertices[hi], stc.vertices[hi])
    correlation = np.corrcoef(stc.data.ravel(), stc_return.data.ravel())[0, 1]
    assert correlation > 0.9  # not great b/c of sparse grade + small smooth
Exemplo n.º 32
0
# ``subject_from`` can typically be inferred from
# :class:`src <mne.SourceSpaces>`,
# and ``subject_to`` is set to 'fsaverage' by default. ``subjects_dir`` can be
# None when set in the environment. In that case SourceMorph can be initialized
# taking ``src`` as the only argument. See :class:`mne.SourceMorph` for more
# details.
#
# The default parameter setting for *spacing* will cause the reference volumes
# to be resliced before computing the transform. A value of '5' would cause
# the function to reslice to an isotropic voxel size of 5 mm. The higher this
# value the less accurate but faster the computation will be.
#
# A standard usage for volumetric data reads:

morph = mne.compute_source_morph(inverse_operator['src'],
                                 subject_from='sample', subject_to='fsaverage',
                                 subjects_dir=subjects_dir)

###############################################################################
# Apply morph to VolSourceEstimate
# --------------------------------
#
# The morph can be applied to the source estimate data by giving it as the
# first argument to the :meth:`morph.apply() <mne.SourceMorph.apply>` method:

stc_fsaverage = morph.apply(stc)

###############################################################################
# Convert morphed VolSourceEstimate into NIfTI
# --------------------------------------------
#
Exemplo n.º 33
0
def test_morph_stc_dense():
    """Test morphing stc."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_to = read_source_estimate(fname_fmorph)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    assert_array_equal(stc_to.time_as_index([0.09, 0.1], use_rounding=True),
                       [0, len(stc_to.times) - 1])

    # After dep change this to:
    stc_to1 = compute_source_morph(subject_to=subject_to,
                                   spacing=3,
                                   smooth=12,
                                   src=stc_from,
                                   subjects_dir=subjects_dir).apply(stc_from)
    assert_allclose(stc_to.data, stc_to1.data, atol=1e-5)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert np.corrcoef(mean_to, mean_from).min() > 0.999

    vertices_to = grade_to_vertices(subject_to,
                                    grade=3,
                                    subjects_dir=subjects_dir)

    # make sure we can fill by morphing
    with pytest.warns(RuntimeWarning, match='consider increasing'):
        morph = compute_source_morph(stc_from,
                                     subject_from,
                                     subject_to,
                                     spacing=None,
                                     smooth=1,
                                     subjects_dir=subjects_dir)
    stc_to5 = morph.apply(stc_from)
    assert stc_to5.data.shape[0] == 163842 + 163842

    # Morph vector data
    stc_vec = _real_vec_stc()
    stc_vec_to1 = compute_source_morph(stc_vec,
                                       subject_from,
                                       subject_to,
                                       subjects_dir=subjects_dir,
                                       spacing=vertices_to,
                                       smooth=1,
                                       warn=False).apply(stc_vec)
    assert stc_vec_to1.subject == subject_to
    assert stc_vec_to1.tmin == stc_vec.tmin
    assert stc_vec_to1.tstep == stc_vec.tstep
    assert len(stc_vec_to1.lh_vertno) == 642
    assert len(stc_vec_to1.rh_vertno) == 642

    # Degenerate conditions

    # Morphing to a density that is too high should raise an informative error
    # (here we need to push to grade=6, but for some subjects even grade=5
    # will break)
    with pytest.raises(ValueError, match='Cannot use icosahedral grade 6 '):
        compute_source_morph(stc_to1,
                             subject_from=subject_to,
                             subject_to=subject_from,
                             spacing=6,
                             subjects_dir=subjects_dir)
    del stc_to1

    with pytest.raises(ValueError, match='smooth.* has to be at least 1'):
        compute_source_morph(stc_from,
                             subject_from,
                             subject_to,
                             spacing=5,
                             smooth=-1,
                             subjects_dir=subjects_dir)

    # subject from mismatch
    with pytest.raises(ValueError, match="subject_from does not match"):
        compute_source_morph(stc_from,
                             subject_from='foo',
                             subjects_dir=subjects_dir)

    # only one set of vertices
    with pytest.raises(ValueError, match="grade.*list must have two elements"):
        compute_source_morph(stc_from,
                             subject_from=subject_from,
                             spacing=[vertices_to[0]],
                             subjects_dir=subjects_dir)
Exemplo n.º 34
0
                    fname_stc = C.fname_STC(C, C.resolution_subdir, subject,
                                            stctext)

                    fname_mph = C.fname_STC(C, C.resolution_subdir, subject,
                                            stctext + '_mph')

                    # read existing source estimate
                    print('Reading: %s.' % fname_stc)
                    stc = mne.read_source_estimate(fname_stc, subject)

                    if morph_mat == []:

                        print('Computing morphing matrix.')
                        morph_mat = mne.compute_source_morph(
                            src=stc,
                            subject_from=subject,
                            subject_to=C.stc_morph,
                            subjects_dir=C.subjects_dir)

                        fname_mphmat = C.fname_STC(C, C.resolution_subdir,
                                                   subject, 'mphmat')

                        morph_mat.save(fname_mphmat, overwrite=True)

                    stc_mph = morph_mat.apply(stc)

                    print('Writing morphed to: %s.' % fname_mph)
                    stc_mph.save(fname_mph)

            # plot individual PSFs and CTFs
Exemplo n.º 35
0
#
# .. note:: This is not generally true for other subjects! The set of vertices
#           used for ``fsaverage`` with ico-5 spacing was designed to be
#           special. ico-5 spacings for other subjects (or other spacings
#           for fsaverage) must be calculated and will not be consecutive
#           integers.
#
# If src was not defined, the morph will not actually be precomputed, because
# we lack the vertices *from* which we want to compute it. Instead the morph
# will be set up and, when it is applied, the actual transformation will be
# computed on the fly.
#
# Initialize SourceMorph for SourceEstimate

morph = mne.compute_source_morph(stc,
                                 subject_from='sample',
                                 subject_to='fsaverage',
                                 subjects_dir=subjects_dir)

###############################################################################
# Apply morph to (Vector) SourceEstimate
# --------------------------------------
#
# The morph is applied to the source estimate data by passing the estimate as
# the first argument to the :meth:`apply() <mne.SourceMorph.apply>` method of
# the morph we computed above.

stc_fsaverage = morph.apply(stc)
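
# As the note above explains, for ``fsaverage`` with the default ico-5
# spacing the morphed estimate lands on the consecutive vertex numbers
# 0..10241 in each hemisphere; a quick sanity check (a sketch, assuming
# ``import numpy as np``):
assert all(np.array_equal(v, np.arange(10242))
           for v in stc_fsaverage.vertices)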

###############################################################################
# Plot results
# ------------
    hemi='rh', subjects_dir=subjects_dir,
    clim=dict(kind='value', lims=[8, 12, 15]), views='lateral',
    initial_time=time_max, time_unit='s', size=(800, 800), smoothing_steps=5)
brain = stc.plot(**surfer_kwargs)
brain.add_foci(vertno_max, coords_as_verts=True, hemi='rh', color='blue',
               scale_factor=0.6, alpha=0.5)
brain.add_text(0.1, 0.9, 'dSPM (plus location of maximal activation)', 'title',
               font_size=14)

###############################################################################
# Morph data to average brain
# ---------------------------

# setup source morph
morph = mne.compute_source_morph(
    src=inverse_operator['src'], subject_from=stc.subject,
    subject_to='fsaverage', spacing=5,  # to ico-5
    subjects_dir=subjects_dir)
# morph data
stc_fsaverage = morph.apply(stc)

brain = stc_fsaverage.plot(**surfer_kwargs)
brain.add_text(0.1, 0.9, 'Morphed to fsaverage', 'title', font_size=20)
del stc_fsaverage

###############################################################################
# Dipole orientations
# -------------------
# The ``pick_ori`` parameter of the
# :func:`mne.minimum_norm.apply_inverse` function controls
# the orientation of the dipoles. One useful setting is ``pick_ori='vector'``,
# which will return an estimate that not only contains the source power at
Exemplo n.º 37
0
#
# .. note:: This is not generally true for other subjects! The set of vertices
#           used for ``fsaverage`` with ico-5 spacing was designed to be
#           special. ico-5 spacings for other subjects (or other spacings
#           for fsaverage) must be calculated and will not be consecutive
#           integers.
#
# If src was not defined, the morph will not actually be precomputed, because
# we lack the vertices *from* which we want to compute it. Instead the morph
# will be set up and, when it is applied, the actual transformation will be
# computed on the fly.
#
# Initialize SourceMorph for SourceEstimate

morph = mne.compute_source_morph(stc, subject_from='sample',
                                 subject_to='fsaverage',
                                 subjects_dir=subjects_dir)

###############################################################################
# Apply morph to (Vector) SourceEstimate
# --------------------------------------
#
# The morph is applied to the source estimate data by passing the estimate as
# the first argument to the :meth:`apply() <mne.SourceMorph.apply>` method of
# the morph we computed above.

stc_fsaverage = morph.apply(stc)

###############################################################################
# Plot results
# ------------
Exemplo n.º 38
0
def test_volume_source_morph():
    """Test volume source estimate morph, special cases and exceptions."""
    import nibabel as nib
    tempdir = _TempDir()
    inverse_operator_vol = read_inverse_operator(fname_inv_vol)
    stc_vol = read_source_estimate(fname_vol, 'sample')

    # check for invalid input type
    with pytest.raises(TypeError, match='src must be an instance of'):
        compute_source_morph(src=42)

    # check for raising an error if neither
    # inverse_operator_vol['src'][0]['subject_his_id'] nor subject_from is set,
    # but attempting to perform a volume morph
    src = inverse_operator_vol['src']
    src[0]['subject_his_id'] = None

    with pytest.raises(ValueError, match='subject_from could not be inferred'):
        compute_source_morph(src=src, subjects_dir=subjects_dir)

    # check infer subject_from from src[0]['subject_his_id']
    src[0]['subject_his_id'] = 'sample'

    with pytest.raises(ValueError, match='Inter-hemispheric morphing'):
        compute_source_morph(src=src, subjects_dir=subjects_dir, xhemi=True)

    with pytest.raises(ValueError, match='Only surface.*sparse morph'):
        compute_source_morph(src=src, sparse=True, subjects_dir=subjects_dir)

    # terrible quality but fast
    zooms = 20
    kwargs = dict(zooms=zooms, niter_sdr=(1,), niter_affine=(1,))
    source_morph_vol = compute_source_morph(
        subjects_dir=subjects_dir, src=inverse_operator_vol['src'], **kwargs)
    shape = (13,) * 3  # for the given zooms

    assert source_morph_vol.subject_from == 'sample'

    # the brain used in sample data has shape (255, 255, 255)
    assert tuple(source_morph_vol.sdr_morph.domain_shape) == shape

    assert tuple(source_morph_vol.pre_affine.domain_shape) == shape

    # proves the above
    assert_array_equal(source_morph_vol.zooms, (zooms,) * 3)

    # assure proper src shape
    mri_size = (src[0]['mri_height'], src[0]['mri_depth'], src[0]['mri_width'])
    assert source_morph_vol.src_data['src_shape_full'] == mri_size

    fwd = read_forward_solution(fname_fwd_vol)
    source_morph_vol = compute_source_morph(
        fwd['src'], 'sample', 'sample', subjects_dir=subjects_dir,
        **kwargs)

    # check wrong subject_to
    with pytest.raises(IOError, match='cannot read file'):
        compute_source_morph(fwd['src'], 'sample', '42',
                             subjects_dir=subjects_dir)

    # two different ways of saving
    source_morph_vol.save(op.join(tempdir, 'vol'))

    # check loading
    source_morph_vol_r = read_source_morph(
        op.join(tempdir, 'vol-morph.h5'))

    # check for invalid file name handling
    with pytest.raises(IOError, match='not found'):
        read_source_morph(op.join(tempdir, '42'))

    # check morph
    stc_vol_morphed = source_morph_vol.apply(stc_vol)

    # check output as NIfTI
    assert isinstance(source_morph_vol.apply(stc_vol, output='nifti2'),
                      nib.Nifti2Image)

    # check for subject_from mismatch
    source_morph_vol_r.subject_from = '42'
    with pytest.raises(ValueError, match='subject_from must match'):
        source_morph_vol_r.apply(stc_vol_morphed)

    # check if nifti is in grid morph space with voxel_size == spacing
    img_morph_res = source_morph_vol.apply(stc_vol, output='nifti1')

    # assure morph spacing
    assert isinstance(img_morph_res, nib.Nifti1Image)
    assert img_morph_res.header.get_zooms()[:3] == (zooms,) * 3

    # assure src shape
    img_mri_res = source_morph_vol.apply(stc_vol, output='nifti1',
                                         mri_resolution=True)
    assert isinstance(img_mri_res, nib.Nifti1Image)
    assert (img_mri_res.shape == (src[0]['mri_height'], src[0]['mri_depth'],
                                  src[0]['mri_width']) +
            (img_mri_res.shape[3],))

    # check if nifti is at the given resolution with voxel_size == (5., 5., 5.)
    img_any_res = source_morph_vol.apply(stc_vol, output='nifti1',
                                         mri_resolution=(5., 5., 5.))
    assert isinstance(img_any_res, nib.Nifti1Image)
    assert img_any_res.header.get_zooms()[:3] == (5., 5., 5.)

    # check if morph outputs correct data
    assert isinstance(stc_vol_morphed, VolSourceEstimate)

    # check if loaded and saved objects contain the same
    assert (all([read == saved for read, saved in
                 zip(sorted(source_morph_vol_r.__dict__),
                     sorted(source_morph_vol.__dict__))]))

    # check __repr__
    assert 'volume' in repr(source_morph_vol)

    # check Nifti2Image
    assert isinstance(
        source_morph_vol.apply(stc_vol, mri_resolution=True,
                               mri_space=True, output='nifti2'),
        nib.Nifti2Image)

    # Degenerate conditions
    with pytest.raises(TypeError, match='output must be'):
        source_morph_vol.apply(stc_vol, output=1)
    with pytest.raises(ValueError, match='subject_from does not match'):
        compute_source_morph(src=src, subject_from='42')
    with pytest.raises(ValueError, match='output must be one of'):
        source_morph_vol.apply(stc_vol, output='42')
    with pytest.raises(TypeError, match='subject_to must'):
        compute_source_morph(src, 'sample', None,
                             subjects_dir=subjects_dir)
    # Check that a non-boolean voxel size on a non-morphed stc raises a
    # ValueError. Note that this requires dipy to be importable, so that an
    # ImportError is not raised before the actual voxel-size check runs.
    with pytest.raises(ValueError, match='Cannot infer original voxel size'):
        stc_vol.as_volume(inverse_operator_vol['src'], mri_resolution=4)
    hemi='rh', subjects_dir=subjects_dir,
    clim=dict(kind='value', lims=[8, 12, 15]), views='lateral',
    initial_time=time_max, time_unit='s', size=(800, 800), smoothing_steps=10)
brain = stc.plot(**surfer_kwargs)
brain.add_foci(vertno_max, coords_as_verts=True, hemi='rh', color='blue',
               scale_factor=0.6, alpha=0.5)
brain.add_text(0.1, 0.9, 'dSPM (plus location of maximal activation)', 'title',
               font_size=14)

###############################################################################
# Morph data to average brain
# ---------------------------

# setup source morph
morph = mne.compute_source_morph(
    src=inverse_operator['src'], subject_from=stc.subject,
    subject_to='fsaverage', spacing=5,  # to ico-5
    subjects_dir=subjects_dir)
# morph data
stc_fsaverage = morph.apply(stc)

brain = stc_fsaverage.plot(**surfer_kwargs)
brain.add_text(0.1, 0.9, 'Morphed to fsaverage', 'title', font_size=20)
del stc_fsaverage

###############################################################################
# Dipole orientations
# -------------------
# The ``pick_ori`` parameter of the
# :func:`mne.minimum_norm.apply_inverse` function controls
# the orientation of the dipoles. One useful setting is ``pick_ori='vector'``,
# which will return an estimate that not only contains the source power at
X[:, :, :, 0] += condition1.data[:, :, np.newaxis]
X[:, :, :, 1] += condition2.data[:, :, np.newaxis]

###############################################################################
# It's a good idea to spatially smooth the data, and for visualization
# purposes, let's morph these to fsaverage, which is a grade 5 source space
# with vertices 0:10242 for each hemisphere. Usually you'd have to morph
# each subject's data separately (and you might want to use morph_data
# instead), but here since all estimates are on 'sample' we can use one
# morph matrix for all the heavy lifting.

# Read the source space we are morphing to
src = mne.read_source_spaces(src_fname)
fsave_vertices = [s['vertno'] for s in src]
morph_mat = mne.compute_source_morph(
    src=inverse_operator['src'], subject_to='fsaverage',
    spacing=fsave_vertices, subjects_dir=subjects_dir).morph_mat
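
# As the comment above notes, with real per-subject data each subject's
# estimate would get its own morph before stacking, e.g. (a sketch with
# hypothetical ``subjects`` and ``stcs`` lists):
# X = np.stack(
#     [mne.compute_source_morph(s, subject_from=subj, subject_to='fsaverage',
#                               spacing=fsave_vertices,
#                               subjects_dir=subjects_dir).apply(s).data
#      for subj, s in zip(subjects, stcs)], axis=-1)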

n_vertices_fsave = morph_mat.shape[0]

#    We have to change the shape for the dot() to work properly
X = X.reshape(n_vertices_sample, n_times * n_subjects * 2)
print('Morphing data.')
X = morph_mat.dot(X)  # morph_mat is a sparse matrix
X = X.reshape(n_vertices_fsave, n_times, n_subjects, 2)

###############################################################################
# Finally, we want to compare the overall activity levels in each condition,
# the diff is taken along the last axis (condition). The negative sign makes
# it so condition1 > condition2 shows up as "red blobs" (instead of blue).
X = np.abs(X)  # only magnitude
Exemplo n.º 41
0
def test_morph_stc_dense():
    """Test morphing stc."""
    subject_from = 'sample'
    subject_to = 'fsaverage'
    stc_from = read_source_estimate(fname_smorph, subject='sample')
    stc_to = read_source_estimate(fname_fmorph)
    # make sure we can specify grade
    stc_from.crop(0.09, 0.1)  # for faster computation
    stc_to.crop(0.09, 0.1)  # for faster computation
    assert_array_equal(stc_to.time_as_index([0.09, 0.1], use_rounding=True),
                       [0, len(stc_to.times) - 1])

    # After dep change this to:
    stc_to1 = compute_source_morph(
        subject_to=subject_to, spacing=3, smooth=12, src=stc_from,
        subjects_dir=subjects_dir).apply(stc_from)
    assert_allclose(stc_to.data, stc_to1.data, atol=1e-5)

    mean_from = stc_from.data.mean(axis=0)
    mean_to = stc_to1.data.mean(axis=0)
    assert np.corrcoef(mean_to, mean_from).min() > 0.999

    vertices_to = grade_to_vertices(subject_to, grade=3,
                                    subjects_dir=subjects_dir)

    # make sure we can fill by morphing
    with pytest.warns(RuntimeWarning, match='consider increasing'):
        morph = compute_source_morph(
            stc_from, subject_from, subject_to, spacing=None, smooth=1,
            subjects_dir=subjects_dir)
    stc_to5 = morph.apply(stc_from)
    assert stc_to5.data.shape[0] == 163842 + 163842

    # Morph vector data
    stc_vec = _real_vec_stc()
    stc_vec_to1 = compute_source_morph(
        stc_vec, subject_from, subject_to, subjects_dir=subjects_dir,
        spacing=vertices_to, smooth=1, warn=False).apply(stc_vec)
    assert stc_vec_to1.subject == subject_to
    assert stc_vec_to1.tmin == stc_vec.tmin
    assert stc_vec_to1.tstep == stc_vec.tstep
    assert len(stc_vec_to1.lh_vertno) == 642
    assert len(stc_vec_to1.rh_vertno) == 642

    # Degenerate conditions

    # Morphing to a density that is too high should raise an informative error
    # (here we need to push to grade=6, but for some subjects even grade=5
    # will break)
    with pytest.raises(ValueError, match='Cannot use icosahedral grade 6 '):
        compute_source_morph(
            stc_to1, subject_from=subject_to, subject_to=subject_from,
            spacing=6, subjects_dir=subjects_dir)
    del stc_to1

    with pytest.raises(ValueError, match='smooth.* has to be at least 1'):
        compute_source_morph(
            stc_from, subject_from, subject_to, spacing=5, smooth=-1,
            subjects_dir=subjects_dir)

    # subject from mismatch
    with pytest.raises(ValueError, match="does not match source space subj"):
        compute_source_morph(stc_from, subject_from='foo',
                             subjects_dir=subjects_dir)

    # only one set of vertices
    with pytest.raises(ValueError, match="grade.*list must have two elements"):
        compute_source_morph(
            stc_from, subject_from=subject_from, spacing=[vertices_to[0]],
            subjects_dir=subjects_dir)
Exemplo n.º 42
0
                                      inv_op_LD,
                                      lambda2,
                                      method='MNE',
                                      pick_ori="normal",
                                      return_generator=False)
        times = epochs_sd.times
        stc_SD = []
        stc_LD = []

        for n in np.arange(0, len(stc_sd)):
            stc_SD.append(stc_baseline_correction(stc_sd[n], times))
            stc_LD.append(stc_baseline_correction(stc_ld[n], times))

        # Morphing source signals onto fsaverage
        morph_SD = mne.compute_source_morph(src=inv_op_SD['src'],
                                            subject_from=stc_sd[0].subject,
                                            subject_to=C.subject_to,
                                            spacing=C.spacing_morph,
                                            subjects_dir=C.data_path)
        morph_LD = mne.compute_source_morph(src=inv_op_LD['src'],
                                            subject_from=stc_ld[0].subject,
                                            subject_to=C.subject_to,
                                            spacing=C.spacing_morph,
                                            subjects_dir=C.data_path)

        stc_fsaverage_SD = []
        stc_fsaverage_LD = []

        for n in np.arange(0, len(stc_SD)):
            stc_fsaverage_SD.append(morph_SD.apply(stc_SD[n]))
            stc_fsaverage_LD.append(morph_LD.apply(stc_LD[n]))

        # stc_fsaverage_SD=[morph_SD.apply(stc) for stc in stc_sd]
        # stc_fsaverage_LD=[morph_LD.apply(stc) for stc in stc_ld]
Exemplo n.º 43
0



# stc_ld = apply_inverse_epochs(epochs_LD, inv_op_LD,lambda2,method ='MNE',
#                         pick_ori="normal", return_generator=False)
src_SD = inv_op_SD['src']
src_LD = inv_op_LD['src']
# Construct indices to estimate connectivity between the label time course
# and all source space time courses
vertices_SD = [src_SD[j]['vertno'] for j in range(2)]
n_signals_tot = 1 + len(vertices_SD[0]) + len(vertices_SD[1])
indices = seed_target_indices([0], np.arange(1, n_signals_tot))

morph_SD = mne.compute_source_morph(src=inv_op_SD['src'],
                                    subject_from=sub_to, subject_to=C.subject_to,
                                    spacing=C.spacing_morph,
                                    subjects_dir=C.data_path)

# morph_LD = mne.compute_source_morph(src=inv_op_LD['src'],
#                                     subject_from=sub_to, subject_to=C.subject_to,
#                                     spacing=C.spacing_morph,
#                                     subjects_dir=C.data_path)

stc_SD = []
stc_LD = []
stc_F = []
stc_O = []
stc_M = []

for n in np.arange(0, len(stc_sd)):
X[:, :, :, 0] += condition1.data[:, :, np.newaxis]
X[:, :, :, 1] += condition2.data[:, :, np.newaxis]

# %%
# It's a good idea to spatially smooth the data, and for visualization
# purposes, let's morph these to fsaverage, which is a grade 5 source space
# with vertices 0:10242 for each hemisphere. Usually you'd have to morph
# each subject's data separately (and you might want to use morph_data
# instead), but here since all estimates are on 'sample' we can use one
# morph matrix for all the heavy lifting.

# Read the source space we are morphing to
src = mne.read_source_spaces(src_fname)
fsave_vertices = [s['vertno'] for s in src]
morph_mat = mne.compute_source_morph(src=inverse_operator['src'],
                                     subject_to='fsaverage',
                                     spacing=fsave_vertices,
                                     subjects_dir=subjects_dir).morph_mat

n_vertices_fsave = morph_mat.shape[0]

# We have to change the shape for the dot() to work properly
X = X.reshape(n_vertices_sample, n_times * n_subjects * 2)
print('Morphing data.')
X = morph_mat.dot(X)  # morph_mat is a sparse matrix
X = X.reshape(n_vertices_fsave, n_times, n_subjects, 2)

# %%
# Finally, we want to compare the overall activity levels in each condition,
# the diff is taken along the last axis (condition). The negative sign makes
# it so condition1 > condition2 shows up as "red blobs" (instead of blue).
X = np.abs(X)  # only magnitude
Exemplo n.º 45
0
def test_morphed_source_space_return():
    """Test returning a morphed source space to the original subject."""
    # let's create some random data on fsaverage
    data = rng.randn(20484, 1)
    tmin, tstep = 0, 1.
    src_fs = read_source_spaces(fname_fs)
    stc_fs = SourceEstimate(data, [s['vertno'] for s in src_fs],
                            tmin, tstep, 'fsaverage')
    n_verts_fs = sum(len(s['vertno']) for s in src_fs)

    # Create our morph source space
    src_morph = morph_source_spaces(src_fs, 'sample',
                                    subjects_dir=subjects_dir)
    n_verts_sample = sum(len(s['vertno']) for s in src_morph)
    assert n_verts_fs == n_verts_sample

    # Morph the data over using standard methods
    stc_morph = compute_source_morph(
        src_fs, 'fsaverage', 'sample',
        spacing=[s['vertno'] for s in src_morph], smooth=1,
        subjects_dir=subjects_dir, warn=False).apply(stc_fs)
    assert stc_morph.data.shape[0] == n_verts_sample

    # We can now pretend like this was real data we got e.g. from an inverse.
    # To be complete, let's remove some vertices
    keeps = [np.sort(rng.permutation(np.arange(len(v)))[:len(v) - 10])
             for v in stc_morph.vertices]
    stc_morph = SourceEstimate(
        np.concatenate([stc_morph.lh_data[keeps[0]],
                        stc_morph.rh_data[keeps[1]]]),
        [v[k] for v, k in zip(stc_morph.vertices, keeps)], tmin, tstep,
        'sample')

    # Return it to the original subject
    stc_morph_return = stc_morph.to_original_src(
        src_fs, subjects_dir=subjects_dir)

    # This should fail (has too many verts in SourceMorph)
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        morph = compute_source_morph(
            src_morph, subject_from='sample',
            spacing=stc_morph_return.vertices, smooth=1,
            subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='vertices do not match'):
        morph.apply(stc_morph)

    # Compare to the original data
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        stc_morph_morph = compute_source_morph(
            src=stc_morph, subject_from='sample',
            spacing=stc_morph_return.vertices, smooth=1,
            subjects_dir=subjects_dir).apply(stc_morph)

    assert_equal(stc_morph_return.subject, stc_morph_morph.subject)
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])
    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Explicitly test having two vertices map to the same target vertex. We
    # simulate this by having two vertices be at the same position.
    src_fs2 = src_fs.copy()
    vert1, vert2 = src_fs2[0]['vertno'][:2]
    src_fs2[0]['rr'][vert1] = src_fs2[0]['rr'][vert2]
    stc_morph_return = stc_morph.to_original_src(
        src_fs2, subjects_dir=subjects_dir)

    # test to_original_src method result equality
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])

    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Degenerate cases
    stc_morph.subject = None  # no .subject provided
    pytest.raises(ValueError, stc_morph.to_original_src,
                  src_fs, subject_orig='fsaverage', subjects_dir=subjects_dir)
    stc_morph.subject = 'sample'
    del src_fs[0]['subject_his_id']  # no name in src_fsaverage
    pytest.raises(ValueError, stc_morph.to_original_src,
                  src_fs, subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'fsaverage'  # name mismatch
    pytest.raises(ValueError, stc_morph.to_original_src,
                  src_fs, subject_orig='foo', subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'sample'
    src = read_source_spaces(fname)  # wrong source space
    pytest.raises(RuntimeError, stc_morph.to_original_src,
                  src, subjects_dir=subjects_dir)