Example #1
def test_apply_mne_inverse_raw():
    """Test MNE with precomputed inverse operator on Raw."""
    start = 3
    stop = 10
    raw = read_raw_fif(fname_raw)
    label_lh = read_label(fname_label % 'Aud-lh')
    _, times = raw[0, start:stop]
    inverse_operator = read_inverse_operator(fname_full)
    with pytest.raises(ValueError, match='has not been prepared'):
        apply_inverse_raw(raw, inverse_operator, lambda2, prepared=True)
    inverse_operator = prepare_inverse_operator(inverse_operator, nave=1,
                                                lambda2=lambda2, method="dSPM")
    for pick_ori in [None, "normal", "vector"]:
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                label=label_lh, start=start, stop=stop, nave=1,
                                pick_ori=pick_ori, buffer_size=None,
                                prepared=True)

        stc2 = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                 label=label_lh, start=start, stop=stop,
                                 nave=1, pick_ori=pick_ori,
                                 buffer_size=3, prepared=True)

        if pick_ori is None:
            assert (np.all(stc.data > 0))
            assert (np.all(stc2.data > 0))

        assert (stc.subject == 'sample')
        assert (stc2.subject == 'sample')
        assert_array_almost_equal(stc.times, times)
        assert_array_almost_equal(stc2.times, times)
        assert_array_almost_equal(stc.data, stc2.data)
Example #2
    def _update(self):
        mne_info = self.traverse_back_and_find('mne_info')
        bads = mne_info['bads']
        if bads != self._bad_channels:
            self.logger.info('Found new bad channels {}; '.format(bads) +
                             'updating inverse operator')
            # self.inverse_operator = make_inverse_operator(self.fwd, mne_info)
            self.inverse_operator = make_inverse_operator(self.fwd,
                                                          mne_info,
                                                          depth=self.depth,
                                                          loose=self.loose,
                                                          fixed=self.fixed)
            self.inverse_operator = prepare_inverse_operator(
                self.inverse_operator, nave=100,
                lambda2=self.lambda2, method=self.method)
            # self._inverse_model_matrix = matrix_from_inverse_operator(
            #     inverse_operator=self.inverse_operator, mne_info=mne_info,
            #     snr=self.snr, method=self.method)
            self._bad_channels = bads

        input_array = self.parent.output
        raw_array = mne.io.RawArray(input_array, mne_info, verbose='ERROR')
        raw_array.pick_types(eeg=True, meg=False, stim=False, exclude='bads')
        # data = raw_array.get_data()
        # self.output = self._apply_inverse_model_matrix(data)
        stc = apply_inverse_raw(raw_array, self.inverse_operator,
                                lambda2=self.lambda2, method=self.method,
                                prepared=True)
        self.output = stc.data
Example #3
def test_inverse_ctf_comp():
    """Test interpolation with compensated CTF data."""
    raw = mne.io.read_raw_ctf(fname_raw_ctf).crop(0, 0)
    raw.apply_gradient_compensation(1)
    sphere = make_sphere_model()
    cov = make_ad_hoc_cov(raw.info)
    src = mne.setup_volume_source_space(
        pos=dict(rr=[[0., 0., 0.01]], nn=[[0., 1., 0.]]))
    fwd = make_forward_solution(raw.info, None, src, sphere, eeg=False)
    raw.apply_gradient_compensation(0)
    with pytest.raises(RuntimeError, match='Compensation grade .* not match'):
        make_inverse_operator(raw.info, fwd, cov, loose=1.)
    raw.apply_gradient_compensation(1)
    inv = make_inverse_operator(raw.info, fwd, cov, loose=1.)
    apply_inverse_raw(raw, inv, 1. / 9.)  # smoke test
    raw.apply_gradient_compensation(0)
    with pytest.raises(RuntimeError, match='Compensation grade .* not match'):
        apply_inverse_raw(raw, inv, 1. / 9.)
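The test above hinges on the compensation grade of the raw data matching the grade the forward model (and hence the inverse operator) was computed with. A minimal sketch of that check, assuming `raw` and `inv` objects like the ones above and assuming grade 1 was used when building the forward model:

desired_grade = 1  # assumed: the grade used when computing the forward model
if raw.compensation_grade != desired_grade:
    raw.apply_gradient_compensation(desired_grade)
stc = apply_inverse_raw(raw, inv, lambda2=1. / 9.)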
Example #4
def compute_ts_inv_sol(raw, fwd_filename, cov_fname, snr, inv_method, aseg):
    """Compute ts inverse solution."""
    import os.path as op
    import numpy as np
    import mne
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from nipype.utils.filemanip import split_filename as split_f

    print(('***** READ FWD SOL %s *****' % fwd_filename))
    forward = mne.read_forward_solution(fwd_filename)

    # Convert to surface orientation for cortically constrained
    # inverse modeling
    if not aseg:
        forward = mne.convert_forward_solution(forward, surf_ori=True)

    lambda2 = 1.0 / snr**2

    # compute inverse operator
    print('***** COMPUTE INV OP *****')
    noise_cov = mne.read_cov(cov_fname)
    inverse_operator = make_inverse_operator(raw.info,
                                             forward,
                                             noise_cov,
                                             loose=0.2,
                                             depth=0.8)

    # apply inverse operator to the time windows [t_start, t_stop]s
    # TEST
    t_start = 0  # sec
    t_stop = 3  # sec
    start, stop = raw.time_as_index([t_start, t_stop])
    print(('***** APPLY INV OP ***** [%d %d]sec' % (t_start, t_stop)))
    stc = apply_inverse_raw(raw,
                            inverse_operator,
                            lambda2,
                            inv_method,
                            label=None,
                            start=start,
                            stop=stop,
                            pick_ori=None)

    print('***')
    print(('stc dim ' + str(stc.shape)))
    print('***')

    subj_path, basename, ext = split_f(raw.info['filename'])
    data = stc.data

    print(('data dim ' + str(data.shape)))

    # save results in .npy file that will be the input for spectral node
    print('***** SAVE SOL *****')
    ts_file = op.abspath(basename + '.npy')
    np.save(ts_file, data)

    return ts_file
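A hypothetical call sketch for `compute_ts_inv_sol`; the file paths below are placeholders, not files referenced by the example itself:

import mne

raw = mne.io.read_raw_fif('sub-01_task-rest_raw.fif', preload=True)  # placeholder path
ts_file = compute_ts_inv_sol(raw,
                             fwd_filename='sub-01-fwd.fif',   # placeholder path
                             cov_fname='sub-01-cov.fif',      # placeholder path
                             snr=1.0,
                             inv_method='MNE',
                             aseg=False)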
Example #5
def test_apply_mne_inverse_fixed_raw():
    """Test MNE with fixed-orientation inverse operator on Raw."""
    raw = read_raw_fif(fname_raw)
    start = 3
    stop = 10
    _, times = raw[0, start:stop]
    label_lh = read_label(fname_label % 'Aud-lh')

    # create a fixed-orientation inverse operator
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=False,
                                    surf_ori=True)
    noise_cov = read_cov(fname_cov)
    pytest.raises(ValueError, make_inverse_operator,
                  raw.info, fwd, noise_cov, loose=1., fixed=True)
    inv_op = make_inverse_operator(raw.info, fwd, noise_cov,
                                   fixed=True, use_cps=True)

    inv_op2 = prepare_inverse_operator(inv_op, nave=1,
                                       lambda2=lambda2, method="dSPM")
    stc = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                            label=label_lh, start=start, stop=stop, nave=1,
                            pick_ori=None, buffer_size=None, prepared=True)

    stc2 = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=3, prepared=True)

    stc3 = apply_inverse_raw(raw, inv_op, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=None)

    assert (stc.subject == 'sample')
    assert (stc2.subject == 'sample')
    assert_array_almost_equal(stc.times, times)
    assert_array_almost_equal(stc2.times, times)
    assert_array_almost_equal(stc3.times, times)
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.data, stc3.data)
Example #6
def compute_ts_inv_sol(raw, fwd_filename, cov_fname, snr, inv_method, aseg):
    import os.path as op
    import numpy as np
    import mne
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from nipype.utils.filemanip import split_filename as split_f

    print('***** READ FWD SOL %s *****' % fwd_filename)
    forward = mne.read_forward_solution(fwd_filename)

    # Convert to surface orientation for cortically constrained
    # inverse modeling
    if not aseg:
        forward = mne.convert_forward_solution(forward, surf_ori=True)

    lambda2 = 1.0 / snr ** 2

    # compute inverse operator
    print('***** COMPUTE INV OP *****')
    noise_cov = mne.read_cov(cov_fname)
    inverse_operator = make_inverse_operator(raw.info, forward, noise_cov,
                                             loose=0.2, depth=0.8)

    # apply inverse operator to the time windows [t_start, t_stop]s
    # TEST
    t_start = 0  # sec
    t_stop = 3  # sec
    start, stop = raw.time_as_index([t_start, t_stop])
    print('***** APPLY INV OP ***** [%d %d]sec' % (t_start, t_stop))
    stc = apply_inverse_raw(raw, inverse_operator, lambda2, inv_method,
                            label=None,
                            start=start, stop=stop, pick_ori=None)

    print('***')
    print('stc dim ' + str(stc.shape))
    print('***')

    subj_path, basename, ext = split_f(raw.info['filename'])
    data = stc.data

    print('data dim ' + str(data.shape))

    # save results in .npy file that will be the input for spectral node
    print('***** SAVE SOL *****')
    ts_file = op.abspath(basename + '.npy')
    np.save(ts_file, data)

    return ts_file
Example #7
    def _update(self):
        input_array = self.input_node.output
        last_slice = last_sample(input_array)
        n_src = self.mne_inv['nsource']
        n_times = input_array.shape[1]
        output_mce = np.empty([n_src, n_times])

        raw_slice = mne.io.RawArray(np.expand_dims(last_slice, axis=1),
                                    self.mne_info,
                                    verbose='ERROR')
        raw_slice.pick_types(eeg=True, meg=False, stim=False, exclude='bads')
        raw_slice.set_eeg_reference(ref_channels='average', projection=True)

        # ------------------- get dipole orientations --------------------- #
        stc_slice = apply_inverse_raw(raw_slice,
                                      self.mne_inv,
                                      pick_ori='vector',
                                      method='MNE',
                                      lambda2=1,
                                      verbose='ERROR')
        Q = normalize(stc_slice.data[:, :, 0])  # dipole orientations
        # ----------------------------------------------------------------- #

        # -------- setup linprog params -------- #
        n_sen = self.A_non_ori.shape[0]
        A_eq = np.empty([n_sen, n_src])
        for i in range(n_src):
            A_eq[:, i] = self.A_non_ori[:, i * 3:(i + 1) * 3] @ Q[i, :].T
        data_slice = raw_slice.get_data()[:, 0]
        b_eq = self.Un.T @ data_slice
        c = np.ones(A_eq.shape[1])
        # -------------------------------------- #

        with nostdout():
            sol = linprog(c,
                          A_eq=A_eq,
                          b_eq=b_eq,
                          method='interior-point',
                          bounds=(0, None),
                          options={'disp': False})
        output_mce[:, :] = sol.x[:, np.newaxis]

        self.output = output_mce
        self.sol = sol
        return Q, A_eq, data_slice, b_eq, c
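The snippet relies on helpers (`last_sample`, `nostdout`, `normalize`) defined elsewhere in the project. A plausible sketch of `normalize`, assuming it performs row-wise unit-norm scaling of the free-orientation dipole moments so that `Q` holds unit orientation vectors:

import numpy as np


def normalize(vectors):
    # Assumed helper: scale each row (one dipole moment per source) to unit
    # L2 norm; rows with zero norm are left unchanged to avoid division by zero.
    norms = np.linalg.norm(vectors, axis=1, keepdims=True)
    norms[norms == 0] = 1.0
    return vectors / norms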
Example #8
    def calculate_inverse_solution(self, data, method='dSPM'):
        """
        Calculates the L2 MNE inverse solution for given data
        :param data: instance of Raw data
        :param method: 'MNE' | 'dSPM' | 'sLORETA'
            Use minimum norm, dSPM or sLORETA.
        :return: source estimate of the raw data with inverse operator applied
        """
        # Orient the forward operator with surface coordinates
        fwd = mne.convert_forward_solution(self.fwd, surf_ori=True)
        inv = mne.minimum_norm.make_inverse_operator(data.info,
                                                     fwd,
                                                     self.cov,
                                                     loose=self.loose,
                                                     depth=self.depth)
        stc = apply_inverse_raw(data, inv, lambda2=self.lambda2, method=method)
        return stc
Example #9
def test_apply_inverse_cov(method, pick_ori):
    """Test MNE with precomputed inverse operator on cov."""
    raw = read_raw_fif(fname_raw, preload=True)
    # use 10 sec of data
    raw.crop(0, 10)

    raw.filter(1, None)
    label_lh = read_label(fname_label % 'Aud-lh')

    # test with a free ori inverse
    inverse_operator = read_inverse_operator(fname_inv)

    data_cov = compute_raw_covariance(raw, tstep=None)

    with pytest.raises(ValueError, match='has not been prepared'):
        apply_inverse_cov(data_cov, raw.info, inverse_operator,
                          lambda2=lambda2, prepared=True)

    this_inv_op = prepare_inverse_operator(inverse_operator, nave=1,
                                           lambda2=lambda2, method=method)

    raw_ori = 'normal' if pick_ori == 'normal' else 'vector'
    stc_raw = apply_inverse_raw(
        raw, this_inv_op, lambda2, method, label=label_lh, nave=1,
        pick_ori=raw_ori, prepared=True)
    stc_cov = apply_inverse_cov(
        data_cov, raw.info, this_inv_op, method=method, pick_ori=pick_ori,
        label=label_lh, prepared=True, lambda2=lambda2)
    n_sources = np.prod(stc_cov.data.shape[:-1])
    raw_data = stc_raw.data.reshape(n_sources, -1)
    exp_res = np.diag(np.cov(raw_data, ddof=1)).copy()
    exp_res *= 1 if raw_ori == pick_ori else 3.
    # There seems to be some precision penalty when combining orientations,
    # but it's probably acceptable
    rtol = 5e-4 if pick_ori is None else 1e-12
    assert_allclose(exp_res, stc_cov.data.ravel(), rtol=rtol)

    with pytest.raises(ValueError, match='Invalid value'):
        apply_inverse_cov(
            data_cov, raw.info, this_inv_op, method=method, pick_ori='vector')
Example #10
# Save the source time courses to disk:

stc.save('mne_dSPM_inverse')

##############################################################################
# Now, let's compute dSPM on a raw file within a label:

fname_label = data_path + '/MEG/sample/labels/Aud-lh.label'
label = mne.read_label(fname_label)

##############################################################################
# Compute inverse solution during the first 15s:

from mne.minimum_norm import apply_inverse_raw  # noqa
start, stop = raw.time_as_index([0, 15])  # read the first 15s of data
stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label,
                        start, stop)

##############################################################################
# Save result in stc files:

stc.save('mne_dSPM_raw_inverse_Aud')

##############################################################################
# What else can you do?
# ^^^^^^^^^^^^^^^^^^^^^
#
#     - detect heart beat QRS component
#     - detect eye blinks and EOG artifacts
#     - compute SSP projections to remove ECG or EOG artifacts
#     - compute Independent Component Analysis (ICA) to remove artifacts or
#       select latent sources
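As a minimal sketch of one item from the list above, SSP projectors for EOG artifacts could be computed and added to the raw data before applying the inverse; the n_grad/n_mag/n_eeg counts below are illustrative choices, not prescribed by the tutorial:

from mne.preprocessing import compute_proj_eog  # noqa

# Compute EOG projectors from the raw data and attach them so they are
# applied in subsequent processing.
eog_projs, eog_events = compute_proj_eog(raw, n_grad=1, n_mag=1, n_eeg=1)
raw.add_proj(eog_projs)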
Example #11
lambda2 = 1.0 / snr**2
method = "sLORETA"  # use sLORETA method (could also be MNE or dSPM)

# Load data
raw = mne.io.read_raw_fif(fname_raw)
inverse_operator = read_inverse_operator(fname_inv)
label = mne.read_label(fname_label)

raw.set_eeg_reference('average', projection=True)  # set average reference.
start, stop = raw.time_as_index([0, 15])  # read the first 15s of data

# Compute inverse solution
stc = apply_inverse_raw(raw,
                        inverse_operator,
                        lambda2,
                        method,
                        label,
                        start,
                        stop,
                        pick_ori=None)

# Save result in stc files
stc.save('mne_%s_raw_inverse_%s' % (method, label_name))

###############################################################################
# View activation time-series
plt.plot(1e3 * stc.times, stc.data[::100, :].T)
plt.xlabel('time (ms)')
plt.ylabel('%s value' % method)
plt.show()
Example #12
def test_compute_LF_matrix():
    import os
    import os.path as op
    import nipype.pipeline.engine as pe
    from nipype.interfaces.mne import WatershedBEM
    import mne
    import mne.io as io
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from mne.report import Report
    from nipype.utils.filemanip import split_filename as split_f
    main_path = '/home/karim/Documents/pasca/data/resting_state/'
    sbj_id = 'K0002'
    sbj_dir = op.join(main_path, 'FSF')
    bem_dir = op.join(sbj_dir, sbj_id, 'bem')
    surface_dir = op.join(sbj_dir, sbj_id, 'bem/watershed')
    data_dir = op.join(main_path, 'MEG')
    raw_fname = op.join(data_dir, '%s/%s_rest_tsss_mc.fif' % (sbj_id, sbj_id))
    raw = io.Raw(raw_fname, preload=True)
    picks = mne.pick_types(raw.info, meg=True, ref_meg=False, exclude='bads')
    raw.filter(l_freq=0.1, h_freq=300, picks=picks, method='iir', n_jobs=2)
    raw.resample(sfreq=300, npad=0)
    report = Report()
    surfaces = [sbj_id + '_brain_surface',
                sbj_id + '_inner_skull_surface',
                sbj_id + '_outer_skull_surface',
                sbj_id + '_outer_skin_surface']
    new_surfaces = ['brain.surf',
                    'inner_skull.surf',
                    'outer_skull.surf',
                    'outer_skin.surf']
    sbj_inner_skull_fname = op.join(bem_dir, sbj_id + '-' + new_surfaces[1])
    inner_skull_fname = op.join(bem_dir, new_surfaces[1])
    if not (op.isfile(sbj_inner_skull_fname) or op.isfile(inner_skull_fname)):
        bem_IF = WatershedBEM()
        bem_IF.inputs.subject_id = sbj_id
        bem_IF.inputs.subjects_dir = sbj_dir
        bem_IF.inputs.atlas_mode = True
        bem_IF.run()
        for i in range(len(surfaces)):
            os.system('cp %s %s' % (op.join(surface_dir, surfaces[i]), op.join(bem_dir, sbj_id + '-' + new_surfaces[i])))

    else:
        print('*** inner skull surface exists!!!')
    bem = op.join(bem_dir, '%s-5120-bem-sol.fif' % sbj_id)
    if not op.isfile(bem):
        os.system('$MNE_ROOT/bin/mne_setup_forward_model --subject ' + sbj_id + ' --homog --surf --ico 4')
    else:
        print('*** BEM solution file exists!!!')
    src_fname = op.join(bem_dir, '%s-ico-5-src.fif' % sbj_id)
    if not op.isfile(src_fname):
        src = mne.setup_source_space(sbj_id, fname=True, spacing='ico5', subjects_dir=sbj_dir, overwrite=True, n_jobs=2)
    else:
        print('*** source space file exists!!!')
        src = mne.read_source_spaces(src_fname)
    trans_fname = op.join(data_dir, '%s/%s-trans.fif' % (sbj_id, sbj_id))
    data_path, basename, ext = split_f(raw_fname)
    fwd_filename = op.join(data_path, '%s-fwd.fif' % basename)
    forward = mne.make_forward_solution(raw_fname, trans_fname, src, bem, fwd_filename, n_jobs=2, overwrite=True)
    forward = mne.convert_forward_solution(forward, surf_ori=True)
    snr = 1.0
    lambda2 = 1.0 / snr ** 2
    method = 'MNE'
    reject = dict(mag=4e-12, grad=4e-10, eog=0.00025)
    noise_cov = mne.compute_raw_data_covariance(raw, picks=picks, reject=reject)
    inverse_operator = make_inverse_operator(raw.info, forward, noise_cov, loose=0.2, depth=0.8)
    start, stop = raw.time_as_index([0, 30])
    stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label=None, start=start, stop=stop, pick_ori=None)
    print('***')
    print('stc dim ' + str(stc.shape))
    print('***')
    subj_path, basename, ext = split_f(raw_fname)
    stc_filename = op.join(subj_path, basename)
    stc.save(stc_filename)
    report_filename = op.join(subj_path, basename + '-BEM-report.html')
    print(report_filename)
    report.save(report_filename, open_browser=False, overwrite=True)
    return
Example #13
channels = [
    'EEG 001', 'EEG 002', 'EEG 003', 'EEG 004', 'EEG 005', 'EEG 006',
    'EEG 007', 'EEG 008', 'EEG 009', 'EEG 010', 'EEG 011', 'EEG 012',
    'EEG 013', 'EEG 014', 'EEG 015', 'EEG 016', 'EEG 017', 'EEG 018',
    'EEG 019', 'EEG 020', 'EEG 021', 'EEG 022', 'EEG 023', 'EEG 024',
    'EEG 025', 'EEG 026', 'EEG 027', 'EEG 028', 'EEG 029', 'EEG 030',
    'EEG 031', 'EEG 032', 'EEG 033', 'EEG 034', 'EEG 035', 'EEG 036',
    'EEG 037', 'EEG 038', 'EEG 039', 'EEG 040', 'EEG 041', 'EEG 042',
    'EEG 043', 'EEG 044', 'EEG 045', 'EEG 046', 'EEG 047', 'EEG 048',
    'EEG 049', 'EEG 050', 'EEG 051', 'EEG 052', 'EEG 054', 'EEG 055',
    'EEG 056', 'EEG 057', 'EEG 058', 'EEG 059', 'EEG 060'
]
#info = mne.create_info(ch_names=channels, sfreq=fs, montage=mne.channels.read_montage(kind='standard_primed'), ch_types=['eeg' for ch in channels])
info = inv['info']
#info['sfreq'] = 500
data = np.random.normal(loc=0,
                        scale=0.00001,
                        size=(5000, len(info["ch_names"])))
info.plot_sensors()
raw = mne.io.RawArray(data.T, info)
info.plot_sensors()
#raw.set_eeg_reference()
#raw.plot()
#plt.show()

sources = apply_inverse_raw(raw, inv, 0.01)

print("Method: %s" % inv['methods'])
print("fMRI prior: %s" % inv['fmri_prior'])
print("Number of sources: %s" % inv['nsource'])
print("Number of channels: %s" % inv['nchan'])
Example #14
def compute_rois_inv_sol(raw_filename,
                         sbj_id,
                         sbj_dir,
                         fwd_filename,
                         cov_fname,
                         is_epoched=False,
                         events_id=[],
                         t_min=None,
                         t_max=None,
                         is_evoked=False,
                         snr=1.0,
                         inv_method='MNE',
                         parc='aparc',
                         aseg=False,
                         aseg_labels=[],
                         save_stc=False,
                         is_fixed=False):
    """Compute the inverse solution on raw/epoched data.

    This function returns the average time series computed in the N_r regions
    of the source space defined by the specified cortical parcellation.

    Parameters
    ----------
    raw_filename : str
        filename of the raw/epoched data
    sbj_id : str
        subject name
    sbj_dir : str
        Freesurfer directory
    fwd_filename : str
        filename of the forward operator
    cov_fname : str
        filename of the noise covariance matrix
    is_epoched : bool
        if True and events_id is None, the input data are epoched data
        in the -epo.fif format;
        if True and events_id is not None, the raw data are epoched
        according to events_id and the t_min and t_max values
    events_id: dict
        the dict of events
    t_min, t_max: int
        define the time interval in which to epoch the raw data
    is_evoked: bool
        if True the raw data will be averaged according to the events
        contained in the dict events_id
    inv_method : str
        the inverse method to use; possible choices: MNE, dSPM, sLORETA
    snr : float
        the SNR value used to define the regularization parameter
    parc: str
        the parcellation defining the ROIs atlas in the source space
    aseg: bool
        if True a mixed source space will be created and the sub cortical
        regions defined in aseg_labels will be added to the source space
    aseg_labels: list
        list of substructures we want to include in the mixed source space
    save_stc: bool
        if True the stc will be saved

    Returns
    -------
    ts_file : str
        filename of the file where the ROIs time series are saved
    labels_file : str
        filename of the file where the ROIs of the parcellation are saved
    label_names_file : str
        filename of the file where the names of the ROIs of the
        parcellation are saved
    label_coords_file : str
        filename of the file where the coordinates of the centroids of
        the ROIs of the parcellation are saved
    """
    import os.path as op
    import numpy as np
    import mne

    from mne.io import read_raw_fif
    from mne import read_epochs
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from mne.minimum_norm import apply_inverse_epochs, apply_inverse
    from mne import get_volume_labels_from_src

    from nipype.utils.filemanip import split_filename as split_f

    from ephypype.preproc import create_reject_dict
    from ephypype.source_space import create_mni_label_files

    try:
        traits.undefined(events_id)
    except NameError:
        events_id = None

    print(('\n*** READ raw filename %s ***\n' % raw_filename))
    if is_epoched and events_id is None:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    else:
        raw = read_raw_fif(raw_filename, preload=True)
        #        raw.set_eeg_reference()
        info = raw.info

    subj_path, basename, ext = split_f(raw_filename)

    print(('\n*** READ noise covariance %s ***\n' % cov_fname))
    noise_cov = mne.read_cov(cov_fname)

    print(('\n*** READ FWD SOL %s ***\n' % fwd_filename))
    forward = mne.read_forward_solution(fwd_filename)

    if not aseg:
        print(('\n*** fixed orientation {} ***\n'.format(is_fixed)))
        forward = mne.convert_forward_solution(forward,
                                               surf_ori=True,
                                               force_fixed=is_fixed)

    lambda2 = 1.0 / snr**2

    # compute inverse operator
    print('\n*** COMPUTE INV OP ***\n')
    if is_fixed:
        loose = None
        depth = None
        pick_ori = None
    elif aseg:
        loose = 1
        depth = None
        pick_ori = None
    else:
        loose = 0.2
        depth = 0.8
        pick_ori = 'normal'

    print(('\n *** loose {}  depth {} ***\n'.format(loose, depth)))
    inverse_operator = make_inverse_operator(info,
                                             forward,
                                             noise_cov,
                                             loose=loose,
                                             depth=depth,
                                             fixed=is_fixed)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print('\n*** APPLY INV OP ***\n')
    if is_epoched and events_id is not None:
        events = mne.find_events(raw)
        picks = mne.pick_types(info, meg=True, eog=True, exclude='bads')
        reject = create_reject_dict(info)

        if is_evoked:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(None, 0),
                                reject=reject)
            evoked = [epochs[k].average() for k in events_id]
            snr = 3.0
            lambda2 = 1.0 / snr**2

            ev_list = list(events_id.items())
            for k in range(len(events_id)):
                stc = apply_inverse(evoked[k],
                                    inverse_operator,
                                    lambda2,
                                    inv_method,
                                    pick_ori=pick_ori)

                print(('\n*** STC for event %s ***\n' % ev_list[k][0]))
                stc_file = op.abspath(basename + '_' + ev_list[k][0])

                print('***')
                print(('stc dim ' + str(stc.shape)))
                print('***')

                if not aseg:
                    stc.save(stc_file)

        else:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(None, 0),
                                reject=reject)
            stc = apply_inverse_epochs(epochs,
                                       inverse_operator,
                                       lambda2,
                                       inv_method,
                                       pick_ori=pick_ori)

            print('***')
            print(('len stc %d' % len(stc)))
            print('***')

    elif is_epoched and events_id is None:
        stc = apply_inverse_epochs(epochs,
                                   inverse_operator,
                                   lambda2,
                                   inv_method,
                                   pick_ori=pick_ori)
        print('***')
        print(('len stc %d' % len(stc)))
        print('***')
    else:
        stc = apply_inverse_raw(raw,
                                inverse_operator,
                                lambda2,
                                inv_method,
                                label=None,
                                start=None,
                                stop=None,
                                buffer_size=1000,
                                pick_ori=pick_ori)  # None 'normal'

        print('***')
        print(('stc dim ' + str(stc.shape)))
        print('***')

    if not isinstance(stc, list):
        stc = [stc]

    if save_stc:
        for i in range(len(stc)):
            stc_file = op.abspath(basename + '_stc_' + str(i) + '.npy')
            np.save(stc_file, stc[i].data)

    # these coo are in MRI space and we have to convert to MNI space
    labels_cortex = mne.read_labels_from_annot(sbj_id,
                                               parc=parc,
                                               subjects_dir=sbj_dir)

    print(('\n*** %d ***\n' % len(labels_cortex)))

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate

    if is_fixed:
        mode = 'mean_flip'
    else:
        mode = 'mean'

    label_ts = mne.extract_label_time_course(stc,
                                             labels_cortex,
                                             src,
                                             mode=mode,
                                             allow_empty=True,
                                             return_generator=False)

    # save results in .npy file that will be the input for spectral node
    print('\n*** SAVE ROI TS ***\n')
    print((len(label_ts)))

    ts_file = op.abspath(basename + '_ROI_ts.npy')
    np.save(ts_file, label_ts)

    if aseg:
        print(sbj_id)
        labels_aseg = get_volume_labels_from_src(src, sbj_id, sbj_dir)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex
        labels_aseg = None

    print((labels[0].pos))
    print((len(labels)))

    # labels_file, label_names_file, label_coords_file = \
    # create_label_files(labels)
    labels_file, label_names_file, label_coords_file = \
        create_mni_label_files(forward, labels_cortex, labels_aseg,
                               sbj_id, sbj_dir)

    return ts_file, labels_file, label_names_file, label_coords_file
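A hypothetical call sketch for `compute_rois_inv_sol` on continuous (non-epoched) data; every path below is a placeholder:

ts_file, labels_file, label_names_file, label_coords_file = \
    compute_rois_inv_sol('sub-01_task-rest_raw.fif',      # placeholder path
                         sbj_id='sub-01',                  # placeholder subject
                         sbj_dir='/data/freesurfer',       # placeholder directory
                         fwd_filename='sub-01-fwd.fif',    # placeholder path
                         cov_fname='sub-01-cov.fif',       # placeholder path
                         inv_method='MNE',
                         parc='aparc',
                         save_stc=False)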
Example #15
def _compute_inverse_solution(raw_filename, sbj_id, subjects_dir, fwd_filename,
                              cov_fname, is_epoched=False, events_id=None,
                              condition=None, is_ave=False,
                              t_min=None, t_max=None, is_evoked=False,
                              snr=1.0, inv_method='MNE',
                              parc='aparc', aseg=False, aseg_labels=[],
                              all_src_space=False, ROIs_mean=True,
                              is_fixed=False):
    """
    Compute the inverse solution on raw/epoched data and return the average
    time series computed in the N_r regions of the source space defined by
    the specified cortical parcellation

    Inputs
        raw_filename : str
            filename of the raw/epoched data
        sbj_id : str
            subject name
        subjects_dir : str
            Freesurfer directory
        fwd_filename : str
            filename of the forward operator
        cov_fname : str
            filename of the noise covariance matrix
        is_epoched : bool
            if True and events_id is None, the input data are epoched data
            in the -epo.fif format;
            if True and events_id is not None, the raw data are epoched
            according to events_id and the t_min and t_max values
        events_id: dict
            the dict of events
        t_min, t_max: int
            define the time interval in which to epoch the raw data
        is_evoked: bool
            if True the raw data will be averaged according to the events
            contained in the dict events_id
        inv_method : str
            the inverse method to use; possible choices: MNE, dSPM, sLORETA
        snr : float
            the SNR value used to define the regularization parameter
        parc: str
            the parcellation defining the ROIs atlas in the source space
        aseg: bool
            if True a mixed source space will be created and the sub cortical
            regions defined in aseg_labels will be added to the source space
        aseg_labels: list
            list of substructures we want to include in the mixed source space
        all_src_space: bool
            if True we compute the inverse for all points of the source space
        ROIs_mean: bool
            if True we compute the mean of estimated time series on ROIs


    Outputs
        ts_file : str
            filename of the file where the estimated time series are saved
        labels_file : str
            filename of the file where the ROIs of the parcellation are saved
        label_names_file : str
            filename of the file where the names of the ROIs of the
            parcellation are saved
        label_coords_file : str
            filename of the file where the coordinates of the centroids
            of the ROIs of the parcellation are saved

    """
    print(('\n*** READ raw filename %s ***\n' % raw_filename))
    if is_epoched:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    elif is_ave:
        evokeds = read_evokeds(raw_filename)
        info = evokeds[0].info
    else:
        raw = read_raw_fif(raw_filename, preload=True)
        info = raw.info

    subj_path, basename, ext = split_f(raw_filename)

    print(('\n*** READ noise covariance %s ***\n' % cov_fname))
    noise_cov = mne.read_cov(cov_fname)

    print(('\n*** READ FWD SOL %s ***\n' % fwd_filename))
    forward = mne.read_forward_solution(fwd_filename)

    # TODO check use_cps for force_fixed=True
    if not aseg:
        print(('\n*** fixed orientation {} ***\n'.format(is_fixed)))
        # is_fixed=True => to convert the free-orientation fwd solution to
        # (surface-oriented) fixed orientation.
        forward = mne.convert_forward_solution(forward, surf_ori=True,
                                               force_fixed=is_fixed,
                                               use_cps=False)

    lambda2 = 1.0 / snr ** 2

    # compute inverse operator
    print('\n*** COMPUTE INV OP ***\n')
    if is_fixed:
        loose = 0
        depth = None
        pick_ori = None
    elif aseg:
        loose = 1
        depth = None
        pick_ori = None
    else:
        loose = 0.2
        depth = 0.8
        pick_ori = 'normal'

    print(('\n *** loose {}  depth {} ***\n'.format(loose, depth)))
    inverse_operator = make_inverse_operator(info, forward, noise_cov,
                                             loose=loose, depth=depth,
                                             fixed=is_fixed)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print('\n*** APPLY INV OP ***\n')
    stc_files = list()

    if is_epoched and events_id != {}:
        if is_evoked:
            stc = list()

            if events_id != condition and condition:
                events_name = condition
            else:
                events_name = events_id
            evoked = [epochs[k].average() for k in events_name]

            if 'epo' in basename:
                basename = basename.replace('-epo', '')
            fname_evo = op.abspath(basename + '-ave.fif')
            write_evokeds(fname_evo, evoked)

            for k in range(len(events_name)):
                print(evoked[k])
                stc_evo = apply_inverse(evoked[k], inverse_operator, lambda2,
                                        inv_method, pick_ori=pick_ori)
                print(('\n*** STC for event %s ***\n' % k))
                print('***')
                print(('stc dim ' + str(stc_evo.shape)))
                print('***')

                stc_evo_file = op.abspath(basename + '-%d' % k)
                stc_evo.save(stc_evo_file)
                stc.append(stc_evo)
                stc_files.append(stc_evo_file)

        else:
            stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                       inv_method, pick_ori=pick_ori)

    elif is_epoched and events_id == {}:
        stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                   inv_method, pick_ori=pick_ori)

    elif is_ave:
        stc = list()
        for evo in evokeds:
            print(evo.comment)
            stc_evo = apply_inverse(evo, inverse_operator, lambda2,
                                    inv_method, pick_ori=pick_ori)
            print(('\n*** STC for event %s ***\n' % evo.comment))
            print('***')
            print(('stc dim ' + str(stc_evo.shape)))
            print('***')

            stc_evo_file = op.join(subj_path, basename + '-%s' % evo.comment)
            stc_evo.save(stc_evo_file)

            stc.append(stc_evo)
            stc_files.append(stc_evo_file)
    else:
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, inv_method,
                                label=None,
                                start=None, stop=None,
                                buffer_size=1000,
                                pick_ori=pick_ori)  # None 'normal'

    ts_file, label_ts, labels_file, label_names_file, label_coords_file = \
        _process_stc(stc, basename, sbj_id, subjects_dir, parc, forward,
                     aseg, is_fixed, all_src_space=False, ROIs_mean=True)

    return ts_file, labels_file, label_names_file, \
        label_coords_file, stc_files
Example #16
# inv_cogni = np.load(file=inv_cogni_path)
#  cogni}}} #
#  inverse}}} #

# start, stop = raw.time_as_index([75,85])
start, stop = raw.time_as_index([0, 29.9])

#  {{{ apply inverse #
raw_c = raw.copy()
raw_c.filter(l_freq=8, h_freq=12)
raw_c.set_eeg_reference(ref_channels='average')
raw_c.apply_proj()
# stc = apply_inverse_raw(raw_c, inverse_operator, pick_ori='vector', method=inv_method, lambda2=lambda2, start=start, stop=stop)
stc = apply_inverse_raw(raw_c,
                        inverse_operator,
                        method=inv_method,
                        lambda2=lambda2,
                        start=start,
                        stop=stop)
# raw.set_reference_channels
stc
#   {{{beamformer #
# fwd_fix = mne.convert_forward_solution(fwd, surf_ori=True,  force_fixed=False)
# data_cov = mne.compute_raw_covariance(raw_c, tmin=0, tmax=5, method='shrunk')
# stc = lcmv_raw(raw_c, fwd_fix, None, data_cov, reg=0.05, start=start, stop=stop,
#                pick_ori='max-power', weight_norm='unit_noise_gain', max_ori_out='signed')
# weight_norm=None, max_ori_out='signed')

#  beamformer}}} #

# raw_cc = raw_c.copy()
# raw_cc[:,:] = np.random.rand(raw_cc[:,:][0].shape[0], raw_cc[:,:][0].shape[1]) - 1
Example #17
data_path = sample.data_path('..')
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_raw = data_path + '/MEG/sample/sample_audvis_raw.fif'
label_name = 'Aud-lh'
fname_label = data_path + '/MEG/sample/labels/%s.label' % label_name

snr = 1.0  # use smaller SNR for raw data
lambda2 = 1.0 / snr ** 2
method = "sLORETA"  # use sLORETA method (could also be MNE or dSPM)

# Load data
raw = Raw(fname_raw)
inverse_operator = read_inverse_operator(fname_inv)
label = mne.read_label(fname_label)

start, stop = raw.time_as_index([0, 15])  # read the first 15s of data

# Compute inverse solution
stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label,
                        start, stop, pick_normal=False)

# Save result in stc files
stc.save('mne_%s_raw_inverse_%s' % (method, label_name))

###############################################################################
# View activation time-series
pl.plot(1e3 * stc.times, stc.data[::100, :].T)
pl.xlabel('time (ms)')
pl.ylabel('%s value' % method)
pl.show()
Example #18
S_ = ica.fit(raw_filt, reject=reject)
# raw_filt.info['projs'] = projs
h = S_.plot_components(res=128, cmap='viridis', inst=raw_filt)
S_.plot_properties(raw_filt, picks=range(5))

raw_copy = raw_filt.copy()
S_.apply(raw_copy)
raw_copy.plot()  # check the result

# Compute inverse operator
snr = 1.0  # use smaller SNR for raw data
lambda2 = 1.0 / snr**2
method = "sLORETA"  # use sLORETA method (could also be MNE or dSPM)
inverse_op = make_inverse_operator(raw_copy.info, fwd, raw_cov)
# Compute inverse solution
start, stop = raw_copy.time_as_index([0, 30])
stc = apply_inverse_raw(raw_copy,
                        inverse_op,
                        lambda2,
                        method,
                        start=start,
                        stop=stop,
                        pick_ori=None)

brain = stc.plot('genz_bo',
                 'inflated',
                 'split',
                 subjects_dir=subjects_dir,
                 time_viewer=True)
brain.show_view('lateral')
Example #19
def raw_ndvar(raw, i_start=None, i_stop=None, decim=1, data=None, exclude='bads',
              sysname=None,  connectivity=None,
              inv=None, lambda2=1, method='dSPM', pick_ori=None, src=None,
              subjects_dir=None, parc='aparc', label=None):
    """Raw dta as NDVar

    Parameters
    ----------
    raw : Raw | str
        Raw instance, or path of a raw FIFF file.
    i_start : int | sequence of int
        Start sample (see notes; default is the beginning of the ``raw``).
    i_stop : int | sequence of int
        Stop sample (see notes; default is end of the ``raw``).
    decim : int
        Downsample the data by this factor when importing. ``1`` (default)
        means no downsampling. Note that this function does not low-pass filter
        the data. The data is downsampled by picking out every n-th sample.
    data : 'eeg' | 'mag' | 'grad' | None
        The kind of data to include (default based on data).
    exclude : list of string | str
        Channels to exclude (:func:`mne.pick_types` kwarg).
        If 'bads' (default), exclude channels in info['bads'].
        If empty do not exclude any.
    sysname : str
        Name of the sensor system to load sensor connectivity (e.g. 'neuromag',
        inferred automatically for KIT data converted with a recent version of
        MNE-Python).
    connectivity : str | list of (str, str) | array of int, (n_edges, 2)
        Connectivity between elements. Can be specified as:

        - ``"none"`` for no connections
        - list of connections (e.g., ``[('OZ', 'O1'), ('OZ', 'O2'), ...]``)
        - :class:`numpy.ndarray` of int, shape (n_edges, 2), to specify
          connections in terms of indices. Each row should specify one
          connection [i, j] with i < j. If the array's dtype is uint32,
          property checks are disabled to improve efficiency.
        - ``"grid"`` to use adjacency in the sensor names

        If unspecified, it is inferred from ``sysname`` if possible.
    inv : InverseOperator
        MNE inverse operator to transform data to source space (by default, data
        are loaded in sensor space). If ``inv`` is specified, subsequent
        parameters are required to construct the right source space.
    lambda2 : scalar
        Inverse solution parameter: lambda squared parameter.
    method : str
        Inverse solution parameter: noise normalization method.
    pick_ori : bool
        Inverse solution parameter.
    src : str
        Source space descriptor (e.g. ``'ico-4'``).
    subjects_dir : str
        MRI subjects directory.
    parc : str
        Parcellation to load for the source space.
    label : Label
        Restrict source estimate to this label.

    Returns
    -------
    data : NDVar | list of NDVar
        Data (sensor or source space). If ``i_start`` and ``i_stop`` are
        scalar, a single NDVar is returned; if they are lists, a list of
        NDVars is returned.

    Notes
    -----
    ``i_start`` and ``i_stop`` are interpreted as event indexes (from
    :func:`mne.find_events`), i.e. relative to ``raw.first_samp``.
    """
    if not isinstance(raw, MNE_RAW):
        raw = mne_raw(raw)
    name = os.path.basename(_get_raw_filename(raw))
    start_scalar = i_start is None or isinstance(i_start, int)
    stop_scalar = i_stop is None or isinstance(i_stop, int)
    if start_scalar or stop_scalar:
        if not (start_scalar and stop_scalar):
            raise TypeError(
                "i_start and i_stop must either both be scalar or both "
                "iterable, got i_start=%r, i_stop=%s" % (i_start, i_stop))
        i_start = (i_start,)
        i_stop = (i_stop,)
        scalar = True
    else:
        scalar = False

    # event index to raw index
    i_start = tuple(i if i is None else i - raw.first_samp for i in i_start)
    i_stop = tuple(i if i is None else i - raw.first_samp for i in i_stop)

    # target dimension
    if inv is None:
        if data is None:
            data = _guess_ndvar_data_type(raw.info)
        picks = _picks(raw.info, data, exclude)
        dim = sensor_dim(raw, picks, sysname, connectivity)
        info = _sensor_info(data, None, raw.info)
    else:
        assert data is None
        dim = SourceSpace.from_mne_source_spaces(inv['src'], src, subjects_dir,
                                                 parc, label)
        inv = prepare_inverse_operator(inv, 1, lambda2, method)
        info = {}  # FIXME

    out = []
    for start, stop in zip(i_start, i_stop):
        if inv is None:
            x = raw[picks, start:stop][0]
        else:
            x = apply_inverse_raw(raw, inv, lambda2, method, label, start,
                                  stop, pick_ori=pick_ori, prepared=True).data

        if decim != 1:
            x = x[:, ::decim]
        time = UTS(0, float(decim) / raw.info['sfreq'], x.shape[1])
        out.append(NDVar(x, (dim, time), info, name))

    if scalar:
        return out[0]
    else:
        return out
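A hypothetical usage sketch for `raw_ndvar` as defined above, loading one-second windows in source space around two event samples; the file names, inverse operator, subjects directory and sample indices are placeholders:

import mne

raw = mne.io.read_raw_fif('sample_audvis_raw.fif', preload=True)            # placeholder path
inv = mne.minimum_norm.read_inverse_operator('sample_audvis-meg-inv.fif')   # placeholder path
sfreq = int(round(raw.info['sfreq']))
i_starts = [10000, 20000]                 # placeholder event sample indices
i_stops = [i + sfreq for i in i_starts]   # one-second windows
ndvars = raw_ndvar(raw, i_starts, i_stops, inv=inv, lambda2=1. / 9.,
                   method='dSPM', src='ico-4', subjects_dir='/data/mri')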
Example #20
def extract_ts(dir_prepro_dat, dir_save, lower, upper, atlas):
    """
    Parameters
    ----------
    dir_prepro_dat : string
        Path to saved preprocessed data.
    dir_save : string
        Path to where the extracted time series should be saved.
    lower : list of floats
        Lower limit of the desired frequency ranges. Needs to have same length
        as upper.
    upper : list of floats
        Upper limit of the desired frequency ranges. Needs to have same length
        as lower.
    atlas : string
        Atlas used to define the ROIs; one of 'DK', 'BAita' or 'DKLobes'.

    Notes
    ----------
    Saves the extracted time series and the run time as a dictionary, in the 
    chosen path (dir_save).

    """

    ##################################################################
    # Initialize parameters
    ##################################################################

    fs_dir = fetch_fsaverage(verbose=True)
    subjects_dir = op.dirname(fs_dir)
    #fs_dir = '/home/kmsa/mne_data/MNE-fsaverage-data/fsaverage'
    #subjects_dir = '/home/kmsa/mne_data/MNE-fsaverage-data'

    # The files live in:
    subject = 'fsaverage'
    trans = 'fsaverage'  # MNE has a built-in fsaverage transformation
    src = mne.setup_source_space(subject,
                                 spacing='oct6',
                                 subjects_dir=None,
                                 add_dist=False)
    # Boundary element method
    bem = op.join(fs_dir, 'bem', 'fsaverage-5120-5120-5120-bem-sol.fif')

    # Inverse parameters
    method = "eLORETA"  #other options are minimum norm, dSPM, and sLORETA
    snr = 3.
    lambda2 = 1. / snr**2
    buff_sz = 250

    # Create folder if it does not exist
    if not op.exists(dir_save):
        mkdir(dir_save)
        print('\nCreated new path : ' + dir_save)

    # Check what atlas to use and read labels
    if atlas == 'DK':
        # Desikan-Killiany Atlas = aparc
        parc = 'Yeo2011_7Networks_N1000'  # 'aparc'
        labels = read_labels_from_annot(subject,
                                        parc='aparc',
                                        hemi='both',
                                        surf_name='white',
                                        annot_fname=None,
                                        regexp=None,
                                        subjects_dir=subjects_dir,
                                        verbose=None)

        labels = labels[:-1]
    # elif atlas == 'BA':
    #     # Broadmann areas
    #     labels = read_labels_from_annot(subject, parc = 'PALS_B12_Brodmann', hemi='both',
    #                                  surf_name= 'white', annot_fname = None, regexp = None,
    #                                  subjects_dir = subjects_dir, verbose = None)
    #     labels = labels[5:87]

    elif atlas == 'BAita':
        # Brodmann areas collected as in Di Lorenzo et al.
        labels = read_labels_from_annot(subject,
                                        parc='PALS_B12_Brodmann',
                                        hemi='both',
                                        surf_name='white',
                                        annot_fname=None,
                                        regexp=None,
                                        subjects_dir=subjects_dir,
                                        verbose=None)
        labels = labels[5:87]
        lab_dict = {}
        for lab in labels:
            lab_dict[lab.name] = lab

        ita_ba = [
            [1, 2, 3, 4],
            [5, 7],
            [6, 8],
            [9, 10],
            [11, 47],
            [44, 45, 46],  #[13],
            [20, 21, 22, 38, 41, 42],
            [24, 25, 32],  #[24,25,32,33], 
            [23, 29, 30, 31],
            [27, 28, 35, 36],  #[27,28,34,35,36], 
            [39, 40, 43],
            [19, 37],
            [17, 18]
        ]
        # ita_label = ['SMA', 'SPL', 'SFC', 'AFC', 'OFC', 'LFC', #'INS',
        #              'LTL', 'ACC_new', 'PCC', 'PHG_new', 'IPL', 'FLC', 'PVC']

        # Sort labels according to connectivity featurers
        new_label = []
        for idx, i in enumerate(ita_ba):
            for j in i:
                ba_lh = 'Brodmann.' + str(j) + '-lh'
                ba_rh = 'Brodmann.' + str(j) + '-rh'

                if j == i[0]:
                    sum_lh = lab_dict[ba_lh]
                    sum_rh = lab_dict[ba_rh]
                else:
                    sum_lh += lab_dict[ba_lh]
                    sum_rh += lab_dict[ba_rh]
            new_label.append(sum_lh)
            new_label.append(sum_rh)

        labels = new_label

    elif atlas == 'DKLobes':
        # Brodmann areas collected as in Di Lorenzo et al.
        labels = read_labels_from_annot(subject,
                                        parc='aparc',
                                        hemi='both',
                                        surf_name='white',
                                        annot_fname=None,
                                        regexp=None,
                                        subjects_dir=subjects_dir,
                                        verbose=None)
        # Divide into lobes based on
        # https://surfer.nmr.mgh.harvard.edu/fswiki/CorticalParcellation
        frontal = [
            'superiorfrontal', 'rostralmiddlefrontal', 'caudalmiddlefrontal',
            'parsopercularis', 'parstriangularis', 'parsorbitalis',
            'lateralorbitofrontal', 'medialorbitofrontal', 'precentral',
            'paracentral', 'frontalpole', 'rostralanteriorcingulate',
            'caudalanteriorcingulate'
        ]
        parietal = [
            'superiorparietal', 'inferiorparietal', 'supramarginal',
            'postcentral', 'precuneus', 'posteriorcingulate',
            'isthmuscingulate'
        ]
        temporal = [
            'superiortemporal', 'middletemporal', 'inferiortemporal',
            'bankssts', 'fusiform', 'transversetemporal', 'entorhinal',
            'temporalpole', 'parahippocampal'
        ]
        occipital = ['lateraloccipital', 'lingual', 'cuneus', 'pericalcarine']

        all_lobes = {
            'frontal': frontal,
            'parietal': parietal,
            'occipital': occipital,
            'temporal': temporal
        }

        labels = labels[:-1]
        lab_dict = {}
        for lab in labels:
            lab_dict[lab.name] = lab

        # Sort labels according to connectivity featurers
        new_label = []
        for lobes in list(all_lobes.keys()):
            for idx, name in enumerate(all_lobes[lobes]):
                name_lh = name + '-lh'
                name_rh = name + '-rh'

                if idx == 0:
                    sum_lh = lab_dict[name_lh]
                    sum_rh = lab_dict[name_rh]
                else:
                    sum_lh += lab_dict[name_lh]
                    sum_rh += lab_dict[name_rh]
            sum_lh.name = lobes + '-lh'
            sum_rh.name = lobes + '-rh'
            new_label.append(sum_lh)
            new_label.append(sum_rh)

        labels = new_label
    # elif finished

    # Create folder if it does not exist
    if not op.exists(dir_save):
        mkdir(dir_save)
        print('\nCreated new path : ' + dir_save)

    # List of time series that have already been saved
    already_saved = [i.split('_' + atlas)[0] for i in listdir(dir_save)]

    count = 0
    run_time = 0

    for filename in tqdm(listdir(dir_prepro_dat)):
        # Only loop over ".set" files
        if not filename.endswith(".set"):
            continue

        # Only choose the files that are not already in the save directory
        if filename.split('.')[0] in already_saved:
            count += 1
            continue

        start = timeit.default_timer()
        timeseries_dict = {}
        ###################################
        # Load preprocessed data
        ###################################
        ID_list = op.join(dir_prepro_dat, filename)
        raw = mne.io.read_raw_eeglab(ID_list, preload=True)

        # Set montage (number of used channels = 64)
        raw.set_montage('biosemi64')

        ##################################################################
        # Forward solution: from brain to electrode
        ##################################################################
        fwd = mne.make_forward_solution(raw.info,
                                        trans=trans,
                                        src=src,
                                        bem=bem,
                                        eeg=True,
                                        mindist=5.0,
                                        n_jobs=-1)

        ##################################################################
        # Inverse modeling
        ##################################################################
        # Compute noise covariance
        noise_cov = mne.compute_raw_covariance(raw, n_jobs=-1)
        # make an EEG inverse operator
        inverse_operator = make_inverse_operator(raw.info,
                                                 fwd,
                                                 noise_cov,
                                                 loose=0.2,
                                                 depth=0.8)

        raw.set_eeg_reference('average', projection=True)

        # Hardcoded print to give an overview of how much is done and left
        print('\n####################################################' +
              '\n####################################################' +
              '\n####################################################' +
              '\nSubject number: ' + str(count) + ', ' +
              filename.split('.')[0] + '\nRun time/subject = ' +
              str(run_time) + '\nRun time left in hours ~' +
              str((85 - (count + 1)) * (run_time / 60)) +
              '\n####################################################' +
              '\n####################################################' +
              '\n####################################################')

        # Compute inverse solution
        stc = apply_inverse_raw(raw,
                                inverse_operator,
                                lambda2,
                                method=method,
                                nave=1,
                                pick_ori=None,
                                verbose=True,
                                buffer_size=buff_sz)
        #pdb.set_trace()
        del raw

        # Hardcoded print to give an overview of how much is done and left
        print('\n####################################################' +
              '\n####################################################' +
              '\n####################################################' +
              '\nSubject number: ' + str(count) + ', ' +
              filename.split('.')[0] + '\nRun time/subject = ' +
              str(run_time) + '\nRun time left in hours ~' +
              str((85 - (count + 1)) * (run_time / 60)) +
              '\n####################################################' +
              '\n####################################################' +
              '\n####################################################')
        ##################################################################
        # Extract timeseries from DK regions
        ##################################################################
        # Label time series by Desikan-Killiany Atlas -> 68 ts
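        # 'pca_flip' summarizes each label with the first SVD component of its
        # vertex time courses, sign-flipped to agree with the dominant source
        # orientation and scaled to match the mean per-vertex power.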
        label_ts = mne.extract_label_time_course(stc,
                                                 labels,
                                                 inverse_operator['src'],
                                                 mode='pca_flip',
                                                 return_generator=True)
        del stc
        ###################################################################
        # Construct and save dictionary
        ###################################################################
        subject = filename.split('_')[0]
        stop = timeit.default_timer()
        run_time = (stop - start) / 60
        timeseries_dict = {'timeseries': label_ts, 'time': run_time}
        del label_ts
        # Save to computer
        save_name = dir_save + '/' + subject + '_' + atlas + '_timeseries' + '.pkl'
        #save_name = '/share/FannyMaster/PythonNew/DK_timeseries/DK_source_timeseries_'+ date +'.pkl'

        with open(save_name, 'wb') as file:
            pickle.dump(timeseries_dict, file)

        del timeseries_dict

        count += 1
Example #21
0
File: fiff.py Project: mhellb/Eelbrain
def raw_ndvar(raw,
              i_start=None,
              i_stop=None,
              decim=1,
              data=None,
              exclude='bads',
              sysname=None,
              connectivity=None,
              inv=None,
              lambda2=1,
              method='dSPM',
              pick_ori=None,
              src=None,
              subjects_dir=None,
              parc='aparc',
              label=None):
    """Raw dta as NDVar

    Parameters
    ----------
    raw : Raw | str
        Raw instance, or path of a raw FIFF file.
    i_start : int | sequence of int
        Start sample (see notes; default is the beginning of the ``raw``).
    i_stop : int | sequence of int
        Stop sample (see notes; default is end of the ``raw``).
    decim : int
        Downsample the data by this factor when importing. ``1`` (default)
        means no downsampling. Note that this function does not low-pass filter
        the data. The data is downsampled by picking out every n-th sample.
    data : 'eeg' | 'mag' | 'grad' | None
        The kind of data to include (default based on data).
    exclude : list of string | str
        Channels to exclude (:func:`mne.pick_types` kwarg).
        If 'bads' (default), exclude channels in info['bads'].
        If empty do not exclude any.
    sysname : str
        Name of the sensor system to load sensor connectivity (e.g. 'neuromag',
        inferred automatically for KIT data converted with a recent version of
        MNE-Python).
    connectivity : str | list of (str, str) | array of int, (n_edges, 2)
        Connectivity between elements. Can be specified as:

        - ``"none"`` for no connections
        - list of connections (e.g., ``[('OZ', 'O1'), ('OZ', 'O2'), ...]``)
        - :class:`numpy.ndarray` of int, shape (n_edges, 2), to specify
          connections in terms of indices. Each row should specify one
          connection [i, j] with i < j. If the array's dtype is uint32,
          property checks are disabled to improve efficiency.
        - ``"grid"`` to use adjacency in the sensor names

        If unspecified, it is inferred from ``sysname`` if possible.
    inv : InverseOperator
        MNE inverse operator to transform data to source space (by default, data
        are loaded in sensor space). If ``inv`` is specified, subsequent
        parameters are required to construct the right source space.
    lambda2 : scalar
        Inverse solution parameter: lambda squared parameter.
    method : str
        Inverse solution parameter: noise normalization method.
    pick_ori : bool
        Inverse solution parameter.
    src : str
        Source space descriptor (e.g. ``'ico-4'``).
    subjects_dir : str
        MRI subjects directory.
    parc : str
        Parcellation to load for the source space.
    label : Label
        Restrict source estimate to this label.

    Returns
    -------
    data : NDVar | list of NDVar
        Data (sensor or source space). If ``i_start`` and ``i_stop`` are scalar
        then a single NDVar is returned, if they are lists then a list of NDVars
        is returned.

    Notes
    -----
    ``i_start`` and ``i_stop`` are interpreted as event indexes (from
    :func:`mne.find_events`), i.e. relative to ``raw.first_samp``.
    """
    if not isinstance(raw, MNE_RAW):
        raw = mne_raw(raw)
    name = os.path.basename(raw.filenames[0])
    start_scalar = i_start is None or isinstance(i_start, int)
    stop_scalar = i_stop is None or isinstance(i_stop, int)
    if start_scalar or stop_scalar:
        if not (start_scalar and stop_scalar):
            raise TypeError(
                "i_start and i_stop must either both be scalar or both "
                "iterable, got i_start=%r, i_stop=%s" % (i_start, i_stop))
        i_start = (i_start, )
        i_stop = (i_stop, )
        scalar = True
    else:
        scalar = False

    # event index to raw index
    i_start = tuple(i if i is None else i - raw.first_samp for i in i_start)
    i_stop = tuple(i if i is None else i - raw.first_samp for i in i_stop)

    # target dimension
    if inv is None:
        if data is None:
            data = _guess_ndvar_data_type(raw.info)
        picks = _picks(raw.info, data, exclude)
        dim = sensor_dim(raw, picks, sysname, connectivity)
        info = _sensor_info(data, None, raw.info)
    else:
        assert data is None
        dim = SourceSpace.from_mne_source_spaces(inv['src'], src, subjects_dir,
                                                 parc, label)
        inv = prepare_inverse_operator(inv, 1, lambda2, method)
        info = {}  # FIXME

    out = []
    for start, stop in zip(i_start, i_stop):
        if inv is None:
            x = raw[picks, start:stop][0]
        else:
            x = apply_inverse_raw(raw,
                                  inv,
                                  lambda2,
                                  method,
                                  label,
                                  start,
                                  stop,
                                  pick_ori=pick_ori,
                                  prepared=True).data

        if decim != 1:
            x = x[:, ::decim]
        time = UTS(0, float(decim) / raw.info['sfreq'], x.shape[1])
        out.append(NDVar(x, (dim, time), name, info))

    if scalar:
        return out[0]
    else:
        return out
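# --- Added usage sketch (not part of the original example) -------------------
# Assumes this function is importable as ``eelbrain.load.fiff.raw_ndvar`` and
# that the MNE sample dataset is available; the event-based i_start/i_stop
# values come from ``mne.find_events``, as described in the Notes above.
import os

import mne
from eelbrain import load

data_dir = str(mne.datasets.sample.data_path())
sample_raw = mne.io.read_raw_fif(
    os.path.join(data_dir, 'MEG', 'sample', 'sample_audvis_raw.fif'))
sample_events = mne.find_events(sample_raw)
sfreq = sample_raw.info['sfreq']
# One sensor-space NDVar per event, spanning 0-500 ms after each trigger
ndvars = load.fiff.raw_ndvar(sample_raw,
                             i_start=list(sample_events[:, 0]),
                             i_stop=list(sample_events[:, 0] + int(0.5 * sfreq)))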
Example #22
0
                                noise_cov,
                                loose=0.2,
                                depth=0.8)
    snr_evoked = 3.0
    snr_raw = 1.0
    for label in labels:
        print label.name
        evoked_stc = apply_inverse(evoked,
                                   inv,
                                   lambda2=1.0 / (2**snr_evoked),
                                   method=source_method,
                                   label=label,
                                   pick_ori=None)
        raw_stc = apply_inverse_raw(raw,
                                    inv,
                                    lambda2=1.0 / (2**snr_raw),
                                    method=source_method,
                                    label=label,
                                    pick_ori=None)
        ch_names.append(label.name)
        evoked_stcs.append(evoked_stc.data.mean(0))
        raw_stcs.append(raw_stc.data.mean(0))
    evoked_stcs = np.array(evoked_stcs)
    raw_stcs = np.array(raw_stcs)

# Create info for raw_source and evoked_source
info_raw = mne.create_info(ch_names=ch_names,
                           ch_types=['stim'] + ['mag'] * len(labels),
                           sfreq=600)
info_evoked = mne.create_info(ch_names=ch_names[1:],
                              ch_types=['mag'] * len(labels),
                              sfreq=600)
# get and write inv. operator
inv = minnorm.make_inverse_operator(data1.info,
                                    fwd,
                                    cov,
                                    loose=0.,
                                    depth=None,
                                    fixed=True)
inv1 = minnorm.prepare_inverse_operator(inv, 1, 1. / 9.)
inv_sol = minnorm.inverse._assemble_kernel(
    inv1, None, 'MNE',
    None)[0]  # counterpart to forwardOperator, [sources x sensors]
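# Note (added): inv_sol has shape (n_sources, n_sensors) and already folds in the
# whitener, so multiplying it with sensor data restricted to the inverse
# operator's channels reproduces the 'MNE' estimate as a plain matrix product.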

# test on extract of time series
source_ts = minnorm.apply_inverse_raw(data1,
                                      inv,
                                      method='MNE',
                                      lambda2=1. / 9.,
                                      start=0,
                                      stop=6000)
label_ts = mne.extract_label_time_course(source_ts,
                                         labels_parc,
                                         src,
                                         mode='mean_flip',
                                         allow_empty=True,
                                         return_generator=False)

# get source identities
src_ident_lh = np.full(len(vert_lh), -1)
src_ident_rh = np.full(len(vert_rh), -1)

for l, label in enumerate(
        labels_parc[:201]):  # find sources that belong to the left HS labels
Example #24
0
# Save the source time courses to disk:

stc.save('mne_dSPM_inverse')

##############################################################################
# Now, let's compute dSPM on a raw file within a label:

fname_label = data_path + '/MEG/sample/labels/Aud-lh.label'
label = mne.read_label(fname_label)

##############################################################################
# Compute inverse solution during the first 15s:

from mne.minimum_norm import apply_inverse_raw  # noqa
start, stop = raw.time_as_index([0, 15])  # read the first 15s of data
stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label, start,
                        stop)

##############################################################################
# Save result in stc files:

stc.save('mne_dSPM_raw_inverse_Aud')

##############################################################################
# What else can you do?
# ^^^^^^^^^^^^^^^^^^^^^
#
#     - detect heart beat QRS component
#     - detect eye blinks and EOG artifacts
#     - compute SSP projections to remove ECG or EOG artifacts
#     - compute Independent Component Analysis (ICA) to remove artifacts or
#       select latent sources
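##############################################################################
# Added sketch (not part of the original tutorial): the artifact-handling steps
# listed above, assuming ``raw`` from earlier in this script.

from mne.preprocessing import (ICA, compute_proj_ecg, compute_proj_eog,  # noqa
                               create_eog_epochs, find_ecg_events)

ecg_events, _, _ = find_ecg_events(raw)          # detect heart beat QRS component
eog_epochs = create_eog_epochs(raw)              # detect eye blinks / EOG artifacts
ecg_projs, _ = compute_proj_ecg(raw, n_grad=1, n_mag=1, n_eeg=1)  # SSP for ECG
eog_projs, _ = compute_proj_eog(raw, n_grad=1, n_mag=1, n_eeg=1)  # SSP for EOG
ica = ICA(n_components=20).fit(raw)              # ICA to isolate latent sources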
def compute_ROIs_inv_sol(raw_filename, sbj_id, sbj_dir, fwd_filename,
                         cov_fname, is_epoched=False, event_id=None,
                         t_min=None, t_max=None,
                         is_evoked=False, events_id=[],
                         snr=1.0, inv_method='MNE',
                         parc='aparc', aseg=False, aseg_labels=[],
                         is_blind=False, labels_removed=[], save_stc=False):
    import os
    import os.path as op
    import numpy as np
    import mne
    import pickle

    from mne.io import read_raw_fif
    from mne import read_epochs
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from mne.minimum_norm import apply_inverse_epochs, apply_inverse
    from mne import get_volume_labels_from_src

    from nipype.utils.filemanip import split_filename as split_f

    from neuropype_ephy.preproc import create_reject_dict

    try:
        traits.undefined(event_id)
    except NameError:
        event_id = None

    print '\n*** READ raw filename %s ***\n' % raw_filename
    if is_epoched and event_id is None:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    else:
        raw = read_raw_fif(raw_filename)
        info = raw.info

    subj_path, basename, ext = split_f(info['filename'])

    print '\n*** READ noise covariance %s ***\n' % cov_fname
    noise_cov = mne.read_cov(cov_fname)

    print '\n*** READ FWD SOL %s ***\n' % fwd_filename
    forward = mne.read_forward_solution(fwd_filename)

    if not aseg:
        forward = mne.convert_forward_solution(forward, surf_ori=True,
                                               force_fixed=False)

    lambda2 = 1.0 / snr ** 2
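    # (added note) lambda2 = 1 / SNR**2 is the regularization parameter; SNR ~ 1
    # is the usual choice for raw / single-trial data, ~3 for evoked averages.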

    # compute inverse operator
    print '\n*** COMPUTE INV OP ***\n'
    if not aseg:
        loose = 0.2
        depth = 0.8
    else:
        loose = None
        depth = None

    inverse_operator = make_inverse_operator(info, forward, noise_cov,
                                             loose=loose, depth=depth,
                                             fixed=False)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print '\n*** APPLY INV OP ***\n'
    if is_epoched and event_id is not None:
        events = mne.find_events(raw)
        picks = mne.pick_types(info, meg=True, eog=True, exclude='bads')
        reject = create_reject_dict(info)

        if is_evoked:
            epochs = mne.Epochs(raw, events, events_id, t_min, t_max,
                                picks=picks, baseline=(None, 0), reject=reject)
            evoked = [epochs[k].average() for k in events_id]
            snr = 3.0
            lambda2 = 1.0 / snr ** 2

            ev_list = events_id.items()
            for k in range(len(events_id)):
                stc = apply_inverse(evoked[k], inverse_operator, lambda2,
                                    inv_method, pick_ori=None)

                print '\n*** STC for event %s ***\n' % ev_list[k][0]
                stc_file = op.abspath(basename + '_' + ev_list[k][0])

                print '***'
                print 'stc dim ' + str(stc.shape)
                print '***'

                if not aseg:
                    stc.save(stc_file)

        else:
            epochs = mne.Epochs(raw, events, event_id, t_min, t_max,
                                picks=picks, baseline=(None, 0), reject=reject)
            stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                       inv_method, pick_ori=None)

            print '***'
            print 'len stc %d' % len(stc)
            print '***'

    elif is_epoched and event_id is None:
        stc = apply_inverse_epochs(epochs, inverse_operator, lambda2,
                                   inv_method, pick_ori=None)

        print '***'
        print 'len stc %d' % len(stc)
        print '***'
    else:
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, inv_method,
                                label=None,
                                start=None, stop=None,
                                buffer_size=1000,
                                pick_ori=None)  # None 'normal'

        print '***'
        print 'stc dim ' + str(stc.shape)
        print '***'

    if save_stc:
        if aseg:
            for i in range(len(stc)):
                try:
                    os.mkdir(op.join(subj_path, 'TS'))
                except OSError:
                    pass
                stc_file = op.join(subj_path, 'TS', basename + '_' +
                                   inv_method + '_stc_' + str(i) + '.npy')

                if not op.isfile(stc_file):
                    np.save(stc_file, stc[i].data)

    labels_cortex = mne.read_labels_from_annot(sbj_id, parc=parc,
                                               subjects_dir=sbj_dir)
    if is_blind:
        for l in labels_cortex:
            if l.name in labels_removed:
                print l.name
                labels_cortex.remove(l)

    print '\n*** %d ***\n' % len(labels_cortex)

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate
    label_ts = mne.extract_label_time_course(stc, labels_cortex, src,
                                             mode='mean',
                                             allow_empty=True,
                                             return_generator=False)

    # save results in .npy file that will be the input for spectral node
    print '\n*** SAVE ROI TS ***\n'
    print len(label_ts)

    ts_file = op.abspath(basename + '_ROI_ts.npy')
    np.save(ts_file, label_ts)

    if aseg:
        print sbj_id
        labels_aseg = get_volume_labels_from_src(src, sbj_id, sbj_dir)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex

    print labels[0].pos
    print len(labels)

    labels_file = op.abspath('labels.dat')
    with open(labels_file, "wb") as f:
        pickle.dump(len(labels), f)
        for value in labels:
            pickle.dump(value, f)

    label_names_file = op.abspath('label_names.txt')
    label_coords_file = op.abspath('label_coords.txt')

    label_names = []
    label_coords = []

    for value in labels:
        label_names.append(value.name)
#        label_coords.append(value.pos[0])
        label_coords.append(np.mean(value.pos, axis=0))

    np.savetxt(label_names_file, np.array(label_names, dtype=str),
               fmt="%s")
    np.savetxt(label_coords_file, np.array(label_coords, dtype=float),
               fmt="%f %f %f")

    return ts_file, labels_file, label_names_file, label_coords_file
Example #26
0
import numpy as np
print(__doc__)

data_path = sample.data_path()
fname = data_path
fname += '/MEG/sample/sample_audvis-eeg-oct-6-eeg-inv.fif'

inv = read_inverse_operator(fname)


fs = 500
channels = ['EEG 001', 'EEG 002', 'EEG 003', 'EEG 004', 'EEG 005', 'EEG 006', 'EEG 007', 'EEG 008', 'EEG 009', 'EEG 010', 'EEG 011', 'EEG 012', 'EEG 013', 'EEG 014', 'EEG 015', 'EEG 016', 'EEG 017', 'EEG 018', 'EEG 019', 'EEG 020', 'EEG 021', 'EEG 022', 'EEG 023', 'EEG 024', 'EEG 025', 'EEG 026', 'EEG 027', 'EEG 028', 'EEG 029', 'EEG 030', 'EEG 031', 'EEG 032', 'EEG 033', 'EEG 034', 'EEG 035', 'EEG 036', 'EEG 037', 'EEG 038', 'EEG 039', 'EEG 040', 'EEG 041', 'EEG 042', 'EEG 043', 'EEG 044', 'EEG 045', 'EEG 046', 'EEG 047', 'EEG 048', 'EEG 049', 'EEG 050', 'EEG 051', 'EEG 052', 'EEG 054', 'EEG 055', 'EEG 056', 'EEG 057', 'EEG 058', 'EEG 059', 'EEG 060']
#info = mne.create_info(ch_names=channels, sfreq=fs, montage=mne.channels.read_montage(kind='standard_primed'), ch_types=['eeg' for ch in channels])
info = inv['info']
#info['sfreq'] = 500
data = np.random.normal(loc=0, scale=0.00001, size=(5000, len(info["ch_names"])))
mne.viz.plot_sensors(info)  # plot sensor locations from the Info object
raw = mne.io.RawArray(data.T, info)
raw.plot_sensors()
#raw.set_eeg_reference()
#raw.plot()
#plt.show()

sources = apply_inverse_raw(raw, inv, 0.01)



print("Method: %s" % inv['methods'])
print("fMRI prior: %s" % inv['fmri_prior'])
print("Number of sources: %s" % inv['nsource'])
print("Number of channels: %s" % inv['nchan'])
data_path = sample.data_path()
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_raw = data_path + '/MEG/sample/sample_audvis_raw.fif'
label_name = 'Aud-lh'
fname_label = data_path + '/MEG/sample/labels/%s.label' % label_name

snr = 1.0  # use smaller SNR for raw data
lambda2 = 1.0 / snr ** 2
method = "sLORETA"  # use sLORETA method (could also be MNE or dSPM)

# Load data
raw = Raw(fname_raw)
inverse_operator = read_inverse_operator(fname_inv)
label = mne.read_label(fname_label)

start, stop = raw.time_as_index([0, 15])  # read the first 15s of data

# Compute inverse solution
stc = apply_inverse_raw(raw, inverse_operator, lambda2, method, label,
                        start, stop, pick_ori=None)

# Save result in stc files
stc.save('mne_%s_raw_inverse_%s' % (method, label_name))

###############################################################################
# View activation time-series
plt.plot(1e3 * stc.times, stc.data[::100, :].T)
plt.xlabel('time (ms)')
plt.ylabel('%s value' % method)
plt.show()
Example #28
0
def compute_ROIs_inv_sol(raw, sbj_id, sbj_dir, fwd_filename, cov_fname, snr,
                         inv_method, parc, aseg, aseg_labels):
    import os.path as op
    import numpy as np
    import mne
    from mne.minimum_norm import make_inverse_operator, apply_inverse_raw
    from nipype.utils.filemanip import split_filename as split_f
    
    from neuropype_ephy.compute_inv_problem import get_aseg_labels

    print '***** READ noise covariance %s *****' % cov_fname
    noise_cov = mne.read_cov(cov_fname)

    print '***** READ FWD SOL %s *****' % fwd_filename
    forward = mne.read_forward_solution(fwd_filename)

    if not aseg:
        forward = mne.convert_forward_solution(forward, surf_ori=True,
                                               force_fixed=False)

    lambda2 = 1.0 / snr ** 2

    # compute inverse operator
    print '***** COMPUTE INV OP *****'
    if not aseg:
        loose = 0.2
        depth = 0.8
    else:
        loose = None
        depth = None

    inverse_operator = make_inverse_operator(raw.info, forward, noise_cov,
                                             loose=loose, depth=depth,
                                             fixed=False)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print '***** APPLY INV OP *****'
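    # (added note) buffer_size=1000 applies the inverse in chunks of 1000 samples
    # to keep memory usage bounded for long raw recordings.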
    stc = apply_inverse_raw(raw, inverse_operator, lambda2, inv_method,
                            label=None,
                            start=None, stop=None,
                            buffer_size=1000,
                            pick_ori=None)  # None 'normal'

    print '***'
    print 'stc dim ' + str(stc.shape)
    print '***'

    labels_cortex = mne.read_labels_from_annot(sbj_id, parc=parc,
                                               subjects_dir=sbj_dir)

    src = inverse_operator['src']

    # allow_empty : bool -> Instead of emitting an error, return all-zero time
    # courses for labels that do not have any vertices in the source estimate
    # TODO what happens if it is used with only the cortex? -> OK!!!
    label_ts = mne.extract_label_time_course_AP(stc, labels_cortex, src,
                                                mode='mean_flip',
                                                allow_empty=True,
                                                return_generator=False)

    # save results in .npy file that will be the input for spectral node
    print '***** SAVE SOL *****'
    subj_path, basename, ext = split_f(raw.info['filename'])
    ts_file = op.abspath(basename + '.npy')
    np.save(ts_file, label_ts)

    if aseg:
        labels_aseg = get_aseg_labels(src, sbj_dir, sbj_id, aseg_labels)
        labels = labels_cortex + labels_aseg
    else:
        labels = labels_cortex

    return ts_file, labels
snr = 1.0  # use smaller SNR for raw data
lambda2 = 1.0 / snr**2
method = "sLORETA"  # use sLORETA method (could also be MNE or dSPM)

# Load data
raw = Raw(fname_raw)
inverse_operator = read_inverse_operator(fname_inv)
label = mne.read_label(fname_label)

start, stop = raw.time_as_index([0, 15])  # read the first 15s of data

# Compute inverse solution
stc = apply_inverse_raw(raw,
                        inverse_operator,
                        lambda2,
                        method,
                        label,
                        start,
                        stop,
                        pick_ori=None)  # pick_normal was replaced by pick_ori in MNE

# Save result in stc files
stc.save('mne_%s_raw_inverse_%s' % (method, label_name))

###############################################################################
# View activation time-series
pl.plot(1e3 * stc.times, stc.data[::100, :].T)
pl.xlabel('time (ms)')
pl.ylabel('%s value' % method)
pl.show()
Example #30
0
                     alpha=0.3)
    plt.title("IC %d" % i)
    plt.xlim([0, fmax])
    plt.ylim([0, round(CompsEpochFFTs_ave.max(), 2)])
    plt.xlabel('frequency (Hz)')
    plt.ylabel('Amplitude (a.u.)')
plt.tight_layout()
plt.subplots_adjust(top=0.93)

# < 5. ICA components' projection to surface source space >
CompsSpatDist = np.dot(ICA_raw.mixing_matrix_.T,
                       ICA_raw.pca_components_[:ICA_raw.n_components_])
CompsSpatDist = CompsSpatDist.T
PseudoRaw = mne.io.RawArray(CompsSpatDist,
                            rawdata.copy().pick_types(meg=True).info)
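# (added note) Each "sample" of PseudoRaw is the sensor topography of one IC, so
# the inverse solution below yields one source-space map per independent component.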
CompsSrc = apply_inverse_raw(PseudoRaw, InvOperator, lambda2, method=method)

# plot surface source space data
surfdata = CompsSrc.data[:nvert_insurf, :]
stcdata = mne.SourceEstimate(surfdata,
                             vertices=vertices,
                             tmin=CompsSrc.tmin,
                             tstep=1 / sfreq,
                             subject=MRIsubject)
brainView = stcdata.plot(subject=MRIsubject,
                         hemi='both',
                         time_label='IC #%03d',
                         time_viewer=True,
                         subjects_dir=subjects_dir,
                         time_unit='ms')
brainView.toggle_toolbars(show=True)
Example #31
0
def raw_ndvar(
    raw,
    i_start=None,
    i_stop=None,
    decim=1,
    inv=None,
    lambda2=1,
    method="dSPM",
    pick_ori=None,
    src=None,
    subjects_dir=None,
    parc="aparc",
    label=None,
):
    """Raw dta as NDVar

    Parameters
    ----------
    raw : Raw
        Raw instance.
    i_start : int | sequence of int
        Start sample (see notes; default is the beginning of the ``raw``).
    i_stop : int | sequence of int
        Stop sample (see notes; default is end of the ``raw``).
    decim : int
        Downsample the data by this factor when importing. ``1`` (default)
        means no downsampling. Note that this function does not low-pass filter
        the data. The data is downsampled by picking out every n-th sample.
    inv : InverseOperator
        MNE inverse operator to transform data to source space (by default, data
        are loaded in sensor space). If ``inv`` is specified, subsequent
        parameters are required to construct the right source space.
    lambda2 : scalar
        Inverse solution parameter: lambda squared parameter.
    method : str
        Inverse solution parameter: noise normalization method.
    pick_ori : bool
        Inverse solution parameter.
    src : str
        Source space descriptor (e.g. ``'ico-4'``).
    subjects_dir : str
        MRI subjects directory.
    parc : str
        Parcellation to load for the source space.
    label : Label
        Restrict source estimate to this label.

    Returns
    -------
    data : NDVar | list of NDVar
        Data (sensor or source space). If ``i_start`` and ``i_stop`` are scalar
        then a single NDVar is returned, if they are lists then a list of NDVars
        is returned.

    Notes
    -----
    ``i_start`` and ``i_stop`` are interpreted as event indexes (from
    :func:`mne.find_events`), i.e. relative to ``raw.first_samp``.
    """
    start_scalar = i_start is None or isinstance(i_start, int)
    stop_scalar = i_stop is None or isinstance(i_stop, int)
    if start_scalar or stop_scalar:
        if not (start_scalar and stop_scalar):
            raise TypeError(
                "i_start and i_stop must either both be scalar or both "
                "iterable, got i_start=%r, i_stop=%s" % (i_start, i_stop)
            )
        i_start = (i_start,)
        i_stop = (i_stop,)
        scalar = True
    else:
        scalar = False

    # event index to raw index
    i_start = tuple(i if i is None else i - raw.first_samp for i in i_start)
    i_stop = tuple(i if i is None else i - raw.first_samp for i in i_stop)

    # target dimension
    if inv is None:
        picks = mne.pick_types(raw.info, ref_meg=False)
        dim = sensor_dim(raw, picks)
    else:
        dim = SourceSpace.from_mne_source_spaces(inv["src"], src, subjects_dir, parc, label)
        inv = prepare_inverse_operator(inv, 1, lambda2, method)

    out = []
    for start, stop in izip(i_start, i_stop):
        if inv is None:
            x = raw[picks, start:stop][0]
        else:
            x = apply_inverse_raw(raw, inv, lambda2, method, label, start, stop, pick_ori=pick_ori, prepared=True).data

        if decim != 1:
            x = x[:, ::decim]
        time = UTS(0, float(decim) / raw.info["sfreq"], x.shape[1])
        out.append(NDVar(x, (dim, time), _cs.meg_info()))

    if scalar:
        return out[0]
    else:
        return out
Example #32
0
def compute_src_label_ts(subject,
                         crop_to=[0, 250],
                         resample_to=100.,
                         bads=None,
                         mag_reject=5e-12,
                         win_len=2000,
                         n_wins=11,
                         verbose=None,
                         lambda2=1. / 9.,
                         inv_method='dSPM',
                         extract_ts_mode='mean_flip'):
    """
    Compute source label time series
    """
    """
    Compute anatomy
    """

    hcp.make_mne_anatomy(subject,
                         subjects_dir=subjects_dir,
                         hcp_path=hcp_path,
                         recordings_path=hcp_path)
    """
    Read surface labels
    """
    labels = read_labels_from_annot(subject,
                                    parc='aparc',
                                    subjects_dir=subjects_dir)
    labels_fsav = read_labels_from_annot('fsaverage',
                                         parc='aparc',
                                         subjects_dir=subjects_dir)
    """
    Read raw data
    """

    raw = hcp.read_raw(subject=subject,
                       data_type=data_type,
                       hcp_path=hcp_path,
                       run_index=run_index)

    raw.load_data()

    raw.crop(crop_to[0], crop_to[1])

    raw.resample(resample_to)

    raw.info['bads'] = bads

    hcp.preprocessing.set_eog_ecg_channels(raw)

    hcp.preprocessing.apply_ref_correction(raw)

    info = raw.info.copy()

    raw.info['projs'] = []

    ecg_ave = create_ecg_epochs(raw).average()

    eog_ave = create_eog_epochs(raw).average()

    ssp_eog, _ = compute_proj_eog(raw,
                                  n_grad=1,
                                  n_mag=1,
                                  average=True,
                                  reject=dict(mag=mag_reject))
    raw.add_proj(ssp_eog, remove_existing=True)

    n_fft = next_fast_len(int(round(4 * raw.info['sfreq'])))

    sfreq = raw.info['sfreq']
    """
    Compute forward model
    """
    src_outputs = hcp.anatomy.compute_forward_stack(
        subject=subject,
        subjects_dir=subjects_dir,
        hcp_path=hcp_path,
        recordings_path=hcp_path,
        src_params=dict(add_dist=False),
        info_from=dict(data_type=data_type, run_index=run_index))
    fwd = src_outputs['fwd']
    """
    Compute noise covariance
    """
    raw_noise = hcp.read_raw(subject=subject,
                             hcp_path=hcp_path,
                             data_type='noise_empty_room')
    raw_noise.load_data()
    hcp.preprocessing.apply_ref_correction(raw_noise)
    raw_noise.add_proj(ssp_eog)
    noise_cov = compute_raw_covariance(raw_noise, method='oas')
    """
    Compute inverse operator
    """

    raw.info = info
    inv_op = make_inverse_operator(raw.info,
                                   forward=fwd,
                                   noise_cov=noise_cov,
                                   verbose=verbose)
    """
    Compute source activity
    """

    wins = [[0, win_len]]
    for i in range(n_wins):
        new_wins = [
            wins[0][0] + (win_len * (i + 1)), wins[0][1] + (win_len * (i + 1))
        ]
        wins.append(new_wins)

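    # (added note) wins now holds n_wins + 1 [start, stop] sample ranges of
    # win_len samples each; at the default 100 Hz resampled rate the default
    # 2000-sample window spans 20 s.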
    raw_srcs = []
    for win in wins:
        res = apply_inverse_raw(raw,
                                inv_op,
                                lambda2=lambda2,
                                method=inv_method,
                                label=None,
                                start=win[0],
                                stop=win[1],
                                nave=1,
                                time_func=None,
                                pick_ori=None,
                                buffer_size=None,
                                prepared=False,
                                method_params=None,
                                verbose=verbose)
        raw_srcs.append(res)
    """
    Compute source label time series
    """
    src = inv_op['src']
    label_ts = extract_label_time_course(raw_srcs,
                                         labels,
                                         src,
                                         mode=extract_ts_mode,
                                         return_generator=False)

    return label_ts, sfreq
Example #33
0
            fwd = make_forward_solution(raw_crop.info,
                                        trans=trans,
                                        src=src,
                                        bem=bem,
                                        meg=True,
                                        eeg=False,
                                        mindist=0.0,
                                        n_jobs=3)
            write_forward_solution(fwdFile, fwd, overwrite=True)
        else:
            fwd = read_forward_solution(fwdFile)

        # Make (or read) inverse operator
        if not op.isfile(invFile) or overwrite_pre:
            inv = make_inverse_operator(raw_crop.info, fwd, noise_cov)
            write_inverse_operator(invFile, inv)
        else:
            inv = read_inverse_operator(invFile)

        # Do source recon
        t0 = time.time()
        stc_dSPM = apply_inverse_raw(raw_crop, inv, lambda2, method="dSPM")
        #        stc_MNE     = apply_inverse_raw(raw_crop, inv, lambda2, method="MNE")
        dt = time.time() - t0
        print('Time elapsed: ' + str(dt / 60.0) + ' min')

        #       Save
        stc_dSPM.save(outFile_dSPM)
#        stc_MNE.save(outFile_MNE)

#END
Example #34
0
File: fiff.py Project: rbaehr/Eelbrain
def raw_ndvar(raw,
              i_start=None,
              i_stop=None,
              decim=1,
              inv=None,
              lambda2=1,
              method='dSPM',
              pick_ori=None,
              src=None,
              subjects_dir=None,
              parc='aparc',
              label=None):
    """Raw dta as NDVar

    Parameters
    ----------
    raw : Raw | str
        Raw instance, or path of a raw FIFF file.
    i_start : int | sequence of int
        Start sample (see notes; default is the beginning of the ``raw``).
    i_stop : int | sequence of int
        Stop sample (see notes; default is end of the ``raw``).
    decim : int
        Downsample the data by this factor when importing. ``1`` (default)
        means no downsampling. Note that this function does not low-pass filter
        the data. The data is downsampled by picking out every n-th sample.
    inv : InverseOperator
        MNE inverse operator to transform data to source space (by default, data
        are loaded in sensor space). If ``inv`` is specified, subsequent
        parameters are required to construct the right source space.
    lambda2 : scalar
        Inverse solution parameter: lambda squared parameter.
    method : str
        Inverse solution parameter: noise normalization method.
    pick_ori : bool
        Inverse solution parameter.
    src : str
        Source space descriptor (e.g. ``'ico-4'``).
    subjects_dir : str
        MRI subjects directory.
    parc : str
        Parcellation to load for the source space.
    label : Label
        Restrict source estimate to this label.

    Returns
    -------
    data : NDVar | list of NDVar
        Data (sensor or source space). If ``i_start`` and ``i_stop`` are scalar
        then a single NDVar is returned, if they are lists then a list of NDVars
        is returned.

    Notes
    -----
    ``i_start`` and ``i_stop`` are interpreted as event indexes (from
    :func:`mne.find_events`), i.e. relative to ``raw.first_samp``.
    """
    if not isinstance(raw, MNE_RAW):
        raw = mne_raw(raw)
    name = os.path.basename(_get_raw_filename(raw))
    start_scalar = i_start is None or isinstance(i_start, int)
    stop_scalar = i_stop is None or isinstance(i_stop, int)
    if start_scalar or stop_scalar:
        if not (start_scalar and stop_scalar):
            raise TypeError(
                "i_start and i_stop must either both be scalar or both "
                "iterable, got i_start=%r, i_stop=%s" % (i_start, i_stop))
        i_start = (i_start, )
        i_stop = (i_stop, )
        scalar = True
    else:
        scalar = False

    # event index to raw index
    i_start = tuple(i if i is None else i - raw.first_samp for i in i_start)
    i_stop = tuple(i if i is None else i - raw.first_samp for i in i_stop)

    # target dimension
    if inv is None:
        picks = mne.pick_types(raw.info, ref_meg=False)
        dim = sensor_dim(raw, picks)
    else:
        dim = SourceSpace.from_mne_source_spaces(inv['src'], src, subjects_dir,
                                                 parc, label)
        inv = prepare_inverse_operator(inv, 1, lambda2, method)

    out = []
    for start, stop in izip(i_start, i_stop):
        if inv is None:
            x = raw[picks, start:stop][0]
        else:
            x = apply_inverse_raw(raw,
                                  inv,
                                  lambda2,
                                  method,
                                  label,
                                  start,
                                  stop,
                                  pick_ori=pick_ori,
                                  prepared=True).data

        if decim != 1:
            x = x[:, ::decim]
        time = UTS(0, float(decim) / raw.info['sfreq'], x.shape[1])
        out.append(NDVar(x, (dim, time), _cs.meg_info(), name))

    if scalar:
        return out[0]
    else:
        return out
Example #35
0
def _compute_inverse_solution(raw_filename,
                              sbj_id,
                              subjects_dir,
                              fwd_filename,
                              cov_fname,
                              is_epoched=False,
                              events_id=None,
                              events_file=None,
                              t_min=None,
                              t_max=None,
                              is_evoked=False,
                              snr=1.0,
                              inv_method='MNE',
                              parc='aparc',
                              aseg=False,
                              aseg_labels=[],
                              all_src_space=False,
                              ROIs_mean=True,
                              is_fixed=False):
    """
    Compute the inverse solution on raw/epoched data and return the average
    time series computed in the N_r regions of the source space defined by
    the specified cortical parcellation

    Inputs
        raw_filename : str
            filename of the raw/epoched data
        sbj_id : str
            subject name
        subjects_dir : str
            Freesurfer directory
        fwd_filename : str
            filename of the forward operator
        cov_filename : str
            filename of the noise covariance matrix
        is_epoched : bool
            if True and events_id is None, the input data are epoched data
            in the -epo.fif format;
            if True and events_id is not None, the raw data are epoched
            according to events_id, t_min and t_max
        events_id: dict
            the dict of events
        t_min, t_max: int
            define the time interval in which to epoch the raw data
        is_evoked: bool
            if True the raw data will be averaged according to the events
            contained in the dict events_id
        inv_method : str
            the inverse method to use; possible choices: MNE, dSPM, sLORETA
        snr : float
            the SNR value used to define the regularization parameter
        parc: str
            the parcellation defining the ROIs atlas in the source space
        aseg: bool
            if True a mixed source space will be created and the sub cortical
            regions defined in aseg_labels will be added to the source space
        aseg_labels: list
            list of substructures we want to include in the mixed source space
        all_src_space: bool
            if True we compute the inverse for all points of the source space
        ROIs_mean: bool
            if True we compute the mean of estimated time series on ROIs


    Outputs
        ts_file : str
            filename of the file where are saved the estimated time series
        labels_file : str
            filename of the file where are saved the ROIs of the parcellation
        label_names_file : str
            filename of the file where are saved the name of the ROIs of the
            parcellation
        label_coords_file : str
            filename of the file where are saved the coordinates of the
            centroid of the ROIs of the parcellation

    """
    print(('\n*** READ raw filename %s ***\n' % raw_filename))
    if is_epoched and events_id == {}:
        epochs = read_epochs(raw_filename)
        info = epochs.info
    else:
        raw = read_raw_fif(raw_filename, preload=True)
        info = raw.info

    subj_path, basename, ext = split_f(raw_filename)

    print(('\n*** READ noise covariance %s ***\n' % cov_fname))
    noise_cov = mne.read_cov(cov_fname)

    print(('\n*** READ FWD SOL %s ***\n' % fwd_filename))
    forward = mne.read_forward_solution(fwd_filename)

    # TODO check use_cps for force_fixed=True
    if not aseg:
        print(('\n*** fixed orientation {} ***\n'.format(is_fixed)))
        # is_fixed=True => to convert the free-orientation fwd solution to
        # (surface-oriented) fixed orientation.
        forward = mne.convert_forward_solution(forward,
                                               surf_ori=True,
                                               force_fixed=is_fixed,
                                               use_cps=False)

    lambda2 = 1.0 / snr**2

    # compute inverse operator
    print('\n*** COMPUTE INV OP ***\n')
    if is_fixed:
        loose = 0
        depth = None
        pick_ori = None
    elif aseg:
        loose = 1
        depth = None
        pick_ori = None
    else:
        loose = 0.2
        depth = 0.8
        pick_ori = 'normal'

    print(('\n *** loose {}  depth {} ***\n'.format(loose, depth)))
    inverse_operator = make_inverse_operator(info,
                                             forward,
                                             noise_cov,
                                             loose=loose,
                                             depth=depth,
                                             fixed=is_fixed)

    # apply inverse operator to the time windows [t_start, t_stop]s
    print('\n*** APPLY INV OP ***\n')
    good_events_file = ''
    print(events_id)
    if is_epoched and events_id != {}:
        if events_file:
            events = mne.read_events(events_file)
        else:
            events = mne.find_events(raw)
        picks = mne.pick_types(info, meg=True, eog=True, exclude='bads')
        reject = _create_reject_dict(info)

        if is_evoked:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(t_min, 0),
                                reject=reject)
            evoked = [epochs[k].average() for k in events_id]
            snr = 3.0
            lambda2 = 1.0 / snr**2

            ev_list = list(events_id.items())
            for k in range(len(events_id)):
                stc = apply_inverse(evoked[k],
                                    inverse_operator,
                                    lambda2,
                                    inv_method,
                                    pick_ori=pick_ori)

                print(('\n*** STC for event %s ***\n' % ev_list[k][0]))
                stc_file = op.abspath(basename + '_' + ev_list[k][0])

                print('***')
                print(('stc dim ' + str(stc.shape)))
                print('***')

        else:
            epochs = mne.Epochs(raw,
                                events,
                                events_id,
                                t_min,
                                t_max,
                                picks=picks,
                                baseline=(None, 0),
                                reject=reject)
            epochs.drop_bad()
            good_events_file = op.abspath('good_events.txt')
            np.savetxt(good_events_file, epochs.events)

            stc = apply_inverse_epochs(epochs,
                                       inverse_operator,
                                       lambda2,
                                       inv_method,
                                       pick_ori=pick_ori)

    elif is_epoched and events_id == {}:
        stc = apply_inverse_epochs(epochs,
                                   inverse_operator,
                                   lambda2,
                                   inv_method,
                                   pick_ori=pick_ori)

    else:
        stc = apply_inverse_raw(raw,
                                inverse_operator,
                                lambda2,
                                inv_method,
                                label=None,
                                start=None,
                                stop=None,
                                buffer_size=1000,
                                pick_ori=pick_ori)  # None 'normal'

    if not isinstance(stc, list):
        print('***')
        print(('stc dim ' + str(stc.shape)))
        print('***')

        stc = [stc]
    else:
        print('***')
        print(('len stc %d' % len(stc)))
        print('***')

    print('**************************************************************')
    print('all_src_space: {}'.format(all_src_space))
    print('ROIs_mean: {}'.format(ROIs_mean))
    print('**************************************************************')
    if all_src_space:
        stc_data = list()
        stc_file = op.abspath(basename + '_stc.hdf5')

        for i in range(len(stc)):
            stc_data.append(stc[i].data)

        write_hdf5(stc_file, stc_data, dataset_name='stc_data')

    if ROIs_mean:
        label_ts, labels_file, label_names_file, label_coords_file = \
            _compute_mean_ROIs(stc, sbj_id, subjects_dir, parc,
                               inverse_operator, forward, aseg, is_fixed)

        ts_file = op.abspath(basename + '_ROI_ts.npy')
        np.save(ts_file, label_ts)

    else:
        ts_file = stc_file
        labels_file = ''
        label_names_file = ''
        label_coords_file = ''

    return ts_file, labels_file, label_names_file, \
        label_coords_file, good_events_file
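# --- Added usage sketch (hypothetical file names; not from the original source):
#
# ts_file, labels_file, names_file, coords_file, good_events_file = \
#     _compute_inverse_solution('sub01-raw.fif', 'sub01', '/data/fs_subjects',
#                               'sub01-fwd.fif', 'sub01-cov.fif',
#                               inv_method='dSPM', parc='aparc',
#                               all_src_space=False, ROIs_mean=True)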
    #    Switch:   Noise + [unit current / label area] in RTPJ
    raw_sim = []
    stc_est = []
    stc_est_sph = []
    for act_scale, trial_type in zip([0, 1], ['switch', 'maintain']):
        # Generate simulated stc activation
        stc_sim = simulate_stc(subj_d[di]['inv']['src'], [subj_d[di]['lab']],
                               stc_data=act_scale * stc_activation, tmin=0,
                               tstep=0.001)

        # Generate simulated raw data
        raw_temp = gen_x_raw(n_trials, raw_template, stc_sim, s_dict)

        # Calculate source estimates
        stc_est_sph.append(apply_inverse_raw(raw_temp, s_dict['inv_sph'],
                                             lambda2, 'MNE',
                                             label=fsaverage['lab']))
        stc_est_temp = apply_inverse_raw(raw_temp, s_dict['inv'], lambda2, 'MNE')
                                         #label=s_dict['lab'])

        # Need to morph to fsaverage to match spherical and morphed data
        stc_est.append(mne.morph_data_precomputed(
            subject_from=s_dict['Struct'], subject_to='fsaverage',
            stc_from=stc_est_temp, vertices_to=fsaverage['vertices'],
            morph_mat=s_dict['fs_morph']).in_label(fsaverage['lab']))

        # Resample and store
        # Use copy in raw_sim because of strange error with in-place resample
        raw_sim.append(raw_temp.resample(1. / bin_width, copy=True,
                                         n_jobs=n_jobs, verbose=False))
        stc_est[-1].resample(1. / bin_width, n_jobs=n_jobs, verbose=False)