# Example #1
def test_LCMV_inverse_solution():
    """Test computing the LCMV inverse solution.

    Runs the InverseSolution node with inv_method='LCMV' on the 'sample'
    data and checks that the ROI time series, the label files and the
    pickled ROI dict are mutually consistent and agree with the 'aparc'
    annotation and the raw recording.
    """
    inverse_node = pe.Node(interface=InverseSolution(), name='inverse')
    inverse_node.inputs.sbj_id = 'sample'
    inverse_node.inputs.subjects_dir = subjects_dir
    inverse_node.inputs.raw_filename = raw_fname
    inverse_node.inputs.fwd_filename = fwd_fname
    inverse_node.inputs.cov_filename = cov_fname
    inverse_node.inputs.inv_method = 'LCMV'

    inverse_node.run()

    # test that the number of labels matches with 'aparc' annotation and the
    # number of time points with the ones of raw
    assert inverse_node.result.outputs.ts_file
    data = np.load(inverse_node.result.outputs.ts_file)

    labels = mne.read_labels_from_annot(sbj,
                                        parc='aparc',
                                        subjects_dir=subjects_dir)

    assert data.shape[1] == len(labels)

    raw = mne.io.read_raw_fif(raw_fname)
    assert data.shape[2] == len(raw.times)

    # check if the label files were created
    assert inverse_node.result.outputs.labels
    assert inverse_node.result.outputs.label_names
    assert inverse_node.result.outputs.label_coords

    # label-names file has one ROI name per line; close the handle properly
    # (original leaked the file object opened inside the comprehension)
    with open(inverse_node.result.outputs.label_names) as f:
        label_names = [line.strip() for line in f]
    assert data.shape[1] == len(label_names)

    label_coo = np.loadtxt(inverse_node.result.outputs.label_coords)
    assert label_coo.shape[1] == 3

    with open(inverse_node.result.outputs.labels, 'rb') as f:
        roi = pickle.load(f)

    assert data.shape[1] == len(roi['ROI_names'])
    assert label_names == roi['ROI_names']

    assert len(roi['ROI_coords']) == len(roi['ROI_names'])
    # BUGFIX: original compared ROI_colors against itself (always True);
    # the intent is one color per ROI name.
    assert len(roi['ROI_colors']) == len(roi['ROI_names'])

    assert np.concatenate(roi['ROI_coords']).shape[1] == 3
    assert_array_almost_equal(np.concatenate(roi['ROI_coords']), label_coo)
# Example #2
def test_fixed_mne_inverse_solution():
    """Test computing the MNE inverse solution with fixed orientation."""
    node = pe.Node(interface=InverseSolution(), name='inverse')

    # configure all node inputs for the 'sample' subject in one place
    params = dict(sbj_id='sample',
                  subjects_dir=subjects_dir,
                  raw_filename=raw_fname,
                  fwd_filename=fwd_fname,
                  cov_filename=cov_fname,
                  is_fixed=True)
    for trait, value in params.items():
        setattr(node.inputs, trait, value)

    node.run()

    # the ROI time-series file must have been produced
    assert node.result.outputs.ts_file
# Example #3
def test_mne_inverse_solution_epoched_data():
    """Test computing the MNE inverse solution on epoched data."""
    # Build an epochs file from the raw recording to feed the node.
    raw = mne.io.read_raw_fif(raw_fname)
    picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False,
                           stim=False, exclude='bads')
    events = mne.find_events(raw, stim_channel='STI 014')
    # event trigger codes for the two auditory conditions
    event_id = dict(aud_l=1, aud_r=2)
    tmin, tmax = -0.2, 0.5  # epoch window around each trigger, in seconds
    epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
                        picks=picks, baseline=(None, 0), preload=False)
    # Save all epochs to a -epo.fif file next to the raw file.
    epo_fname = raw_fname.replace('.fif', '-epo.fif')
    epochs.save(epo_fname)

    node = pe.Node(interface=InverseSolution(), name='inverse')
    node.inputs.sbj_id = 'sample'
    node.inputs.subjects_dir = subjects_dir
    node.inputs.raw_filename = epo_fname
    node.inputs.fwd_filename = fwd_fname
    node.inputs.cov_filename = cov_fname
    node.inputs.is_epoched = True

    node.run()

    assert node.result.outputs.ts_file

    # the source time series must keep the epoch structure of the input
    data = np.load(node.result.outputs.ts_file)
    saved_epochs = mne.read_epochs(epo_fname)

    assert data.shape[0] == saved_epochs.events.shape[0]
    assert data.shape[2] == saved_epochs.get_data().shape[2]
# Example #4
def test_mne_inverse_solution_evoked_data():
    """Test computing the MNE inverse solution on evoked (averaged) data."""
    node = pe.Node(interface=InverseSolution(), name='inverse')
    node.inputs.sbj_id = 'sample'
    node.inputs.subjects_dir = subjects_dir
    node.inputs.raw_filename = ave_fname
    node.inputs.fwd_filename = fwd_fname
    node.inputs.cov_filename = cov_fname
    node.inputs.is_ave = True

    node.run()

    assert node.result.outputs.ts_file

    # one source time series per evoked condition, with a matching time axis
    data = np.load(node.result.outputs.ts_file)
    evokeds = mne.read_evokeds(ave_fname)

    assert data.shape[0] == len(evokeds)
    assert data.shape[2] == evokeds[0].data.shape[1]
# Example #5
def create_pipeline_source_reconstruction(main_path,
                                          sbj_dir,
                                          pipeline_name='inv_sol_pipeline',
                                          spacing='ico-5',
                                          inv_method='MNE',
                                          is_epoched=False,
                                          events_id=None,
                                          t_min=None,
                                          t_max=None,
                                          is_evoked=False,
                                          parc='aparc',
                                          aseg=False,
                                          aseg_labels=None,
                                          noise_cov_fname=None,
                                          save_stc=False,
                                          save_mixed_src_space=False,
                                          is_fixed=False):
    """
    Description:

        Source reconstruction pipeline

    Inputs:

        main_path : str
            the main path of the workflow
        sbj_dir : str
            Freesurfer directory
        pipeline_name : str (default inv_sol_pipeline)
            name of the pipeline
        spacing : str (default 'ico-5')
            spacing to use to setup a source space
        inv_method : str (default MNE)
            the inverse method to use; possible choices: MNE, dSPM, sLORETA
        is_epoched : bool (default False)
            if True and events_id = None the input data are epoch data
            in the format -epo.fif
            if True and events_id is not None, the raw data are epoched
            according to events_id and t_min and t_max values
        is_fixed : bool (default False)
            if True we use fixed orientation
        events_id: dict (default None)
            the dict of events
        t_min, t_max: int (default None)
            define the time interval in which to epoch the raw data
        is_evoked: bool (default False)
            if True the raw data will be averaged according to the events
            contained in the dict events_id
        parc: str (default 'aparc')
            the parcellation defining the ROIs atlas in the source space
        aseg: bool (default False)
            if True a mixed source space will be created and the sub cortical
            regions defined in aseg_labels will be added to the source space
        aseg_labels: list (default None, treated as [])
            list of substructures we want to include in the mixed source space
        noise_cov_fname: str (default None)
            template for the path to either the noise covariance matrix file or
            the empty room data
        save_stc: bool (default False)
            if True the stc will be saved
        save_mixed_src_space: bool (default False)
            if True the mixed src space will be saved in the FS folder

    Inputs (inputnode):

        raw : str
            path to raw data in fif format
        sbj_id : str
            subject id

    Outputs:

        pipeline : instance of Workflow

    """
    # BUGFIX: the original used a mutable default argument (aseg_labels=[]),
    # which is shared across calls; use a None sentinel instead.
    if aseg_labels is None:
        aseg_labels = []

    pipeline = pe.Workflow(name=pipeline_name)
    pipeline.base_dir = main_path

    inputnode = pe.Node(IdentityInterface(fields=['sbj_id', 'raw']),
                        name='inputnode')

    # Lead Field computation Node
    LF_computation = pe.Node(interface=LFComputation(), name='LF_computation')
    LF_computation.inputs.sbj_dir = sbj_dir
    LF_computation.inputs.spacing = spacing
    LF_computation.inputs.aseg = aseg
    if aseg:
        LF_computation.inputs.aseg_labels = aseg_labels
        LF_computation.inputs.save_mixed_src_space = save_mixed_src_space

    pipeline.connect(inputnode, 'sbj_id', LF_computation, 'sbj_id')

    # NOTE: the original guarded events_id with try/except NameError; that
    # was dead code, since events_id is a parameter and is always bound.
    if is_epoched and events_id is None:
        pipeline.connect(inputnode, ('raw', get_epochs_info), LF_computation,
                         'raw_info')
    else:
        pipeline.connect(inputnode, ('raw', get_raw_info), LF_computation,
                         'raw_info')

    pipeline.connect(inputnode, 'raw', LF_computation, 'raw_fname')

    # Noise Covariance Matrix Node
    create_noise_cov = pe.Node(interface=NoiseCovariance(),
                               name="create_noise_cov")

    create_noise_cov.inputs.cov_fname_in = noise_cov_fname
    create_noise_cov.inputs.is_epoched = is_epoched
    create_noise_cov.inputs.is_evoked = is_evoked
    if is_evoked:
        create_noise_cov.inputs.events_id = events_id
        create_noise_cov.inputs.t_min = t_min
        create_noise_cov.inputs.t_max = t_max

    pipeline.connect(inputnode, 'raw', create_noise_cov, 'raw_filename')

    # Inverse Solution Node
    inv_solution = pe.Node(interface=InverseSolution(), name='inv_solution')

    inv_solution.inputs.sbj_dir = sbj_dir
    inv_solution.inputs.inv_method = inv_method
    inv_solution.inputs.is_epoched = is_epoched
    inv_solution.inputs.is_fixed = is_fixed

    # when epoching is done inside the pipeline, forward the epoching window
    if is_epoched and events_id is not None:
        inv_solution.inputs.events_id = events_id
        inv_solution.inputs.t_min = t_min
        inv_solution.inputs.t_max = t_max

    inv_solution.inputs.is_evoked = is_evoked
    if is_epoched and is_evoked:
        inv_solution.inputs.events_id = events_id

    inv_solution.inputs.parc = parc
    inv_solution.inputs.aseg = aseg
    if aseg:
        inv_solution.inputs.aseg_labels = aseg_labels

    inv_solution.inputs.save_stc = save_stc

    pipeline.connect(inputnode, 'sbj_id', inv_solution, 'sbj_id')
    pipeline.connect(inputnode, 'raw', inv_solution, 'raw_filename')
    pipeline.connect(LF_computation, 'fwd_filename', inv_solution,
                     'fwd_filename')
    pipeline.connect(create_noise_cov, 'cov_fname_out', inv_solution,
                     'cov_filename')

    return pipeline