Example #1
main_workflow = pe.Workflow(name=preproc_pipeline_name)
main_workflow.base_dir = data_path

###############################################################################
# Then we create a node to pass input filenames to DataGrabber from nipype

infosource = create_iterator(['subject_id', 'session_id'],
                             [subject_ids, session_ids])

###############################################################################
# and a node to grab data. The template_args in this node iterate over
# the values produced by the infosource node

template_path = '*%s/%s/meg/%s*rest*0_60*raw.fif'
template_args = [['subject_id', 'session_id', 'subject_id']]
datasource = create_datagrabber(data_path, template_path, template_args)
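
###############################################################################
# As a quick illustration of how DataGrabber fills in the template, each
# entry of template_args is substituted into the %s placeholders of
# template_path. The labels below are hypothetical and used only for this
# sketch; the real values come from subject_ids and session_ids.

example_subject, example_session = 'sub-01', 'ses-01'
print(template_path % (example_subject, example_session, example_subject))
# -> '*sub-01/ses-01/meg/sub-01*rest*0_60*raw.fif', globbed under data_path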

###############################################################################
# Ephypype provides a pipeline that can be connected to the nodes we just
# created. The preprocessing pipeline is implemented by the function
# :func:`ephypype.pipelines.preproc_meeg.create_pipeline_preproc_meeg`, thus to
# instantiate this pipeline node, we import it and pass our
# parameters to it.
# The preprocessing pipeline contains two nodes that are based on the MNE
# Python functions performing the decomposition of the MEG/EEG signal using an
# |ICA| algorithm.
#
# .. |ICA| raw:: html
#
#    <a href="https://mne.tools/stable/auto_tutorials/preprocessing/plot_40_artifact_correction_ica.html" target="_blank">ICA</a>
#
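
###############################################################################
# A minimal sketch of that step is shown below. The keyword arguments
# (l_freq, h_freq, down_sfreq, variance, ECG_ch_name, EoG_ch_name) and the
# 'inputnode.*' connection names are assumptions based on typical ephypype
# preprocessing examples, and the values are only illustrative; check the
# docstring of create_pipeline_preproc_meeg for the exact signature.

from ephypype.pipelines.preproc_meeg import create_pipeline_preproc_meeg

preproc_workflow = create_pipeline_preproc_meeg(
    data_path, l_freq=0.1, h_freq=150., down_sfreq=300,
    variance=0.95, ECG_ch_name='ECG', EoG_ch_name='EOG')

# connect the iterator and the datagrabber to the pipeline inputs
main_workflow.connect(infosource, 'subject_id', datasource, 'subject_id')
main_workflow.connect(infosource, 'session_id', datasource, 'session_id')
main_workflow.connect(datasource, 'raw_file',
                      preproc_workflow, 'inputnode.raw_file')
main_workflow.connect(infosource, 'subject_id',
                      preproc_workflow, 'inputnode.sbj_id')
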
Example #2
main_workflow.base_dir = data_path

###############################################################################
# Then we create a node to pass input filenames to DataGrabber from nipype

infosource = create_iterator(['subject_id'], [subject_ids])

###############################################################################
# and a node to grab data. The template_args in this node iterate over
# the values produced by the infosource node

template_path = '*%s/MEG/%s_sss_filt_ica-raw.fif'
template_args = [['subject_id', 'subject_id']]
infields = ['subject_id']
datasource = create_datagrabber(data_path,
                                template_path,
                                template_args,
                                infields=infields)

###############################################################################
# Ephypype provides a pipeline that can be connected to the nodes we just
# created. The inverse solution pipeline is implemented by the
# function ephypype.pipelines.preproc_meeg.create_pipeline_source_reconstruction,  # noqa
# thus to instantiate the inverse pipeline node, we import it and pass our
# parameters to it.
# The inverse pipeline contains three nodes that wrap the MNE Python functions
# that perform the source reconstruction steps.
#
# In particular, these three nodes are:
#
# * ephypype.interfaces.mne.LF_computation.LFComputation computes the
#   Lead Field matrix
# * ephypype.interfaces.mne.Inverse_solution.NoiseCovariance computes
#   the noise covariance matrix
# * ephypype.interfaces.mne.Inverse_solution.InverseSolution estimates
#   the time series of the neural sources on a set of dipoles grid
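
###############################################################################
# A minimal sketch of instantiating and connecting this pipeline is shown
# below. The keyword arguments and the 'inputnode.*' field names are
# assumptions based on typical ephypype source reconstruction examples, and
# subjects_dir is assumed to point to the FreeSurfer subjects directory;
# check the function's docstring for the exact signature.

# NOTE: depending on the ephypype version, this function may live in
# ephypype.pipelines.fif_to_inv_sol rather than ephypype.pipelines.preproc_meeg
from ephypype.pipelines.preproc_meeg import create_pipeline_source_reconstruction  # noqa

inv_sol_workflow = create_pipeline_source_reconstruction(
    data_path, subjects_dir, spacing='ico-5', inv_method='MNE', parc='aparc')

main_workflow.connect(infosource, 'subject_id', datasource, 'subject_id')
main_workflow.connect(infosource, 'subject_id',
                      inv_sol_workflow, 'inputnode.sbj_id')
main_workflow.connect(datasource, 'raw_file',
                      inv_sol_workflow, 'inputnode.raw')
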
Example #3
def create_main_workflow_FS_segmentation():

    # Check environment variables
    if not os.environ.get('FREESURFER_HOME'):
        raise RuntimeError('FREESURFER_HOME environment variable not set')

    if not os.environ.get('SUBJECTS_DIR'):
        os.environ["SUBJECTS_DIR"] = subjects_dir

        if not op.exists(subjects_dir):
            os.mkdir(subjects_dir)

    print('SUBJECTS_DIR %s ' % os.environ["SUBJECTS_DIR"])

    main_workflow = pe.Workflow(name=MAIN_WF_name)
    main_workflow.base_dir = subjects_dir

    # (1) we create a node to pass input filenames to DataGrabber from nipype
    #     iterate over subjects
    infosource = create_iterator(['subject_id'], [subject_ids])

    # and a node to grab data. The template_args in this node iterate over
    # the values produced by the infosource node.
    # Here we define an input field for datagrabber called subject_id.
    # This is then used to fill the template (see %s in the template).
    # We look for .nii.gz files.
    template_path = '%s/anatomy/highres001.nii.gz'
    template_args = [['subject_id']]
    infields = ['subject_id']
    datasource = create_datagrabber(data_path,
                                    template_path,
                                    template_args,
                                    infields=infields)

    # (2) ReconAll Node to generate surfaces and parcellations of structural
    #     data from anatomical images of a subject.
    recon_all = pe.Node(interface=ReconAll(),
                        infields=['T1_files'],
                        name='recon_all')
    recon_all.inputs.subjects_dir = subjects_dir
    recon_all.inputs.directive = 'all'

    # reconall_workflow will be a node of the main workflow
    reconall_workflow = pe.Workflow(name=FS_WF_name)
    reconall_workflow.base_dir = wf_path

    reconall_workflow.connect(infosource, 'subject_id', recon_all,
                              'subject_id')

    reconall_workflow.connect(infosource, 'subject_id', datasource,
                              'subject_id')

    reconall_workflow.connect(datasource, 'raw_file', recon_all, 'T1_files')

    # (3) BEM generation using make_watershed_bem from the MNE Python package
    bem_generation = pe.Node(
        interface=Function(input_names=['subjects_dir', 'sbj_id'],
                           output_names=['sbj_id'],
                           function=_create_bem_sol),
        name='call_mne_watershed_bem')
    bem_generation.inputs.subjects_dir = subjects_dir
    main_workflow.connect(reconall_workflow, 'recon_all.subject_id',
                          bem_generation, 'sbj_id')

    return main_workflow
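
# A minimal sketch of how the assembled workflow could then be executed
# (standard nipype usage; the plugin choice and n_procs value are only
# illustrative):
if __name__ == '__main__':
    wf = create_main_workflow_FS_segmentation()
    wf.write_graph(graph2use='colored')  # save a picture of the workflow graph
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
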
Example #4
main_workflow = pe.Workflow(name=workflow_name)
main_workflow.base_dir = data_path

###############################################################################
# Then we create a node to pass input filenames to DataGrabber from nipype

subject_ids = ['SubjectUCI29']
infosource = create_iterator(['subject_id'], [subject_ids])

###############################################################################
# and a node to grab data.

template_path = '%s*.mat'
template_args = [['subject_id']]
datasource = create_datagrabber(data_path,
                                template_path,
                                template_args,
                                infields=['subject_id'])

###############################################################################
# We then connect the output (subject_id) of the infosource node to the
# datasource one.
# So, these two nodes taken together can grab data.
main_workflow.connect(infosource, 'subject_id', datasource, 'subject_id')

###############################################################################
# Now, the :class:`ephypype.nodes.Reference` interface is encapsulated in a
# node and connected to the datasource node.
# We set the channel names of the sEEG data and set refmethod to 'bipolar'
# in order to apply a bipolar montage to the depth electrodes.

reference_node = pe.Node(interface=Reference(), name='rereference')
reference_node.inputs.channels = channels_name
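
###############################################################################
# The text above also mentions setting refmethod to 'bipolar'. A minimal
# sketch of that step, and of feeding the grabbed files into the node, is
# shown below; 'refmethod' follows the description above, while the input
# field name 'ft_data' used in the connection is a hypothetical placeholder
# (check the input spec of the Reference interface).

reference_node.inputs.refmethod = 'bipolar'  # bipolar montage for depth electrodes

# 'raw_file' is the datasource output used in the other examples;
# 'ft_data' is a hypothetical input name for the Reference node
main_workflow.connect(datasource, 'raw_file', reference_node, 'ft_data')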