templates = {
    'mask':
    os.path.join(
        data_root, 'sub-{subject_id}', 'ses-1', 'func',
        'sub-{subject_id}_ses-1_task-{task_id}_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz'
    ),
    'regressors':
    os.path.join(
        data_root, 'sub-{subject_id}', 'ses-1', 'func',
        'sub-{subject_id}_ses-1_task-{task_id}_desc-confounds_regressors.tsv'),
    'events':
    os.path.join(out_root, 'event_files',
                 'sub-{subject_id}_task-{task_id}_cond.csv')
}

# Flexibly collect data from disk to feed into workflows.
selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_root),
                      name="selectfiles")

# Extract motion parameters from regressors file
runinfo = pe.Node(niu.Function(input_names=[
    'in_file', 'events_file', 'regressors_file', 'regressors_names',
    'motion_columns'
],
                               function=_bids2nipypeinfo,
                               output_names=['info', 'realign_file']),
                  name='runinfo')

# Set the column names to be used from the confounds file
# (reference: a paper from the Poldrack lab)
runinfo.inputs.regressors_names = ['std_dvars', 'framewise_displacement'] + \
                                  ['a_comp_cor_%02d' % i for i in range(6)]
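
# The 'motion_columns' input declared above is never assigned in this
# snippet. A hypothetical completion, assuming the six standard fMRIPrep
# motion-parameter columns:
runinfo.inputs.motion_columns = ['trans_x', 'trans_y', 'trans_z',
                                 'rot_x', 'rot_y', 'rot_z']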
Example #2
templates = {
    'mask':
    os.path.join(
        data_root, 'sub-{subject_id}', 'ses-1', 'func',
        'sub-{subject_id}_ses-1_task-{task_id}_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz'
    ),
    'regressors':
    os.path.join(
        data_root, 'sub-{subject_id}', 'ses-1', 'func',
        'sub-{subject_id}_ses-1_task-{task_id}_desc-confounds_regressors.tsv'),
    'events':
    os.path.join(out_root, 'event_files',
                 'sub-{subject_id}_task-{task_id}_cond_v3.csv')
}

# Flexibly collect data from disk to feed into workflows.
selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_root),
                      name="selectfiles")

selectfiles.inputs.task_id = [1, 2, 3, 4, 5, 6, 7, 8]

# Extract motion parameters from regressors file
runinfo = MapNode(util.Function(input_names=[
    'in_file', 'events_file', 'regressors_file', 'regressors_names',
    'motion_columns'
],
                                function=_bids2nipypeinfo,
                                output_names=['info', 'realign_file']),
                  name='runinfo',
                  iterfield=['in_file', 'events_file', 'regressors_file'])

# Set the column names to be used from the confounds file
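# (The assignment is truncated in the source; presumably the same selection
# as in Example #1 -- the following line is an assumption:)
runinfo.inputs.regressors_names = ['std_dvars', 'framewise_displacement'] + \
                                  ['a_comp_cor_%02d' % i for i in range(6)]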
Example #3
def calc_local_metrics(preprocessed_data_dir,
                       subject_id,
                       parcellations_dict,
                       bp_freq_list,
                       fd_thresh,
                       working_dir,
                       ds_dir,
                       use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import utils as calc_metrics_utils

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 15})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
                                      ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
                                      ('_parcellation_', ''),
                                      ('_bp_freqs_', 'bp_'),
                                      ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(util.IdentityInterface(fields=['parcellation']), name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation', list(parcellations_dict.keys()))

    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']), name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(
        {
            'parcellation_time_series': '{subject_id}/con_mat/parcellated_time_series/bp_{bp_freqs}/{parcellation}/parcellation_time_series.npy'},
        base_directory=preprocessed_data_dir),
        name='selectfiles')
    selectfiles.inputs.subject_id = subject_id
    wf.connect(parcellation_infosource, 'parcellation', selectfiles, 'parcellation')
    wf.connect(bp_filter_infosource, 'bp_freqs', selectfiles, 'bp_freqs')

    fd_file = Node(nio.SelectFiles({'fd_p': '{subject_id}/QC/FD_P_ts'}, base_directory=preprocessed_data_dir),
                   name='fd_file')
    fd_file.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############

    get_good_trs = Node(util.Function(input_names=['fd_file', 'fd_thresh'],
                                      output_names=['good_trs', 'fd_scrubbed_file'],
                                      function=calc_metrics_utils.get_good_trs),
                        name='get_good_trs')
    wf.connect(fd_file, 'fd_p', get_good_trs, 'fd_file')
    get_good_trs.inputs.fd_thresh = fd_thresh

    parcellated_ts_scrubbed = Node(util.Function(input_names=['parcellation_time_series_file', 'good_trs'],
                                                 output_names=['parcellation_time_series_scrubbed'],
                                                 function=calc_metrics_utils.parcellation_time_series_scrubbing),
                                   name='parcellated_ts_scrubbed')

    wf.connect(selectfiles, 'parcellation_time_series', parcellated_ts_scrubbed, 'parcellation_time_series_file')
    wf.connect(get_good_trs, 'good_trs', parcellated_ts_scrubbed, 'good_trs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(input_names=['in_data', 'extraction_method'],
                                 output_names=['matrix', 'matrix_file'],
                                 function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts_scrubbed, 'parcellation_time_series_scrubbed', con_mat, 'in_data')


    ##############
    ## ds
    ##############

    wf.connect(get_good_trs, 'fd_scrubbed_file', ds, 'QC.@fd_scrubbed_file')
    fd_str = ('%.1f' % fd_thresh).replace('.', '_')
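    # e.g. fd_thresh=0.5 -> '0_5'; used below to tag the DataSink folder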
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix_scrubbed_%s.@mat' % fd_str)

    # wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    # wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    # wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name, plugin_args={'initial_specs': 'request_memory = 1500'})
    elif plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
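
# A hypothetical invocation of the function above (all paths and parameters
# are placeholders, not taken from the original source):
calc_local_metrics(preprocessed_data_dir='/data/preprocessed',
                   subject_id='sub001',
                   parcellations_dict={'aal': '/templates/aal.nii.gz'},
                   bp_freq_list=[(0.01, 0.1)],
                   fd_thresh=0.5,
                   working_dir='/scratch/metrics_wd',
                   ds_dir='/data/metrics',
                   use_n_procs=4,
                   plugin_name='MultiProc')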
Example #4
def create_ml_preprocess_workflow(
        name,
        project_dir,
        work_dir,
        sessions_file,
        session_template,
        fs_dir,
        annot_template='{subject_id}/label/{hemi}.aparc.a2009s.annot',
        fwhm_vals=[2],
        ico_order_vals=[4],
        do_save_vol_ds=False,
        do_save_smooth_vol_ds=False,
        do_save_surface_smooth_vol_ds=False,
        do_save_surface_ds=False,
        do_save_smooth_surface_ds=False,
        do_save_sphere_nifti=False,
        do_save_sphere_ds=True,
        do_save_join_sessions_ds=True,
        do_save_join_subjects_ds=True):

    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [
        (sid, [s for i, s, r in zip(*sessions_info.values()) if i == sid])
        for sid in subject_ids
    ]

    ##for each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    ##for each session
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    template = {
        'nifti_file': 'mri/f.nii.gz',
        'attributes_file': 'attributes.txt',
        'reg_file': 'mri/transforms/functional_to_anatomy.dat'
    }
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'

    workflow.connect(get_files, 'nifti_file', vol_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', vol_to_ds,
                     'attributes_file')
    workflow.connect(subjects, 'subject_id', vol_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', vol_to_ds, 'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds, 'ds_file', datasink, 'ml.@vol')

    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm', fwhm_vals)]

    if do_save_smooth_vol_ds:
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files, 'nifti_file', smooth_vol, 'in_file')
        workflow.connect(fwhm, 'fwhm', smooth_vol, 'fwhm')

        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                   name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'

        workflow.connect(smooth_vol, 'out_file', smooth_vol_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_vol_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_vol_to_ds,
                         'session_id')

        workflow.connect(smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@smooth_vol')

    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(),
                                     name='surface_smooth_vol')
        workflow.connect(get_files, 'reg_file', surface_smooth_vol, 'reg_file')
        workflow.connect(get_files, 'nifti_file', surface_smooth_vol,
                         'in_file')
        workflow.connect(fwhm, 'fwhm', surface_smooth_vol, 'surface_fwhm')

        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                           name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'

        workflow.connect(surface_smooth_vol, 'out_file',
                         surface_smooth_vol_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         surface_smooth_vol_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', surface_smooth_vol_to_ds,
                         'session_id')

        workflow.connect(surface_smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@surface_smooth_vol')

    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi', ['lh', 'rh'])]

    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    to_surface.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_surface, 'hemi')
    workflow.connect(get_files, 'nifti_file', to_surface, 'source_file')
    workflow.connect(get_files, 'reg_file', to_surface, 'reg_file')

    if do_save_surface_ds:
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_to_ds')
        workflow.connect(to_surface, 'out_file', surface_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file', surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_to_ds, 'subject_id')
        workflow.connect(sessions, 'session_dir', surface_to_ds, 'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds, 'ds_file', join_surfaces,
                         'input_datasets')

        workflow.connect(join_surfaces, 'joined_dataset', datasink,
                         'ml.@surface')

    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    smooth_surface.inputs.subjects_dir = fs_dir
    workflow.connect(to_surface, 'out_file', smooth_surface, 'in_file')
    workflow.connect(sessions, 'subject_id', smooth_surface, 'subject_id')
    workflow.connect(hemi, 'hemi', smooth_surface, 'hemi')
    workflow.connect(fwhm, 'fwhm', smooth_surface, 'fwhm')

    if do_save_smooth_surface_ds:
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                       name='smooth_surface_to_ds')
        workflow.connect(smooth_surface, 'out_file', smooth_surface_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_surface_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_surface_to_ds,
                         'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds, 'ds_file', join_smooth_surfaces,
                         'input_datasets')

        workflow.connect(join_smooth_surfaces, 'joined_dataset', datasink,
                         'ml.@smooth_surface')

    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']),
                        name='ico_order')
    ico_order.iterables = [('ico_order', ico_order_vals)]

    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    to_sphere.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_sphere, 'hemi')
    workflow.connect(smooth_surface, 'out_file', to_sphere, 'source_file')
    workflow.connect(subjects, 'subject_id', to_sphere, 'source_subject')
    workflow.connect(ico_order, 'ico_order', to_sphere, 'target_ico_order')

    if do_save_sphere_nifti:
        workflow.connect(to_sphere, 'out_file', datasink, 'surf.@sphere')

    template = {'annot_file': annot_template}
    get_annot_file = pe.Node(nio.SelectFiles(template), name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi, 'hemi', get_annot_file, 'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    transform_annot.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', transform_annot, 'hemi')
    workflow.connect(get_annot_file, 'annot_file', transform_annot,
                     'source_annot_file')
    workflow.connect(ico_order, 'ico_order', transform_annot,
                     'target_ico_order')

    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere, 'out_file', sphere_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', sphere_to_ds,
                     'attributes_file')
    workflow.connect(transform_annot, 'out_file', sphere_to_ds, 'annot_file')
    workflow.connect(subjects, 'subject_id', sphere_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', sphere_to_ds, 'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(),
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True

    workflow.connect(sphere_to_ds, 'ds_file', join_hemispheres,
                     'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres, 'joined_dataset', datasink,
                         'ml.@sphere')

    join_sessions = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres, 'joined_dataset', join_sessions,
                     'input_datasets')

    if do_save_join_sessions_ds:
        join_sessions_sink = pe.Node(nio.DataSink(), name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(subjects, 'subject_id', join_sessions_sink,
                         'container')
        workflow.connect(join_sessions, 'joined_dataset', join_sessions_sink,
                         '@join_sessions')

    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions, 'joined_dataset', join_subjects,
                     'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(), name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(join_subjects, 'joined_dataset', join_subjects_sink,
                         '@join_subjects')

    return workflow
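
# A minimal sketch of constructing and running the workflow above (every
# argument is a hypothetical placeholder):
wf = create_ml_preprocess_workflow(
    name='ml_preprocess',
    project_dir='/data/project',
    work_dir='/scratch/ml_wd',
    sessions_file='/data/project/sessions.csv',
    session_template={'session_dir': 'sessions/{session_dir}'},
    fs_dir='/data/freesurfer')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})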
Example #5
getmuselabel = Node(Function(input_names=['key', 'dict'],
                             output_names=['muselabel'],
                             function=get_value),
                    name='getmuselabel')
getmuselabel.inputs.dict = muselabel_dict
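
# A minimal sketch of the helper wrapped above (hypothetical; the original
# get_value is not shown in this snippet):
def get_value(key, dict):  # 'dict' shadows the builtin to match input_names
    return dict[key]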

# get the composite transform that corresponds to the MRI "concurrent" with PET
compositetransform_dir = '/output/mri/output/MNI_space'

# Get the affine matrix and warp between MRI space and study-specific template space
templates = {
    'composite':
    os.path.join(compositetransform_dir, sitePrefix + '_{idvi}',
                 'compositetransform_to_mni.nii.gz')
}
selectfiles = Node(nio.SelectFiles(templates), name="selectfiles")

DVR_xlsx = JoinNode(interface=ConcatenateSpreadsheets(outputname='ROI_DVR'),
                    joinsource='infosource',
                    joinfield=['sheetlist'],
                    synchronize=True,
                    unique=True,
                    name='DVR_xlsx')
R1_xlsx = JoinNode(interface=ConcatenateSpreadsheets(outputname='ROI_R1'),
                   joinsource='infosource',
                   joinfield=['sheetlist'],
                   synchronize=True,
                   unique=True,
                   name='R1_xlsx')
R1_lrsc_xlsx = JoinNode(
    interface=ConcatenateSpreadsheets(outputname='ROI_R1_lrsc'),
    joinsource='infosource',
    joinfield=['sheetlist'],
    synchronize=True,
    unique=True,
    name='R1_lrsc_xlsx')
Example #6
def run_workflows(session=None, csv_file=None):
    from nipype import config
    #config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = 'derivatives/undistort'
    working_dir = 'workingdirs'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]), name="infosource")

    if csv_file is not None:
      print('=== reading csv ===')
      # Read csv and use pandas to set-up image and ev-processing
      df = pd.read_csv(csv_file)
      # init lists
      sub_img=[]; ses_img=[]; run_img=[]; ref_img=[]
      
      # fill lists to iterate mapnodes
      for index, row in df.iterrows():
        for r in row.run.strip("[]").split(" "):
            sub_img.append(row.subject)
            ses_img.append(row.session)
            run_img.append(r)
            if 'refsubject' in df.columns:
                if row.refsubject == 'nan':
                    # empty field
                    ref_img.append(row.subject)
                else:
                    # non-empty field
                    ref_img.append(row.refsubject) 
            else:
                ref_img.append(row.subject)

      infosource.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
      infosource.synchronize = True
    else:
      print("No csv-file specified. Don't know what data to process.")


    # SelectFiles
    templates = {
        'image': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc.nii.gz',
        'image_invPE': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/fmap/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_epi_res-1x1x1_preproc.nii.gz',
    }
    
    inputfiles = Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), 
                        name="input_files")

    # Datasink
    outfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="outfiles")

    # Use the following DataSink output substitutions
    outfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('resampled-isotropic-1mm','undistort'),
        ('undistort/ud_func', 'undistort'),
    ]  
       
    outfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1/func'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]
    
    templates_mv = {
        'ud_minus': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_MINUS.nii.gz',
        'ud_minus_warp': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_MINUS_WARP.nii.gz',
        'ud_plus': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_PLUS.nii.gz',
        'ud_plus_warp': 
        'derivatives/resampled-isotropic-1mm/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_PLUS_WARP.nii.gz',
    }
    
    mv_infiles = Node(
        nio.SelectFiles(templates_mv,
                        base_directory=data_dir), 
                        name="mv_infiles")

    # Datasink
    mv_outfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="mv_outfiles")

    # Use the following DataSink output substitutions
    mv_outfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('resampled-isotropic-1mm','undistort'),
        ('undistort/ud_func', 'undistort'),
    ]  
       
    mv_outfiles.inputs.regexp_substitutions = [
        (r'sub-([a-zA-Z0-9]+)_ses-([a-zA-Z0-9]+)', r'sub-\1/ses-\2/func/qwarp_plusminus/sub-\1_ses-\2'),
    ]    
    
    # -------------------------------------------- Create Pipeline
    undistort = Workflow(
        name='undistort',
        base_dir=os.path.join(ds_root, working_dir))

    undistort.connect([
        (infosource, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
               
    # plain Node (not MapNode): iteration is driven by infosource, so no iterfield
    qwarp = Node(afni.QwarpPlusMinus(
        nopadWARP=True, outputtype='NIFTI_GZ'),
                 name='qwarp')
        
    undistort.connect(inputfiles, 'image',
                        qwarp, 'in_file')
    undistort.connect(inputfiles, 'image_invPE',
                        qwarp, 'base_file') 
    undistort.connect(inputfiles, 'image',
                        qwarp, 'out_file')    
  
    nwarp = Node(afni.NwarpApply(out_file='%s_undistort.nii.gz'),name='nwarp')
    
    undistort.connect(inputfiles, 'image',
                     nwarp, 'in_file')
    undistort.connect(qwarp, 'source_warp',
                     nwarp, 'warp')
    undistort.connect(inputfiles, 'image',
                     nwarp, 'master')
    undistort.connect(nwarp, 'out_file',
                     outfiles, 'ud_func')

    undistort.stop_on_first_crash = False  # True
    undistort.keep_inputs = True
    undistort.remove_unnecessary_outputs = False
    undistort.write_graph()
    undistort.run()

    mv_ud = Workflow(
        name='mv_ud',
        base_dir=os.path.join(ds_root, working_dir))

    mv_ud.connect([
        (infosource, mv_infiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
    
    mv_ud.connect(mv_infiles, 'ud_minus',
                        mv_outfiles, 'ud_func.@ud_minus')
    mv_ud.connect(mv_infiles, 'ud_plus',
                        mv_outfiles, 'ud_func.@ud_plus')
    mv_ud.connect(mv_infiles, 'ud_minus_warp',
                        mv_outfiles, 'ud_func.@ud_minus_warp')
    mv_ud.connect(mv_infiles, 'ud_plus_warp',
                        mv_outfiles, 'ud_func.@ud_plus_warp')
    
    mv_ud.stop_on_first_crash = False  # True
    mv_ud.keep_inputs = True
    mv_ud.remove_unnecessary_outputs = False
    mv_ud.write_graph()
    mv_ud.run()

    # remove the undistorted files from the ...derivatives/resampled folder
    for index, row in df.iterrows():
        fpath = os.path.join(data_dir,'derivatives','resampled-isotropic-1mm',
                     'sub-' + row.subject,'ses-' + str(row.session),'func')
        for f in glob.glob(os.path.join(fpath,'*US*.nii.gz')):
            os.remove(f)
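
# A hypothetical invocation (the csv path is a placeholder, not from the
# original source):
run_workflows(csv_file='/data/project/undistort_runs.csv')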
Example #7
# iterate over sessions
session_infosource = Node(util.IdentityInterface(fields=['session']),
                          name='session_infosource')
session_infosource.iterables = [('session', sessions)]

# select files
templates = {
    'mapping':
    'mappings/rest/fixed_hdr/corr_{subject}_{session}_roi_detrended_median_corrected_mapping_fixed.nii.gz',
    'epi2highres_lin_itk':
    'resting/preprocessed/{subject}/{session}/registration/epi2highres_lin.txt',
    'epi2highres_warp':
    'resting/preprocessed/{subject}/{session}/registration/transform0Warp.nii.gz',
    't1_highres': 'struct/t1/{subject}*T1_Images_merged.nii.gz'
}
selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                   name="selectfiles")

mapping2struct.connect([
    (subject_infosource, selectfiles, [('subject', 'subject')]),
    (session_infosource, selectfiles, [('session', 'session')])
])

# merge func2struct transforms into list
translist_forw = Node(util.Merge(2), name='translist_forw')
mapping2struct.connect([
    (selectfiles, translist_forw, [('epi2highres_lin_itk', 'in2')]),
    (selectfiles, translist_forw, [('epi2highres_warp', 'in1')])
])
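# Note on ordering: downstream ANTs transform lists are applied in reverse
# order, so the warp in 'in1' and the affine in 'in2' means the affine is
# applied first (an assumption; the consuming ApplyTransforms node is not
# shown in this snippet).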

# project
Example #8
def create_resting(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                         vol_to_remove, TR, epi_resolution, highpass, lowpass,
                         echo_space, pe_dir, standard_brain, standard_brain_resampled, standard_brain_mask,
                         standard_brain_mask_resampled, fwhm_smoothing):
    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # main workflow
    func_preproc = Workflow(name='lemon_resting')
    func_preproc.base_dir = working_dir
    func_preproc.config['execution']['crashdump_dir'] = func_preproc.base_dir + "/crash_files"
    # select files
    templates = {'func': 'raw/{subject}/func/EPI_t2.nii',
                 'ap': 'raw/{subject}/topup/se.nii',
                 'pa': 'raw/{subject}/topup/seinv_ph.nii',
                 'anat_head': 'preprocessing/preprocessed/{subject}/structural/T1.nii.gz',  
                 'anat_brain': 'preprocessing/preprocessed/{subject}/structural/brain.nii.gz',
                 'brain_mask': 'preprocessing/preprocessed/{subject}/structural/T1_brain_mask.nii.gz',  
                 'ants_affine': 'preprocessing/preprocessed/{subject}/structural/transforms2mni/transform0GenericAffine.mat',
                 'ants_warp': 'preprocessing/preprocessed/{subject}/structural/transforms2mni/transform1Warp.nii.gz'
                 }

    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    selectfiles.inputs.subject = subject


    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')
    remove_vol.inputs.t_min = vol_to_remove
    # workflow for motion correction
    moco = create_moco_pipeline()

    # workflow for fieldmap correction and coregistration
    topup_coreg = create_topup_coreg_pipeline()
    topup_coreg.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    topup_coreg.inputs.inputnode.fs_subject_id = subject
    topup_coreg.inputs.inputnode.echo_space = echo_space
    topup_coreg.inputs.inputnode.pe_dir = pe_dir

    # workflow for applying transformations to timeseries
    transform_ts = create_transform_pipeline()
    transform_ts.inputs.inputnode.resolution = epi_resolution


    # workflow to denoise timeseries
    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.highpass_sigma = 1. / (2 * TR * highpass)
    denoise.inputs.inputnode.lowpass_sigma = 1. / (2 * TR * lowpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
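    # fslmaths' temporal filter takes sigmas in volumes rather than Hz;
    # sigma_vol = 1 / (2 * TR * cutoff_Hz), hence the conversions above.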
    denoise.inputs.inputnode.tr = TR

    # workflow to transform timeseries to MNI
    ants_registration = create_ants_registration_pipeline()
    ants_registration.inputs.inputnode.ref = standard_brain
    ants_registration.inputs.inputnode.tr_sec = TR

    # FL added fullspectrum
    # workflow to transform fullspectrum timeseries to MNI
    ants_registration_full = create_ants_registration_pipeline('ants_registration_full')
    ants_registration_full.inputs.inputnode.ref = standard_brain
    ants_registration_full.inputs.inputnode.tr_sec = TR

    # workflow to smooth
    smoothing = create_smoothing_pipeline()
    smoothing.inputs.inputnode.fwhm = fwhm_smoothing

    # visualize registration results
    visualize = create_visualize_pipeline()
    visualize.inputs.inputnode.mni_template = standard_brain



    # sink to store files
    sink = Node(nio.DataSink(parameterization=False,
                             base_directory=out_dir,
                             substitutions=[('fmap_phase_fslprepared', 'fieldmap'),
                                            ('fieldmap_fslprepared_fieldmap_unmasked_vsm', 'shiftmap'),
                                            ('plot.rest_coregistered', 'outlier_plot'),
                                            ('filter_motion_comp_norm_compcor_art_dmotion', 'nuisance_matrix'),
                                            ('rest_realigned.nii.gz_abs.rms', 'rest_realigned_abs.rms'),
                                            ('rest_realigned.nii.gz.par', 'rest_realigned.par'),
                                            ('rest_realigned.nii.gz_rel.rms', 'rest_realigned_rel.rms'),
                                            ('rest_realigned.nii.gz_abs_disp', 'abs_displacement_plot'),
                                            ('rest_realigned.nii.gz_rel_disp', 'rel_displacement_plot'),
                                            ('art.rest_coregistered_outliers', 'outliers'),
                                            ('global_intensity.rest_coregistered', 'global_intensity'),
                                            ('norm.rest_coregistered', 'composite_norm'),
                                            ('stats.rest_coregistered', 'stats'),
                                            ('rest_denoised_bandpassed_norm.nii.gz',
                                             'rest_preprocessed_nativespace.nii.gz'),
                                            ('rest_denoised_bandpassed_norm_trans.nii.gz',
                                             'rest_mni_unsmoothed.nii.gz'),
                                            ('rest_denoised_bandpassed_norm_trans_smooth.nii',
                                             'rest_mni_smoothed.nii'),
                                            # FL added
                                            ('rest2anat_masked.nii.gz', 'rest_coregistered_nativespace.nii.gz'),
                                            ('rest2anat_denoised.nii.gz',
                                             'rest_preprocessed_nativespace_fullspectrum.nii.gz'),
                                            ('rest2anat_denoised_trans.nii.gz',
                                             'rest_mni_unsmoothed_fullspectrum.nii.gz')
                                            ]),
                name='sink')


    # connections
    func_preproc.connect([
        # remove the first volumes
        (selectfiles, remove_vol, [('func', 'in_file')]),

        # align volumes and motion correction
        (remove_vol, moco, [('out_file', 'inputnode.epi')]),

        # prepare field map
        (selectfiles, topup_coreg, [('ap', 'inputnode.ap'),
                                   ('pa', 'inputnode.pa'),
                                   ('anat_head', 'inputnode.anat_head'),
                                   ('anat_brain', 'inputnode.anat_brain')
                                   ]),
        (moco, topup_coreg, [('outputnode.epi_mean', 'inputnode.epi_mean')]),

        # transform timeseries
        (remove_vol, transform_ts, [('out_file', 'inputnode.orig_ts')]),
        (selectfiles, transform_ts, [('anat_head', 'inputnode.anat_head')]),
        (selectfiles, transform_ts, [('brain_mask', 'inputnode.brain_mask')]),
        (moco, transform_ts, [('outputnode.mat_moco', 'inputnode.mat_moco')]),
        (topup_coreg, transform_ts, [('outputnode.fmap_fullwarp', 'inputnode.fullwarp')]),

        # correct slicetiming
        # FIXME slice timing?
        # (transform_ts, slicetiming, [('outputnode.trans_ts_masked', 'inputnode.ts')]),
        # (slicetiming, denoise, [('outputnode.ts_slicetcorrected', 'inputnode.epi_coreg')]),
        (transform_ts, denoise, [('outputnode.trans_ts_masked', 'inputnode.epi_coreg')]),

        # denoise data
        (selectfiles, denoise, [('brain_mask', 'inputnode.brain_mask'),
                                ('anat_brain', 'inputnode.anat_brain')]),
        (moco, denoise, [('outputnode.par_moco', 'inputnode.moco_par')]),
        (topup_coreg, denoise, [('outputnode.epi2anat_dat', 'inputnode.epi2anat_dat'),
                               ('outputnode.unwarped_mean_epi2fmap', 'inputnode.unwarped_mean')]),
        (denoise, ants_registration, [('outputnode.normalized_file', 'inputnode.denoised_ts')]),

        # registration to MNI space
        (selectfiles, ants_registration, [('ants_affine', 'inputnode.ants_affine')]),
        (selectfiles, ants_registration, [('ants_warp', 'inputnode.ants_warp')]),

        # FL added fullspectrum
        (denoise, ants_registration_full, [('outputnode.ts_fullspectrum', 'inputnode.denoised_ts')]),
        (selectfiles, ants_registration_full, [('ants_affine', 'inputnode.ants_affine')]),
        (selectfiles, ants_registration_full, [('ants_warp', 'inputnode.ants_warp')]),

        (ants_registration, smoothing, [('outputnode.ants_reg_ts', 'inputnode.ts_transformed')]),

        (smoothing, visualize, [('outputnode.ts_smoothed', 'inputnode.ts_transformed')]),

        ##all the output
        (moco, sink, [  # ('outputnode.epi_moco', 'realign.@realigned_ts'),
                        ('outputnode.par_moco', 'realign.@par'),
                        ('outputnode.rms_moco', 'realign.@rms'),
                        ('outputnode.mat_moco', 'realign.MAT.@mat'),
                        ('outputnode.epi_mean', 'realign.@mean'),
                        ('outputnode.rotplot', 'realign.plots.@rotplot'),
                        ('outputnode.transplot', 'realign.plots.@transplot'),
                        ('outputnode.dispplots', 'realign.plots.@dispplots'),
                        ('outputnode.tsnr_file', 'realign.@tsnr')]),
        (topup_coreg, sink, [('outputnode.fmap', 'coregister.transforms2anat.@fmap'),
                            # ('outputnode.unwarpfield_epi2fmap', 'coregister.@unwarpfield_epi2fmap'),
                            ('outputnode.unwarped_mean_epi2fmap', 'coregister.@unwarped_mean_epi2fmap'),
                            ('outputnode.epi2fmap', 'coregister.@epi2fmap'),
                            # ('outputnode.shiftmap', 'coregister.@shiftmap'),
                            ('outputnode.fmap_fullwarp', 'coregister.transforms2anat.@fmap_fullwarp'),
                            ('outputnode.epi2anat', 'coregister.@epi2anat'),
                            ('outputnode.epi2anat_mat', 'coregister.transforms2anat.@epi2anat_mat'),
                            ('outputnode.epi2anat_dat', 'coregister.transforms2anat.@epi2anat_dat'),
                            ('outputnode.epi2anat_mincost', 'coregister.@epi2anat_mincost')
                            ]),

        (transform_ts, sink, [('outputnode.trans_ts_masked', 'coregister.@full_transform_ts'),
                              ('outputnode.trans_ts_mean', 'coregister.@full_transform_mean'),
                              ('outputnode.resamp_brain', 'coregister.@resamp_brain')]),

        (denoise, sink, [
            ('outputnode.wmcsf_mask', 'denoise.mask.@wmcsf_masks'),
            ('outputnode.combined_motion', 'denoise.artefact.@combined_motion'),
            ('outputnode.outlier_files', 'denoise.artefact.@outlier'),
            ('outputnode.intensity_files', 'denoise.artefact.@intensity'),
            ('outputnode.outlier_stats', 'denoise.artefact.@outlierstats'),
            ('outputnode.outlier_plots', 'denoise.artefact.@outlierplots'),
            ('outputnode.mc_regressor', 'denoise.regress.@mc_regressor'),
            ('outputnode.comp_regressor', 'denoise.regress.@comp_regressor'),
            ('outputnode.mc_F', 'denoise.regress.@mc_F'),
            ('outputnode.mc_pF', 'denoise.regress.@mc_pF'),
            ('outputnode.comp_F', 'denoise.regress.@comp_F'),
            ('outputnode.comp_pF', 'denoise.regress.@comp_pF'),
            ('outputnode.brain_mask_resamp', 'denoise.mask.@brain_resamp'),
            ('outputnode.brain_mask2epi', 'denoise.mask.@brain_mask2epi'),
            ('outputnode.normalized_file', 'denoise.@normalized'),
            # FL added fullspectrum
            ('outputnode.ts_fullspectrum', 'denoise.@ts_fullspectrum')
        ]),
        (ants_registration, sink, [('outputnode.ants_reg_ts', 'ants.@antsnormalized')]),
        (ants_registration_full, sink, [('outputnode.ants_reg_ts', 'ants.@antsnormalized_fullspectrum')]),
        (smoothing, sink, [('outputnode.ts_smoothed', '@smoothed.FWHM6')]),
    ])

    func_preproc.write_graph(dotfilename='func_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    func_preproc.run(plugin='MultiProc')
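
# A hypothetical call (every argument is a placeholder, not taken from the
# original source):
create_resting(subject='sub001', working_dir='/scratch/resting_wd',
               data_dir='/data', freesurfer_dir='/data/freesurfer',
               out_dir='/data/out/sub001', vol_to_remove=5, TR=2.0,
               epi_resolution=3.0, highpass=0.01, lowpass=0.1,
               echo_space=0.0007, pe_dir='y-',
               standard_brain='/templates/MNI152_T1_2mm_brain.nii.gz',
               standard_brain_resampled='/templates/MNI152_resampled.nii.gz',
               standard_brain_mask='/templates/MNI152_T1_2mm_brain_mask.nii.gz',
               standard_brain_mask_resampled='/templates/MNI152_mask_resampled.nii.gz',
               fwhm_smoothing=6.0)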
Example #9
def create_skullstrip_workflow(name="skullstrip"):
    """Remove non-brain voxels from the timeseries."""

    # Define the workflow inputs
    inputnode = Node(
        IdentityInterface(["subject_id", "timeseries", "reg_file"]), "inputs")

    # Mean the timeseries across the fourth dimension
    origmean = MapNode(fsl.MeanImage(), "in_file", "origmean")

    # Grab the Freesurfer aparc+aseg file as an anatomical brain mask
    getaseg = Node(
        io.SelectFiles({"aseg": "{subject_id}/mri/aparc+aseg.mgz"},
                       base_directory=os.environ["SUBJECTS_DIR"]), "getaseg")

    # Threshold the aseg volume to get a boolean mask
    makemask = Node(fs.Binarize(dilate=4, min=0.5), "makemask")

    # Transform the brain mask into functional space
    transform = MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        ["reg_file", "source_file"], "transform")

    # Convert the mask to nifti and rename
    convertmask = MapNode(fs.MRIConvert(out_file="functional_mask.nii.gz"),
                          "in_file", "convertmask")

    # Use the mask to skullstrip the timeseries
    stripts = MapNode(fs.ApplyMask(), ["in_file", "mask_file"], "stripts")

    # Use the mask to skullstrip the mean image
    stripmean = MapNode(fs.ApplyMask(), ["in_file", "mask_file"], "stripmean")

    # Generate images summarizing the skullstrip and resulting data
    reportmask = MapNode(MaskReport(), ["mask_file", "orig_file", "mean_file"],
                         "reportmask")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface(["timeseries", "mean_file", "mask_file", "report"]),
        "outputs")

    # Define and connect the workflow
    skullstrip = Workflow(name)

    skullstrip.connect([
        (inputnode, origmean, [("timeseries", "in_file")]),
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (origmean, transform, [("out_file", "source_file")]),
        (getaseg, makemask, [("aseg", "in_file")]),
        (makemask, transform, [("binary_file", "target_file")]),
        (inputnode, transform, [("reg_file", "reg_file")]),
        (transform, stripts, [("transformed_file", "mask_file")]),
        (transform, stripmean, [("transformed_file", "mask_file")]),
        (inputnode, stripts, [("timeseries", "in_file")]),
        (origmean, stripmean, [("out_file", "in_file")]),
        (stripmean, reportmask, [("out_file", "mean_file")]),
        (origmean, reportmask, [("out_file", "orig_file")]),
        (transform, reportmask, [("transformed_file", "mask_file")]),
        (transform, convertmask, [("transformed_file", "in_file")]),
        (stripts, outputnode, [("out_file", "timeseries")]),
        (stripmean, outputnode, [("out_file", "mean_file")]),
        (convertmask, outputnode, [("out_file", "mask_file")]),
        (reportmask, outputnode, [("out_files", "report")]),
    ])

    return skullstrip
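
# A minimal usage sketch (inputs are hypothetical placeholders; the inner
# input node is named "inputs"):
skullstrip = create_skullstrip_workflow()
skullstrip.inputs.inputs.subject_id = 'subj01'
skullstrip.inputs.inputs.timeseries = ['run1.nii.gz']  # lists: downstream nodes are MapNodes
skullstrip.inputs.inputs.reg_file = ['register.dat']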
Example #10
def create_aroma_prep(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                      vol_to_remove, TR, epi_resolution, highpass, echo_space,
                      te_diff, pe_dir, standard_brain,
                      standard_brain_resampled, standard_brain_mask,
                      standard_brain_mask_resampled, fwhm_smoothing):
    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # main workflow
    aroma_prep = Workflow(name='aroma_prep')
    aroma_prep.base_dir = working_dir
    aroma_prep.config['execution'][
        'crashdump_dir'] = aroma_prep.base_dir + "/crash_files"

    #helper function to save array output of AROMA functions
    def small_save(filename, in_data):
        import numpy as np
        np.save(filename, in_data)
        return filename

    # select files
    templates = {
        'anat_head': 'preprocessed/' + subject + '/structural/T1.nii.gz',
        'anat_brain': 'preprocessed/' + subject + '/structural/brain.nii.gz',
        'brain_mask':
        'preprocessed/' + subject + '/structural/T1_brain_mask.nii.gz',
        'func': 'raw_data/' + subject + '/func/EPI_t2.nii',
        'mag': 'raw_data/' + subject + '/unwarp/B0_mag.nii',
        'phase': 'raw_data/' + subject + '/unwarp/B0_ph.nii'
    }

    selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    ##################preprocessing############################################
    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')
    remove_vol.inputs.t_min = vol_to_remove

    aroma_prep.connect([(selectfiles, remove_vol, [('func', 'in_file')])])

    #motion correction
    moco = create_moco_pipeline()

    #creating and applying the fieldmap
    fmap_coreg = create_fmap_pipeline()
    fmap_coreg.inputs.inputnode.echo_space = echo_space
    fmap_coreg.inputs.inputnode.te_diff = te_diff
    fmap_coreg.inputs.inputnode.pe_dir = pe_dir

    aroma_prep.connect([
        (selectfiles, fmap_coreg, [('mag', 'inputnode.mag')]),
        (selectfiles, fmap_coreg, [('phase', 'inputnode.phase')]),
        (moco, fmap_coreg, [('outputnode.epi_moco', 'inputnode.epi_coreg')]),
        (moco, fmap_coreg, [('outputnode.epi_mean', 'inputnode.epi_mean')])
    ])

    #reorient to std
    reorient2std = Node(fsl.Reorient2Std(), name="reorient2std")
    #mean intensity normalization
    meanintensnorm = Node(fsl.ImageMaths(op_string='-ing 10000'),
                          name='meanintensnorm')

    aroma_prep.connect([
        #(unwarp, moco,    [('warped_file', 'inputnode.epi')]),
        (remove_vol, moco, [('out_file', 'inputnode.epi')]),
        (selectfiles, reorient2std, [
            ('anat_brain', 'in_file')
        ]),  #reorient to standard to avoid registration issues seen previously
        (fmap_coreg, meanintensnorm, [('outputnode.unwarped_epi', 'in_file')])
    ])

    #mask functional image
    betfunctional = Node(fsl.BET(frac=0.3), name='betfunctional')
    binmask = Node(fsl.ImageMaths(op_string='-bin'), name='binmask')

    #smoothing (@6mm FWHM)
    smoothing = create_smoothing_pipeline()
    smoothing.inputs.inputnode.fwhm = fwhm_smoothing

    aroma_prep.connect([(meanintensnorm, smoothing,
                         [('out_file', 'inputnode.ts_transformed')]),
                        (fmap_coreg, betfunctional,
                         [('outputnode.unwarped_mean_epi2fmap', 'in_file')]),
                        (betfunctional, binmask, [('out_file', 'in_file')])])

    # Func > Anat
    # register example func to high-resolution (use linear registration with 7 degrees of freedom and output
    #matrix example_func2highres.mat
    flirt = Node(fsl.FLIRT(cost_func='mutualinfo', interp='trilinear'),
                 name='flirt')
    flirt.inputs.dof = 7
    #
    # Anat > Standard
    # register high-resolution to standard template - ###flirt## (as preparation for fnirt)
    flirt_prep = Node(fsl.FLIRT(cost_func='mutualinfo', interp='trilinear'),
                      name='flirt_prep')
    flirt_prep.inputs.reference = standard_brain
    flirt_prep.inputs.interp = 'trilinear'
    flirt_prep.inputs.dof = 12

    fnirt = Node(fsl.FNIRT(), name='fnirt')
    fnirt.inputs.ref_file = standard_brain
    fnirt.inputs.field_file = True
    fnirt.inputs.fieldcoeff_file = True

    aroma_prep.connect([
        (reorient2std, flirt, [('out_file', 'reference')]),
        (betfunctional, flirt, [('out_file', 'in_file')]),
        (reorient2std, flirt_prep, [('out_file', 'in_file')]),
        (flirt_prep, fnirt, [('out_matrix_file', 'affine_file')]),
        (reorient2std, fnirt, [('out_file', 'in_file')]),
    ])

    ##################ICA-AROMA###############################################
    #Step 1) MELODIC
    runICA = Node(
        name="runICA",
        interface=Function(input_names=[
            "fslDir", "outDir", "inFile", "melDirIn", "mask", "dim", "TR"
        ],
                           output_names=["mdir", 'melICmix', 'melodic_FTmix'],
                           function=aromafunc.runICA))
    runICA.inputs.fslDir = os.path.join(os.environ["FSLDIR"], 'bin', '')
    runICA.inputs.dim = 0  #automatically estimates network number using MDL
    runICA.inputs.TR = 2.0
    runICA.inputs.melDirIn = ""
    runICA.inputs.outDir = out_dir

    #Step 2) Automatic classification of the components
    #  - registering the spatial maps to MNI
    regMNI = Node(name="regMNI",
                  interface=Function(input_names=[
                      "fslDir", "inFile", "outFile", "affmat", "warp"
                  ],
                                     output_names=['melodic_IC_MNI2mm'],
                                     function=aromafunc.register2MNI))

    regMNI.inputs.fslDir = os.path.join(os.environ["FSLDIR"], 'bin', '')
    regMNI.inputs.outFile = out_dir + 'melodic.ica/melodic_IC_thr_MNI2mm.nii.gz'

    #connect inputs to Melodic-ICA
    aroma_prep.connect([
        (binmask, runICA, [('out_file', 'mask')]),
        (smoothing, runICA, [('outputnode.ts_smoothed', 'inFile')]),
        #connect inputs to registration node
        (runICA, regMNI, [('mdir', 'inFile')]),
        (flirt, regMNI, [('out_matrix_file', 'affmat')]),
        (fnirt, regMNI, [('fieldcoeff_file', 'warp')])
    ])

    #extracting the Maximum RP correlation feature
    feature_time_series = Node(name="feature_time_series",
                               interface=Function(
                                   input_names=["melmix", "mc"],
                                   output_names=["maxRPcorr"],
                                   function=aromafunc.feature_time_series))

    save_featts = Node(name="save_featts",
                       interface=Function(input_names=["filename", "in_data"],
                                          output_names=["filename"],
                                          function=small_save))

    save_featts.inputs.filename = working_dir + '/aroma_prep/save_featts/maxRPcorr.npy'

    aroma_prep.connect([(runICA, feature_time_series, [('melICmix', 'melmix')])
                        ])

    #connect moco to time_series features
    aroma_prep.connect([
        (moco, feature_time_series, [('outputnode.par_moco', 'mc')]),
        (feature_time_series, save_featts, [("maxRPcorr", "in_data")])
    ])

    #extracting the High-frequency content feature
    feature_freq = Node(name="feature_freq",
                        interface=Function(
                            input_names=["melFTmix", "TR"],
                            output_names=["HFC"],
                            function=aromafunc.feature_frequency))
    feature_freq.inputs.TR = 2.0

    save_featfreq = Node(name="save_featfreq",
                         interface=Function(
                             input_names=["filename", "in_data"],
                             output_names=["filename"],
                             function=small_save))
    save_featfreq.inputs.filename = working_dir + '/aroma_prep/feature_freq/HFC.npy'

    aroma_prep.connect([(runICA, feature_freq, [('melodic_FTmix', 'melFTmix')
                                                ]),
                        (feature_freq, save_featfreq, [('HFC', 'in_data')])])

    #extracting the CSF & Edge fraction features
    feature_spatial = Node(
        name="feature_spatial",
        interface=Function(
            input_names=["fslDir", "tempDir", "aromaDir", "melIC"],
            output_names=["edgeFract", "csfFract"],
            function=aromafunc.feature_spatial))

    feature_spatial.inputs.fslDir = os.path.join(os.environ["FSLDIR"], 'bin',
                                                 '')
    feature_spatial.inputs.tempDir = working_dir + "/aroma_prep/feature_spatial/"
    feature_spatial.inputs.aromaDir = "/home/raid1/fbeyer/Documents/Scripts/ICA-AROMA/"

    save_featsp_edge = Node(name="save_featsp_edge",
                            interface=Function(
                                input_names=["filename", "in_data"],
                                output_names=["filename"],
                                function=small_save))
    save_featsp_edge.inputs.filename = working_dir + '/aroma_prep/feature_spatial/edge.npy'

    save_featsp_csf = Node(name="save_featsp_csf",
                           interface=Function(
                               input_names=["filename", "in_data"],
                               output_names=["filename"],
                               function=small_save))
    save_featsp_csf.inputs.filename = working_dir + '/aroma_prep/feature_spatial/csf.npy'

    aroma_prep.connect([
        (regMNI, feature_spatial, [('melodic_IC_MNI2mm', 'melIC')]),
        (feature_spatial, save_featsp_edge, [('edgeFract', 'in_data')]),
        (feature_spatial, save_featsp_csf, [('csfFract', 'in_data')])
    ])

    #classification of features using predefined feature space
    classification = Node(
        name="classification",
        interface=Function(input_names=[
            "outDir", "maxRPcorr", "edgeFract", "HFC", "csfFract"
        ],
                           output_names=["motionICs"],
                           function=aromafunc.classification))
    classification.inputs.outDir = out_dir + '/melodic.ica/'

    save_class_mic = Node(name="save_class_mic",
                          interface=Function(
                              input_names=["filename", "in_data"],
                              output_names=["filename"],
                              function=small_save))
    save_class_mic.inputs.filename = working_dir + '/aroma_prep/classification/motionICs.npy'

    aroma_prep.connect([(classification, save_class_mic, [('motionICs',
                                                           'in_data')])])

    #connections of classification with all features
    aroma_prep.connect([
        (feature_time_series, classification, [('maxRPcorr', 'maxRPcorr')]),
        (feature_spatial, classification, [('edgeFract', 'edgeFract')]),
        (feature_spatial, classification, [('csfFract', 'csfFract')]),
        (feature_freq, classification, [('HFC', 'HFC')])
    ])

    #Step 3) Data denoising (using input data (=intensity normalized, motion corrected, smoothed fMRI in subject space))
    denoising_ICA = Node(name="denoising_ICA",
                         interface=Function(input_names=[
                             "fslDir", "inFile", "outDir", "melmix", "denType",
                             "denIdx"
                         ],
                                            output_names=["denoised_func"],
                                            function=aromafunc.denoising))
    denoising_ICA.inputs.fslDir = os.path.join(os.environ["FSLDIR"], 'bin', '')
    denoising_ICA.inputs.outDir = out_dir
    denoising_ICA.inputs.melmix = out_dir + '/melodic.ica/melodic_mix'
    denoising_ICA.inputs.denType = 2  #1=aggr, 2=non-aggr, 3=both
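
    # For reference: with denType=2, ICA-AROMA's denoising step typically
    # reduces to a non-aggressive fsl_regfilt call of roughly this form (a
    # sketch; exact flags depend on the ICA-AROMA version, and the component
    # list comes from the classification step):
    #   fsl_regfilt -i <inFile> -d <melodic_mix> -f "2,5,9" \
    #       -o denoised_func_data_nonaggr.nii.gz
    # (aggressive denoising, denType=1, adds the -a flag)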

    aroma_prep.connect([
        (classification, denoising_ICA, [('motionICs', 'denIdx')]),
        (smoothing, denoising_ICA, [('outputnode.ts_smoothed', 'inFile')])
    ])

    ##################post-processing###########################################
    #WM/CSF/linear-trend removal by regressing out CompCor components,
    #followed by highpass filtering,
    #then registration of the denoised image to MNI space
    postprocess = create_denoise_pipeline()
    postprocess.inputs.inputnode.highpass_sigma = 1. / (2 * TR * highpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    postprocess.inputs.inputnode.tr = TR
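    # Worked example for the sigma formula above: fslmaths -bptf expects the
    # highpass as a sigma in volumes, so with a cutoff of highpass = 0.01 Hz
    # and TR = 2.0 s, highpass_sigma = 1. / (2 * 2.0 * 0.01) = 25 volumes.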

    aroma_prep.connect([
        (binmask, postprocess, [('out_file', 'inputnode.brain_mask')]),
        (denoising_ICA, postprocess, [('denoised_func', 'inputnode.epi_coreg')
                                      ]),
        (moco, postprocess, [('outputnode.epi_mean', 'inputnode.unwarped_mean')
                             ]),
        (flirt, postprocess, [('out_matrix_file', 'inputnode.flirt_mat')]),
        (reorient2std, postprocess, [('out_file', 'inputnode.anat_brain')])
    ])

    #register the AROMA-only denoised file into MNI space (without CSF/WM removal or highpass filtering)
    apply_TF = Node(fsl.ApplyWarp(), name="apply_TF")
    apply_TF.inputs.ref_file = standard_brain

    #register the AROMA and CSF/WM denoised file into MNI space
    apply_TF_denoised = Node(fsl.ApplyWarp(), name="apply_TF_denoised")
    apply_TF_denoised.inputs.ref_file = standard_brain

    aroma_prep.connect([
        (postprocess, apply_TF_denoised, [('outputnode.normalized_file',
                                           'in_file')]),
        (flirt, apply_TF_denoised, [('out_matrix_file', 'premat')]),
        (fnirt, apply_TF_denoised, [('fieldcoeff_file', 'field_file')]),
        (denoising_ICA, apply_TF, [('denoised_func', 'in_file')]),
        (flirt, apply_TF, [('out_matrix_file', 'premat')]),
        (fnirt, apply_TF, [('fieldcoeff_file', 'field_file')]),
    ])

    #sink to store files
    sink = Node(nio.DataSink(
        parameterization=False,
        base_directory=out_dir,
        substitutions=[
            ('brain_reoriented_warped.nii', 'brain2mni_warp_fnirt.nii'),
            ('brain_reoriented_flirt.nii', 'brain2mni_aff_flirt.nii'),
            ('brain_reoriented_fieldwarp.nii', 'brain2mni_warpcoeff_cout.nii'),
            ('brain_reoriented_field.nii', 'brain2mni_warpfield_fout.nii'),
            ('rest2anat_maths_smooth', 'func_preproc_smoothed'),
            ('rest_mean2fmap_unwarped_brain_maths', 'func_brain_mask'),
            ('denoised_func_data_nonaggr_warp', 'aroma_denoised_MNI'),
            ('rest_denoised_highpassed_norm_warp.nii',
             'aroma_csfwm_denoised_MNI.nii'),
            ('rest2anat_denoised', 'rest_denoised_fullspectrum'),
            ('rest_denoised_highpassed_norm', 'rest_denoised_highpass'),
            ('wmcsf_mask_lowres_flirt', 'wmcsf_mask2epi'),
            ('brain2mni_aff_flirt', 'brain2epi')
        ]),
                name='sink')
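
    # DataSink substitutions are plain, ordered string replacements applied to
    # every output path before it is written; e.g. the pair above turns
    # '.../denoised_func_data_nonaggr_warp.nii.gz' into
    # '.../aroma_denoised_MNI.nii.gz'.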

    ## sink all the outputs
    aroma_prep.connect([
        (moco, sink, [('outputnode.epi_moco', 'realign.@realigned_ts'),
                      ('outputnode.par_moco', 'realign.@par'),
                      ('outputnode.rms_moco', 'realign.@rms'),
                      ('outputnode.mat_moco', 'realign.MAT.@mat'),
                      ('outputnode.epi_mean', 'realign.@mean'),
                      ('outputnode.rotplot', 'realign.plots.@rotplot'),
                      ('outputnode.transplot', 'realign.plots.@transplot'),
                      ('outputnode.dispplots', 'realign.plots.@dispplots'),
                      ('outputnode.tsnr_file', 'realign.@tsnr')]),
        (fmap_coreg, sink,
         [('outputnode.fmap', 'fmap.transforms2anat.@fmap'),
          ('outputnode.unwarped_mean_epi2fmap',
           'fmap.@unwarped_mean_epi2fmap'),
          ('outputnode.epi2fmap', 'fmap.@epi2fmap'),
          ('outputnode.unwarpfield_epi2fmap', 'fmap.@fmap_fullwarp'),
          ('outputnode.mean_from_unwarped_epi', 'fmap.@mean_from_unwarped')]),
        (binmask, sink, [('out_file', 'aroma_inputs.@brainmask')]),
        (smoothing, sink, [('outputnode.ts_smoothed', 'aroma_inputs.@FWHM6')]),
        (flirt, sink, [('out_file', 'aroma_inputs.reg.affine.@result')]),
        (flirt, sink, [('out_matrix_file', 'aroma_inputs.reg.affine.@matrix')
                       ]),
        (fnirt, sink, [('warped_file', 'aroma_inputs.reg.fnirt.@image')]),
        (fnirt, sink, [('fieldcoeff_file',
                        'aroma_inputs.reg.fnirt.@fieldcoeff')]),
        (fnirt, sink, [('field_file', 'aroma_inputs.reg.fnirt.@field')]),
        (flirt_prep, sink, [('out_matrix_file', 'aroma_inputs.reg.fnirt.@mat')
                            ]),
        (flirt_prep, sink, [('out_file', 'aroma_inputs.reg.fnirt.@resultflirt')
                            ]),
        (save_featts, sink, [('filename', 'aroma_res.features.@rescorr')]),
        (save_featsp_edge, sink, [('filename', 'aroma_res.features.@edge')]),
        (save_featsp_csf, sink, [('filename', 'aroma_res.features.@csf')]),
        (save_featfreq, sink, [('filename', 'aroma_res.features.@HFC')]),
        (denoising_ICA, sink, [('denoised_func', 'aroma_res.@denoised_func')]),
        (
            postprocess,
            sink,
            [
                ('outputnode.wmcsf_mask', 'denoise.mask.@wmcsf_masks'),
                ('outputnode.combined_motion',
                 'denoise.artefact.@combined_motion'),
                ('outputnode.comp_regressor',
                 'denoise.regress.@comp_regressor'),
                ('outputnode.comp_F', 'denoise.regress.@comp_F'),
                ('outputnode.comp_pF', 'denoise.regress.@comp_pF'),
                ('outputnode.brain2epi', 'denoise.mask.@brain2epi'),
                ('outputnode.wmcsf_mask2epi', 'denoise.mask.@wmcsf_mask2epi'),
                ('outputnode.normalized_file', 'denoise.@normalized'),
                # FL added fullspectrum
                ('outputnode.ts_fullspectrum', 'denoise.@ts_fullspectrum')
            ]),
        (apply_TF, sink, [('out_file',
                           'transforms.withoutCSFWM.@denoised_result_MNI')]),
        (apply_TF_denoised, sink,
         [('out_file', 'transforms.withCSFWM.@denoised_result_MNI')])
    ])

    aroma_prep.write_graph(dotfilename='aroma_prep.dot',
                           graph2use='colored',
                           format='pdf',
                           simple_form=True)
    aroma_prep.run()  #plugin='CondorDAGMan'
sublist = [g.split('/')[-1] for g in glob.glob(globaldir + 'organized_inputs/*')][0:4]
seslist = list(set([g.split('/')[-1] for g in glob.glob(globaldir + 'organized_inputs/*/*')]))



## Setup data managment nodes

infosource = pe.Node(util.IdentityInterface(fields=['subject_id', 'session_id']), name="infosource")
infosource.iterables = [('subject_id', sublist), ('session_id', seslist)]
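# With two independent iterables, nipype expands the workflow over their full
# Cartesian product (e.g. 4 subjects x 2 sessions -> 8 branches); to pair the
# lists element-wise instead, use matched-length lists plus
# infosource.synchronize = True.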

templates = {
    'anat': 'organized_inputs/{subject_id}/{session_id}/dmn_fb_anat_defaced/dmn_fb_anat_defaced.nii.gz',
    'func': 'organized_inputs/{subject_id}/{session_id}/msit/msit.nii.gz',
    'fmap_mag': 'organized_inputs/{subject_id}/{session_id}/fieldmap_mag/fieldmap.nii.gz',
    'fmap_pha': 'organized_inputs/{subject_id}/{session_id}/fieldmap_pha/fieldmap.nii.gz'}
selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=globaldir), name="selectfiles")



datasink = pe.Node(nio.DataSink(base_directory=globaldir, container=workdir), name="datasink")


## Specify commands to be run

# Extract first three volumes from fmri
fslroi = pe.Node(interface=fsl.ExtractROI(),name='fslroi')
fslroi.inputs.t_min = 0
fslroi.inputs.t_size = 3
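# Equivalent command line (a sketch): fslroi in.nii.gz out.nii.gz 0 3
# i.e. keep three volumes starting at volume 0.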

# Skullstrip
skullstrip = pe.Node(interface=afni.preprocess.SkullStrip(),name='skullstrip')
Exemple #12
# ### `selectfiles` node
#
# * match template to find source files (functional) for use in subsequent parts of pipeline

# SelectFiles - to grab the data (alternativ to DataGrabber)

## TODO: figure out how to incorporate the run number and task name into this call
templates = {
    'func':
    '{subject_id}/{resolution}/{smoothing}/sr{subject_id}_task-' + TASK_NAME +
    '_run-0*_space-MNI152-T1-1mm_desc-preproc_bold.nii'
}

selectfiles = pe.Node(nio.SelectFiles(
    templates,
    base_directory='/data00/projects/megameta/{}/derivatives/nipype/resampled_and_smoothed'
    .format(PROJECT_NAME)),
                      base_dir=working_dir,  # pe.Node takes 'base_dir'; 'working_dir' is ignored
                      name="selectfiles")

# ### Specify datasink node
#
# * copy files to keep from various working folders to output folder for model for subject

# Datasink - creates output folder for important outputs
datasink = pe.Node(
    nio.DataSink(
        base_directory=SUBJ_DIR,
        parameterization=True,
        #container=output_dir
    ),
    name="datasink")
Exemple #13
def normalize_epi(subjects_list,
                  TR_list,
                  preprocessed_data_dir,
                  working_dir,
                  ds_dir,
                  template_dir,
                  plugin_name,
                  use_n_procs):

    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.io as nio
    from nipype.interfaces import fsl
    import nipype.interfaces.utility as util


    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='normalize')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')


    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_subject_id_[A0-9]*/', '')]

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']), name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']), name='subjects_infosource')
    subjects_infosource.iterables = ('subject_id', subjects_list)

    def add_subject_id_to_ds_dir_fct(subject_id, ds_path):
        import os
        out_path = os.path.join(ds_path, subject_id)
        return out_path

    add_subject_id_to_ds_dir = Node(util.Function(input_names=['subject_id', 'ds_path'],
                                                  output_names=['out_path'],
                                                  function=add_subject_id_to_ds_dir_fct),
                                    name='add_subject_id_to_ds_dir')
    wf.connect(subjects_infosource, 'subject_id', add_subject_id_to_ds_dir, 'subject_id')
    add_subject_id_to_ds_dir.inputs.ds_path = ds_dir

    wf.connect(add_subject_id_to_ds_dir, 'out_path', ds, 'base_directory')



    # get atlas data
    templates_atlases = {'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz'}

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")

    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
    }

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    wf.connect(subjects_infosource, 'subject_id', selectfiles, 'subject_id')



    # CREATE TS IN MNI SPACE
    epi_MNI_01_denoised = Node(fsl.ApplyWarp(), name='epi_MNI_01_denoised')
    epi_MNI_01_denoised.inputs.interp = 'spline'
    epi_MNI_01_denoised.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles, 'preproc_epi_full_spectrum', epi_MNI_01_denoised, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_MNI_01_denoised, 'field_file')
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_MNI_01_denoised, 'ref_file')
    epi_MNI_01_denoised.inputs.out_file = 'preprocessed_fullspectrum_MNI_3mm.nii.gz'

    wf.connect(epi_MNI_01_denoised, 'out_file', ds, 'rsfMRI_preprocessing.epis_MNI_3mm.01_denoised')



    epi_MNI_03_bp_tNorm = Node(fsl.ApplyWarp(), name='epi_MNI_03_bp_tNorm')
    epi_MNI_03_bp_tNorm.inputs.interp = 'spline'
    epi_MNI_03_bp_tNorm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles, 'preproc_epi_bp_tNorm', epi_MNI_03_bp_tNorm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_MNI_03_bp_tNorm, 'field_file')
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_MNI_03_bp_tNorm, 'ref_file')
    epi_MNI_03_bp_tNorm.inputs.out_file = 'residual_filt_norm_warp.nii.gz'

    wf.connect(epi_MNI_03_bp_tNorm, 'out_file', ds, 'rsfMRI_preprocessing.epis_MNI_3mm.03_denoised_BP_tNorm')


    #####################################
    # RUN WF
    #####################################
    wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # or graph2use='hierarchical'
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
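

# A hypothetical invocation of the workflow above (every path and the
# subject/TR lists are placeholders, not values from the original project):
if __name__ == '__main__':
    normalize_epi(subjects_list=['sub001'],
                  TR_list=[645],
                  preprocessed_data_dir='/data/preprocessed',
                  working_dir='/data/work',
                  ds_dir='/data/results',
                  template_dir='/data/templates',
                  plugin_name='MultiProc',
                  use_n_procs=4)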
Exemple #14
def build_pipeline(model_def):

    # create pointers to needed values from
    # the model dictionary
    # TODO - this could be refactored
    TR = model_def['TR']
    subject_list = model_def['subject_list']
    JSON_MODEL_FILE = model_def['model_path']

    working_dir = model_def['working_dir']
    output_dir = model_def['output_dir']

    SUBJ_DIR = model_def['SUBJ_DIR']
    PROJECT_DIR = model_def['PROJECT_DIR']
    TASK_NAME = model_def['TaskName']
    RUNS = model_def['Runs']
    MODEL_NAME = model_def['ModelName']
    PROJECT_NAME = model_def['ProjectID']
    BASE_DIR = model_def['BaseDirectory']

    # fall back to AR(1) when the JSON model spec does not set SerialCorrelations
    SERIAL_CORRELATIONS = model_def.get('SerialCorrelations') or "AR(1)"
    RESIDUALS = model_def.get('GenerateResiduals')

    # SpecifyModel - Generates SPM-specific Model

    modelspec = pe.Node(model.SpecifySPMModel(concatenate_runs=False,
                                              input_units='secs',
                                              output_units='secs',
                                              time_repetition=TR,
                                              high_pass_filter_cutoff=128),
                        name="modelspec")

    # #### Level 1 Design node
    #
    # ** TODO -- get the right matching template file for fmriprep **
    #
    # * ??do we need a different mask than:
    #
    #     `'/data00/tools/spm8/apriori/brainmask_th25.nii'`

    # Level1Design - Generates an SPM design matrix
    level1design = pe.Node(
        spm.Level1Design(
            bases={'hrf': {
                'derivs': [0, 0]
            }},
            timing_units='secs',
            interscan_interval=TR,
            # model_serial_correlations='AR(1)', # [none|AR(1)|FAST]',
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            model_serial_correlations=SERIAL_CORRELATIONS,

            # TODO - allow for specified masks
            mask_image=BRAIN_MASK_PATH,
            global_intensity_normalization='none'),
        name="level1design")

    # #### Estimate Model node
    # EstimateModel - estimate the parameters of the model
    level1estimate = pe.Node(
        spm.EstimateModel(
            estimation_method={'Classical': 1},
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            write_residuals=RESIDUALS),
        name="level1estimate")

    # #### Estimate Contrasts node
    # EstimateContrast - estimates contrasts
    conestimate = pe.Node(spm.EstimateContrast(), name="conestimate")

    # ## Setup pipeline workflow for level 1 model
    # Initiation of the 1st-level analysis workflow
    l1analysis = pe.Workflow(name='l1analysis')

    # Connect up the 1st-level analysis components
    l1analysis.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
        (level1estimate, conestimate, [('spm_mat_file', 'spm_mat_file'),
                                       ('beta_images', 'beta_images'),
                                       ('residual_image', 'residual_image')])
    ])

    # ## Set up nodes for file handling and subject selection
    # ### `getsubjectinfo` node
    #
    # * Use `get_subject_info()` function to generate spec data structure for first level model design matrix

    # Get Subject Info - get subject specific condition information
    getsubjectinfo = pe.Node(util.Function(
        input_names=['subject_id', 'model_path'],
        output_names=['subject_info', 'realign_params', 'condition_names'],
        function=get_subject_info),
                             name='getsubjectinfo')

    makecontrasts = pe.Node(util.Function(
        input_names=['subject_id', 'condition_names', 'model_path'],
        output_names=['contrasts'],
        function=make_contrast_list),
                            name='makecontrasts')

    ExcludeDummyScans = model_def.get('ExcludeDummyScans') or 0

    #if DEBUG:
    #    print(f'Excluding {ExcludeDummyScans} dummy scans.')

    trimdummyscans = pe.MapNode(Trim(begin_index=ExcludeDummyScans),
                                name='trimdummyscans',
                                iterfield=['in_file'])

    # ### `infosource` node
    #
    # * iterate over list of subject ids and generate subject ids and produce list of contrasts for subsequent nodes

    # Infosource - a function free node to iterate over the list of subject names
    infosource = pe.Node(util.IdentityInterface(
        fields=['subject_id', 'model_path', 'resolution', 'smoothing']),
                         name="infosource")

    try:
        fwhm_list = model_def['smoothing_list']
    except KeyError:
        fwhm_list = [4, 6, 8]

    try:
        resolution_list = model_def['resolutions']
    except KeyError:
        resolution_list = ['low', 'medium', 'high']

    infosource.iterables = [
        ('subject_id', subject_list),
        ('model_path', [JSON_MODEL_FILE] * len(subject_list)),
        ('resolution', resolution_list),
        ('smoothing', ['fwhm_{}'.format(s) for s in fwhm_list])
    ]

    # SelectFiles - to grab the data (alternativ to DataGrabber)

    ## TODO: figure out how to incorporate the run number and task name into this call
    templates = {
        'func':
        '{subject_id}/{resolution}/{smoothing}/sr{subject_id}_task-' +
        TASK_NAME + '_run-0*_*MNI*preproc*.nii'
    }

    selectfiles = pe.Node(nio.SelectFiles(
        templates,
        base_directory='{}/{}/derivatives/nipype/resampled_and_smoothed'.format(
            BASE_DIR, PROJECT_NAME)),
                          base_dir=working_dir,  # pe.Node takes 'base_dir'; 'working_dir' is ignored
                          name="selectfiles")

    # ### Specify datasink node
    #
    # * copy files to keep from various working folders to output folder for model for subject

    # Datasink - creates output folder for important outputs
    datasink = pe.Node(
        nio.DataSink(
            base_directory=SUBJ_DIR,
            parameterization=True,
            #container=output_dir
        ),
        name="datasink")

    datasink.inputs.base_directory = output_dir

    # Use the following DataSink output substitutions
    substitutions = []
    subjFolders = [(
        '_model_path.*resolution_(low|medium|high)_smoothing_(fwhm_\\d{1,2})_subject_id_sub-.*/(.*)$',
        '\\1/\\2/\\3')]
    substitutions.extend(subjFolders)
    datasink.inputs.regexp_substitutions = substitutions

    # datasink connections

    datasink_in_outs = [('conestimate.spm_mat_file', '@spm'),
                        ('level1estimate.beta_images', '@betas'),
                        ('level1estimate.mask_image', '@mask'),
                        ('conestimate.spmT_images', '@spmT'),
                        ('conestimate.con_images', '@con'),
                        ('conestimate.spmF_images', '@spmF')]

    if model_def.get('GenerateResiduals'):
        datasink_in_outs.append(
            ('level1estimate.residual_images', '@residuals'))

    # ---------

    # ## Set up workflow for whole process

    pipeline = pe.Workflow(
        name='first_level_model_{}_{}'.format(TASK_NAME.upper(), MODEL_NAME))
    pipeline.base_dir = os.path.join(SUBJ_DIR, working_dir)

    pipeline.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('resolution', 'resolution'),
                                   ('smoothing', 'smoothing')]),
        (infosource, getsubjectinfo, [('subject_id', 'subject_id'),
                                      ('model_path', 'model_path')]),
        (infosource, makecontrasts, [('subject_id', 'subject_id'),
                                     ('model_path', 'model_path')]),
        (getsubjectinfo, makecontrasts, [('condition_names', 'condition_names')
                                         ]),
        (getsubjectinfo, l1analysis,
         [('subject_info', 'modelspec.subject_info'),
          ('realign_params', 'modelspec.realignment_parameters')]),
        (makecontrasts, l1analysis, [('contrasts', 'conestimate.contrasts')]),

        #                  (selectfiles, l1analysis, [('func',
        #                                          'modelspec.functional_runs')]),
        (selectfiles, trimdummyscans, [('func', 'in_file')]),
        (trimdummyscans, l1analysis, [('out_file', 'modelspec.functional_runs')
                                      ]),
        (infosource, datasink, [('subject_id', 'container')]),
        (l1analysis, datasink, datasink_in_outs)
    ])

    return pipeline
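

# A hypothetical driver for build_pipeline(); 'model_def' would be the parsed
# JSON model specification whose keys are read at the top of the function:
#   pipeline = build_pipeline(model_def)
#   pipeline.run('MultiProc', plugin_args={'n_procs': 4})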
Exemple #15
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.afni as afni

#Flexibly collect data from disk to feed into workflows.
io_SelectFiles = pe.Node(io.SelectFiles(templates={'subj': ['001', '002']}), name='io_SelectFiles')
io_SelectFiles.inputs.base_directory = '/input'
io_SelectFiles.inputs.subj = ['001', '002']

#Wraps the executable command ``3dTshift``.
afni_TShift = pe.Node(interface = afni.TShift(), name='afni_TShift')

#Wraps the executable command ``3dvolreg``.
afni_Volreg = pe.Node(interface = afni.Volreg(), name='afni_Volreg')

#Wraps the executable command ``align_epi_anat.py``.
afni_AlignEpiAnatPy = pe.Node(interface = afni.AlignEpiAnatPy(), name='afni_AlignEpiAnatPy')
afni_AlignEpiAnatPy.inputs.epi_base = 0
afni_AlignEpiAnatPy.inputs.anat2epi = False
afni_AlignEpiAnatPy.inputs.epi2anat = True
afni_AlignEpiAnatPy.inputs.volreg = 'off'
afni_AlignEpiAnatPy.inputs.tshift = 'off'
afni_AlignEpiAnatPy.inputs.outputtype = 'NIFTI_GZ'
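
#The generator emits nodes but no connections; a minimal wiring sketch (field
#names are taken from the AFNI interfaces, the SelectFiles output name from
#the template key above):
analysisflow = pe.Workflow(name='afni_preproc_sketch')
analysisflow.connect(io_SelectFiles, 'subj', afni_TShift, 'in_file')
analysisflow.connect(afni_TShift, 'out_file', afni_Volreg, 'in_file')
analysisflow.connect(afni_Volreg, 'out_file', afni_AlignEpiAnatPy, 'in_file')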
Exemple #16
# SUBJECTS ITERATOR
subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']),
                           name='subjects_infosource')
subjects_infosource.iterables = ('subject_id', subjects_list)

roi_infosource = Node(util.IdentityInterface(fields=['roi']),
                      name='roi_infosource')
roi_infosource.iterables = ('roi', rois_list)

# GET SUBJECT SPECIFIC FUNCTIONAL DATA
selectfiles_templates = {
    'preproc_epi_mni':
    '{subject_id}/rsfMRI_preprocessing/epis_MNI_3mm/03_denoised_BP_tNorm/TR_645/residual_filt_norm_warp.nii.gz',
}

selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                   base_directory=in_data_base_dir),
                   name="selectfiles")
wf.connect(subjects_infosource, 'subject_id', selectfiles, 'subject_id')

######################
# SCA
######################
# FOR EACH SUBJECT CALCULATE SCA MAP
sca_wf = create_sca_wf(working_dir=wd_dir, name='sca_single_subject')
wf.connect(selectfiles, 'preproc_epi_mni', sca_wf, 'inputnode.rs_preprocessed')
sca_wf.inputs.inputnode.MNI_template = brain_img
wf.connect(roi_infosource, 'roi', sca_wf, 'inputnode.roi_coords')

wf.connect(sca_wf, 'outputnode.seed_based_z', ds, 'seed_based_z')
wf.connect(sca_wf, 'outputnode.roi_img', ds, 'roi')
Exemple #17
templates = {
    'func':
    data_dir + '/fmriprep/sub-{subject_id}/ses-' + session +
    '/func/sub-{subject_id}_ses-' + session +
    '_task-Memory_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz',
    'mask':
    data_dir + '/fmriprep/sub-{subject_id}/ses-' + session +
    '/func/sub-{subject_id}_ses-' + session +
    '_task-Memory_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz',
    'regressors':
    data_dir + '/fmriprep/sub-{subject_id}/ses-' + session +
    '/func/sub-{subject_id}_ses-' + session +
    '_task-Memory_desc-confounds_regressors.tsv',
    'events':
    data_dir + '/condition_files/withNumbers/sub-{subject_id}_ses-' + session +
    '_30sec_window' + '.csv'
}
selectfiles = pe.Node(nio.SelectFiles(templates), name="selectfiles")
# %%

# Extract motion parameters from regressors file
runinfo = pe.Node(util.Function(input_names=[
    'in_file', 'events_file', 'regressors_file', 'regressors_names', 'removeTR'
],
                                function=_bids2nipypeinfo,
                                output_names=['info', 'realign_file']),
                  name='runinfo')
runinfo.inputs.removeTR = removeTR

# Set the column names to be used from the confounds file
runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
    ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]
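
# A minimal sketch of how a _bids2nipypeinfo-style helper typically pulls
# these columns from the fMRIPrep confounds file (pandas assumed; NaNs in the
# first row of derivative-based regressors are zero-filled):
#   conf = pd.read_csv(regressors_file, sep='\t')
#   motion = conf[regressors_names].fillna(0.0).values[removeTR:]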
# %%
Exemple #18
def calc_local_metrics(brain_mask, preprocessed_data_dir, subject_id,
                       parcellations_dict, bp_freq_list, TR,
                       selectfiles_templates, working_dir, ds_dir, use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.freesurfer.preprocess import MRIConvert

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils

    import utils as calc_metrics_utils
    from motion import calculate_FD_P, calculate_FD_J

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 15
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [
        ('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
        ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
    ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(
        util.IdentityInterface(fields=['parcellation']),
        name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation',
                                         parcellations_dict.keys())

    # BP FILTER ITERATOR
    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']),
                                name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name='selectfiles')
    selectfiles.inputs.subject_id = subject_id

    # #####################
    # # FIX TR IN HEADER
    # #####################
    # tr_msec = int(TR * 1000)
    # tr_str = '-tr %s' % tr_msec
    #
    # fixed_tr_bp = Node(MRIConvert(out_type='niigz', args=tr_str), name='fixed_tr_bp')
    # wf.connect(selectfiles, 'epi_MNI_bp', fixed_tr_bp, 'in_file')
    #
    # fixed_tr_fullspectrum = Node(MRIConvert(out_type='niigz', args=tr_str), name='fixed_tr_fullspectrum')
    # wf.connect(selectfiles, 'epi_MNI_fullspectrum', fixed_tr_fullspectrum, 'in_file')

    #####################
    # calc FD
    #####################
    FD_P = Node(util.Function(
        input_names=['in_file'],
        output_names=['FD_ts_file', 'mean_FD_file', 'max_FD_file'],
        function=calculate_FD_P),
                name='FD_P')
    wf.connect(selectfiles, 'moco_parms_file', FD_P, 'in_file')
    wf.connect(FD_P, 'FD_ts_file', ds, 'QC.@FD')
    wf.connect(FD_P, 'mean_FD_file', ds, 'QC.@mean_FD')
    wf.connect(FD_P, 'max_FD_file', ds, 'QC.@max_FD')

    FD_J = Node(util.Function(
        input_names=['in_file'],
        output_names=['FD_ts_file', 'mean_FD_file', 'max_FD_file'],
        function=calculate_FD_J),
                name='FD_J')
    wf.connect(selectfiles, 'jenkinson_file', FD_J, 'in_file')
    wf.connect(FD_J, 'FD_ts_file', ds, 'QC.@FD_J')
    wf.connect(FD_J, 'mean_FD_file', ds, 'QC.@mean_FD_J')
    wf.connect(FD_J, 'max_FD_file', ds, 'QC.@max_FD_J')
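
    # For reference, Power-style FD (FD_P) is the sum of absolute backward
    # differences of the six realignment parameters, with rotations projected
    # onto a 50 mm sphere; a sketch assuming MCFLIRT ordering (three rotations
    # in radians, then three translations in mm; numpy assumed):
    #   par = np.loadtxt(in_file)
    #   d = np.abs(np.diff(par, axis=0))
    #   fd = (50 * d[:, :3]).sum(axis=1) + d[:, 3:].sum(axis=1)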

    wf.connect(selectfiles, 'rest2anat_cost_file', ds, 'QC.@cost_file')

    #####################
    # CALCULATE METRICS
    #####################

    # f/ALFF
    alff = cpac_alff.create_alff('alff')
    alff.inputs.hp_input.hp = 0.01
    alff.inputs.lp_input.lp = 0.1
    alff.inputs.inputspec.rest_mask = brain_mask
    #wf.connect(fixed_tr_fullspectrum, 'out_file', alff, 'inputspec.rest_res')
    wf.connect(selectfiles, 'epi_MNI_fullspectrum', alff, 'inputspec.rest_res')
    wf.connect(alff, 'outputspec.alff_img', ds, 'alff.@alff')
    wf.connect(alff, 'outputspec.falff_img', ds, 'alff.@falff')

    # f/ALFF_MNI Z-SCORE
    alff_z = cpac_utils.get_zscore(input_name='alff', wf_name='alff_z')
    alff_z.inputs.inputspec.mask_file = brain_mask
    wf.connect(alff, 'outputspec.alff_img', alff_z, 'inputspec.input_file')
    wf.connect(alff_z, 'outputspec.z_score_img', ds, 'alff_z.@alff')

    falff_z = cpac_utils.get_zscore(input_name='falff', wf_name='falff_z')
    falff_z.inputs.inputspec.mask_file = brain_mask
    wf.connect(alff, 'outputspec.falff_img', falff_z, 'inputspec.input_file')
    wf.connect(falff_z, 'outputspec.z_score_img', ds, 'alff_z.@falff')

    # REHO
    reho = cpac_reho.create_reho()
    reho.inputs.inputspec.cluster_size = 27
    reho.inputs.inputspec.rest_mask = brain_mask
    #wf.connect(fixed_tr_bp, 'out_file', reho, 'inputspec.rest_res_filt')
    wf.connect(selectfiles, 'epi_MNI_BP', reho, 'inputspec.rest_res_filt')
    wf.connect(reho, 'outputspec.raw_reho_map', ds, 'reho.@reho')

    # VARIABILITY SCORES
    variability = Node(util.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=calc_metrics_utils.calc_variability),
                       name='variability')
    #wf.connect(fixed_tr_bp, 'out_file', variability, 'in_file')
    wf.connect(selectfiles, 'epi_MNI_BP', variability, 'in_file')
    wf.connect(variability, 'out_file', ds, 'variability.@SD')

    variability_z = cpac_utils.get_zscore(input_name='ts_std',
                                          wf_name='variability_z')
    variability_z.inputs.inputspec.mask_file = brain_mask
    wf.connect(variability, 'out_file', variability_z, 'inputspec.input_file')
    wf.connect(variability_z, 'outputspec.z_score_img', ds,
               'variability_z.@variability_z')

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############
    parcellated_ts = Node(util.Function(
        input_names=[
            'in_data', 'parcellation_name', 'parcellations_dict', 'bp_freqs',
            'tr'
        ],
        output_names=[
            'parcellation_time_series', 'parcellation_time_series_file',
            'masker_file'
        ],
        function=calc_metrics_utils.extract_parcellation_time_series),
                          name='parcellated_ts')

    parcellated_ts.inputs.parcellations_dict = parcellations_dict
    parcellated_ts.inputs.tr = TR
    #wf.connect(fixed_tr_fullspectrum, 'out_file', parcellated_ts, 'in_data')
    wf.connect(selectfiles, 'epi_MNI_fullspectrum', parcellated_ts, 'in_data')
    wf.connect(parcellation_infosource, 'parcellation', parcellated_ts,
               'parcellation_name')
    wf.connect(bp_filter_infosource, 'bp_freqs', parcellated_ts, 'bp_freqs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(
        input_names=['in_data', 'extraction_method'],
        output_names=['matrix', 'matrix_file'],
        function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts, 'parcellation_time_series', con_mat, 'in_data')
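
    # With extraction_method='correlation' the connectivity matrix is in
    # effect the Pearson correlation of the parcel time series, i.e. roughly:
    #   matrix = np.corrcoef(parcellation_time_series.T)
    # for an array of shape (timepoints, parcels).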

    ##############
    ## ds
    ##############

    wf.connect(parcellated_ts, 'parcellation_time_series_file', ds,
               'con_mat.parcellated_time_series.@parc_ts')
    wf.connect(parcellated_ts, 'masker_file', ds,
               'con_mat.parcellated_time_series.@masker')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix.@mat')

    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # or graph2use='hierarchical'
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name,
               plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
Exemple #19
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni
import nipype.algorithms.confounds as confounds

#Flexibly collect data from disk to feed into workflows.
io_select_files = pe.Node(io.SelectFiles(templates={'anat': 'sub-{sub_id}/anat/sub-{sub_id}_*_T1w.nii.gz',
                                                    'func': 'sub-{sub_id}/func/sub-{sub_id}_*_bold.nii.gz'}),
                          name='io_select_files')
io_select_files.inputs.base_directory = bids_dir
#iterate over subjects; the iterable name must match the {sub_id} template placeholder
io_select_files.iterables = [('sub_id', sub_id)]

#Wraps the executable command ``mcflirt``.
fsl_mcflirt = pe.MapNode(interface = fsl.MCFLIRT(), name='fsl_mcflirt', iterfield = ['in_file'])

#Wraps the executable command ``bet``.
fsl_bet = pe.MapNode(interface = fsl.BET(), name='fsl_bet', iterfield = ['in_file'])
fsl_bet.inputs.frac = 0.6

#Wraps the executable command ``flirt``.
flirt_EPItoT1 = pe.MapNode(interface = fsl.FLIRT(), name='flirt_EPItoT1', iterfield = ['in_file', 'reference'])

#Wraps the executable command ``flirt``.
flirt_T1toMNI = pe.Node(interface = fsl.FLIRT(), name='flirt_T1toMNI')
Exemple #20
def create_lemon_resting(subject, working_dir, data_dir, freesurfer_dir,
                         out_dir, vol_to_remove, TR, epi_resolution, highpass,
                         lowpass, echo_space, te_diff, pe_dir):

    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # main workflow
    func_preproc = Workflow(name='lemon_resting')
    func_preproc.base_dir = working_dir
    func_preproc.config['execution'][
        'crashdump_dir'] = func_preproc.base_dir + "/crash_files"

    # select files
    templates = {
        'func': 'nifti/lemon_resting/rest.nii.gz',
        'fmap_phase': 'nifti/lemon_resting/fmap_phase.nii.gz',
        'fmap_mag': 'nifti/lemon_resting/fmap_mag.nii.gz',
        'anat_head': 'preprocessed/anat/T1.nii.gz',
        'anat_brain': 'preprocessed/anat/T1_brain.nii.gz',
        'brain_mask': 'preprocessed/anat/T1_brain_mask.nii.gz'
    }
    selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')
    remove_vol.inputs.t_min = vol_to_remove

    # workflow for motion correction
    moco = create_moco_pipeline()

    # workflow for fieldmap correction and coregistration
    fmap_coreg = create_fmap_coreg_pipeline()
    fmap_coreg.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    fmap_coreg.inputs.inputnode.fs_subject_id = subject
    fmap_coreg.inputs.inputnode.echo_space = echo_space
    fmap_coreg.inputs.inputnode.te_diff = te_diff
    fmap_coreg.inputs.inputnode.pe_dir = pe_dir

    # workflow for applying transformations to timeseries
    transform_ts = create_transform_pipeline()
    transform_ts.inputs.inputnode.resolution = epi_resolution

    # workflow to denoise timeseries
    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.highpass_sigma = 1. / (2 * TR * highpass)
    denoise.inputs.inputnode.lowpass_sigma = 1. / (2 * TR * lowpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    denoise.inputs.inputnode.tr = TR

    #sink to store files
    sink = Node(nio.DataSink(
        parameterization=False,
        base_directory=out_dir,
        substitutions=[
            ('fmap_phase_fslprepared', 'fieldmap'),
            ('fieldmap_fslprepared_fieldmap_unmasked_vsm', 'shiftmap'),
            ('plot.rest_coregistered', 'outlier_plot'),
            ('filter_motion_comp_norm_compcor_art_dmotion',
             'nuissance_matrix'),
            ('rest_realigned.nii.gz_abs.rms', 'rest_realigned_abs.rms'),
            ('rest_realigned.nii.gz.par', 'rest_realigned.par'),
            ('rest_realigned.nii.gz_rel.rms', 'rest_realigned_rel.rms'),
            ('rest_realigned.nii.gz_abs_disp', 'abs_displacement_plot'),
            ('rest_realigned.nii.gz_rel_disp', 'rel_displacment_plot'),
            ('art.rest_coregistered_outliers', 'outliers'),
            ('global_intensity.rest_coregistered', 'global_intensity'),
            ('norm.rest_coregistered', 'composite_norm'),
            ('stats.rest_coregistered', 'stats'),
            ('rest_denoised_bandpassed_norm.nii.gz',
             'rest_preprocessed.nii.gz')
        ]),
                name='sink')

    # connections
    func_preproc.connect([
        (selectfiles, remove_vol, [('func', 'in_file')]),
        (remove_vol, moco, [('out_file', 'inputnode.epi')]),
        (selectfiles, fmap_coreg, [('fmap_phase', 'inputnode.phase'),
                                   ('fmap_mag', 'inputnode.mag'),
                                   ('anat_head', 'inputnode.anat_head'),
                                   ('anat_brain', 'inputnode.anat_brain')]),
        (moco, fmap_coreg, [('outputnode.epi_mean', 'inputnode.epi_mean')]),
        (remove_vol, transform_ts, [('out_file', 'inputnode.orig_ts')]),
        (selectfiles, transform_ts, [('anat_head', 'inputnode.anat_head')]),
        (moco, transform_ts, [('outputnode.mat_moco', 'inputnode.mat_moco')]),
        (fmap_coreg, transform_ts, [('outputnode.fmap_fullwarp',
                                     'inputnode.fullwarp')]),
        (selectfiles, denoise, [('brain_mask', 'inputnode.brain_mask'),
                                ('anat_brain', 'inputnode.anat_brain')]),
        (moco, denoise, [('outputnode.par_moco', 'inputnode.moco_par')]),
        (fmap_coreg, denoise,
         [('outputnode.epi2anat_dat', 'inputnode.epi2anat_dat'),
          ('outputnode.unwarped_mean_epi2fmap', 'inputnode.unwarped_mean')]),
        (transform_ts, denoise, [('outputnode.trans_ts', 'inputnode.epi_coreg')
                                 ]),
        (
            moco,
            sink,
            [  #('outputnode.epi_moco', 'realign.@realigned_ts'),
                ('outputnode.par_moco', 'realign.@par'),
                ('outputnode.rms_moco', 'realign.@rms'),
                ('outputnode.mat_moco', 'realign.MAT.@mat'),
                ('outputnode.epi_mean', 'realign.@mean'),
                ('outputnode.rotplot', 'realign.plots.@rotplot'),
                ('outputnode.transplot', 'realign.plots.@transplot'),
                ('outputnode.dispplots', 'realign.plots.@dispplots'),
                ('outputnode.tsnr_file', 'realign.@tsnr')
            ]),
        (
            fmap_coreg,
            sink,
            [
                ('outputnode.fmap', 'coregister.transforms2anat.@fmap'),
                #('outputnode.unwarpfield_epi2fmap', 'coregister.@unwarpfield_epi2fmap'),
                ('outputnode.unwarped_mean_epi2fmap',
                 'coregister.@unwarped_mean_epi2fmap'),
                ('outputnode.epi2fmap', 'coregister.@epi2fmap'),
                #('outputnode.shiftmap', 'coregister.@shiftmap'),
                ('outputnode.fmap_fullwarp',
                 'coregister.transforms2anat.@fmap_fullwarp'),
                ('outputnode.epi2anat', 'coregister.@epi2anat'),
                ('outputnode.epi2anat_mat',
                 'coregister.transforms2anat.@epi2anat_mat'),
                ('outputnode.epi2anat_dat',
                 'coregister.transforms2anat.@epi2anat_dat'),
                ('outputnode.epi2anat_mincost', 'coregister.@epi2anat_mincost')
            ]),
        (
            transform_ts,
            sink,
            [  #('outputnode.trans_ts', 'coregister.@full_transform_ts'),
                ('outputnode.trans_ts_mean',
                 'coregister.@full_transform_mean'),
                ('outputnode.resamp_brain', 'coregister.@resamp_brain')
            ]),
        (denoise, sink,
         [('outputnode.wmcsf_mask', 'denoise.mask.@wmcsf_masks'),
          ('outputnode.combined_motion', 'denoise.artefact.@combined_motion'),
          ('outputnode.outlier_files', 'denoise.artefact.@outlier'),
          ('outputnode.intensity_files', 'denoise.artefact.@intensity'),
          ('outputnode.outlier_stats', 'denoise.artefact.@outlierstats'),
          ('outputnode.outlier_plots', 'denoise.artefact.@outlierplots'),
          ('outputnode.mc_regressor', 'denoise.regress.@mc_regressor'),
          ('outputnode.comp_regressor', 'denoise.regress.@comp_regressor'),
          ('outputnode.mc_F', 'denoise.regress.@mc_F'),
          ('outputnode.mc_pF', 'denoise.regress.@mc_pF'),
          ('outputnode.comp_F', 'denoise.regress.@comp_F'),
          ('outputnode.comp_pF', 'denoise.regress.@comp_pF'),
          ('outputnode.brain_mask_resamp', 'denoise.mask.@brain_resamp'),
          ('outputnode.brain_mask2epi', 'denoise.mask.@brain_mask2epi'),
          ('outputnode.normalized_file', '@normalized')])
    ])

    #func_preproc.write_graph(dotfilename='func_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    func_preproc.run()
Exemple #21
def create_preprocess_workflow(name,
                               work_dir,
                               sessions_file,
                               session_template,
                               scan_list,
                               fs_dir,
                               do_extract_inplane=True,
                               do_save_inplane=True,
                               do_align_to_anatomy=True,
                               do_align_qa=True,
                               do_save_align_qa=True,
                               do_save_strip=True,
                               do_save_align=True,
                               do_extract_functionals=True,
                               do_save_functionals=True,
                               do_within_run_align=True,
                               do_slice_timing_correction=True,
                               do_between_run_align=True,
                               do_merge_functionals=True,
                               do_within_subject_align=True,
                               do_save_merge=True):
    #initialize workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    ##for each session
    sessions_info = ColumnData(sessions_file, dtype=str)
    sessions = pe.Node(interface=util.IdentityInterface(
        fields=['session_dir', 'subject_id', 'ref_vol']),
                       name='sessions')
    sessions.iterables = sessions_info.items()
    sessions.synchronize = True
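    # synchronize=True pairs the iterable lists element-wise (one branch per
    # row of the sessions file) instead of expanding their Cartesian product.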

    #get session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    #save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    #extract inplane
    if do_extract_inplane:
        extract_inplane = create_extract_inplane_workflow()
        workflow.connect(get_session_dir, 'session_dir', extract_inplane,
                         'inputs.session_dir')
        workflow.connect(sessions, 'ref_vol', extract_inplane,
                         'inputs.ref_vol')

        if do_save_inplane:
            workflow.connect(extract_inplane, 'outputs.out_file', datasink,
                             'mri.@inplane')

            #align inplanes to anatomy
            if do_align_to_anatomy:
                get_anatomy = pe.Node(interface=nio.FreeSurferSource(),
                                      name='get_anatomy')
                get_anatomy.inputs.subjects_dir = fs_dir
                workflow.connect(sessions, 'subject_id', get_anatomy,
                                 'subject_id')

                align_to_anatomy = create_align_to_anatomy_workflow()
                workflow.connect(extract_inplane, 'outputs.out_file',
                                 align_to_anatomy, 'inputs.inplane_file')
                workflow.connect(get_anatomy, 'brain', align_to_anatomy,
                                 'inputs.anatomy_file')

                if do_align_qa:
                    align_qa = pe.Node(interface=nmutil.AlignmentQA(),
                                       name='align_qa')
                    workflow.connect(get_anatomy, 'brain', align_qa,
                                     'target_file')
                    workflow.connect(align_to_anatomy, 'outputs.strip_file',
                                     align_qa, 'source_file')
                    workflow.connect(align_to_anatomy, 'outputs.xfm_file',
                                     align_qa, 'reg_file')

                    if do_save_align_qa:
                        workflow.connect(align_qa, 'out_file', datasink,
                                         'qa.inplane_to_anatomy')

                if do_save_strip:
                    workflow.connect(align_to_anatomy, 'outputs.strip_file',
                                     datasink, 'mri.@inplane.@strip')

                if do_save_align:
                    workflow.connect(align_to_anatomy, 'outputs.xfm_file',
                                     datasink,
                                     'mri.transforms.@inplane_to_anatomy')

    if do_extract_functionals:
        ##for each functional
        scans = pe.Node(interface=util.IdentityInterface(fields=['scan']),
                        name='scans')
        scans.iterables = ('scan', scan_list)

        #extract functionals
        extract_functional = create_extract_functional_workflow()
        workflow.connect(get_session_dir, 'session_dir', extract_functional,
                         'inputs.session_dir')
        workflow.connect(scans, 'scan', extract_functional, 'inputs.scan')
        last_node = extract_functional

        #simultaneous slicing timing and motion correction
        if do_within_run_align:
            within_run_align = create_within_run_align_workflow(
                slice_timing_correction=do_slice_timing_correction)
            workflow.connect(last_node, 'outputs.out_file', within_run_align,
                             'inputs.in_file')
            last_node = within_run_align

        ##with all functionals
        join_functionals = pe.JoinNode(
            interface=util.IdentityInterface(fields=['functionals']),
            name='join_functionals',
            joinsource='scans')

        workflow.connect(last_node, 'outputs.out_file', join_functionals,
                         'functionals')
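
        # The JoinNode collapses the 'scans' iterable: each scan's aligned
        # time series is gathered into one list on 'functionals' so the
        # between-run alignment sees all runs at once.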

        #between run align
        if do_between_run_align:
            between_run_align = create_between_run_align_workflow()
            workflow.connect(join_functionals, 'functionals',
                             between_run_align, 'inputs.in_files')
            workflow.connect(sessions, 'ref_vol', between_run_align,
                             'inputs.ref_vol')

            workflow.connect(between_run_align, 'outputs.out_files', datasink,
                             'mri.@functionals')

            #merge functionals
            if do_merge_functionals:
                merge_functionals = pe.Node(interface=fsl.Merge(),
                                            name='merge_functionals')
                merge_functionals.inputs.dimension = 't'
                format_string = 'f'
                rename_merged = pe.Node(interface=util.Rename(format_string),
                                        name='rename_merged')
                rename_merged.inputs.keep_ext = True
                workflow.connect(between_run_align, 'outputs.out_files',
                                 merge_functionals, 'in_files')
                workflow.connect(merge_functionals, 'merged_file',
                                 rename_merged, 'in_file')

                if do_save_merge:
                    workflow.connect(rename_merged, 'out_file', datasink,
                                     'mri.@functionals.@merged')

    return workflow
Exemple #22
def run_workflow(csv_file, res_fld, contrasts_name, RegSpace,
                 motion_outliers_type):
    # Define outputfolder
    if res_fld == 'use_csv':
        # get a unique label, derived from csv name
        csv_stem = get_csv_stem(csv_file)
        out_label = csv_stem.replace('-', '_')  # replace - with _
    else:
        out_label = res_fld
    workflow = pe.Workflow(name='run_level2flow_' + out_label)
    workflow.base_dir = os.path.abspath('./workingdirs')

    from nipype import config, logging
    config.update_config({
        'logging': {
            'log_directory': os.path.join(workflow.base_dir, 'logs'),
            'log_to_file': True,
            # 'workflow_level': 'DEBUG', #  << massive output
            # 'interface_level': 'DEBUG', #  << massive output
            'workflow_level': 'INFO',
            'interface_level': 'INFO',
        }
    })
    logging.update_logging(config)
    #config.enable_debug_mode()

    # redundant with enable_debug_mode() ...
    # route these through the config dict so they actually take effect
    workflow.config['execution']['stop_on_first_crash'] = True
    workflow.config['execution']['remove_unnecessary_outputs'] = False
    workflow.config['execution']['keep_inputs'] = True
    workflow.config['execution']['hash_method'] = 'content'

    modelfit = create_workflow(out_label, contrasts_name, RegSpace)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]),
                        name='input')

    assert csv_file is not None, "--csv argument must be defined!"

    if csv_file is not None:
        # Read csv and use pandas to set-up image and ev-processing
        df = pd.read_csv(csv_file)
        # init lists
        sub_img = []
        ses_img = []
        run_img = []
        ref_img = []

        # fill lists to iterate mapnodes
        for index, row in df.iterrows():
            for r in row.run.strip("[]").split(" "):
                sub_img.append(row.subject)
                ses_img.append(row.session)
                run_img.append(r)
                if 'refsubject' in df.columns:
                    # pandas reads an empty csv field as NaN (float), so test
                    # for missing values rather than the string 'nan'
                    if pd.isnull(row.refsubject) or row.refsubject == 'nan':
                        # empty field
                        ref_img.append(row.subject)
                    else:
                        # non-empty field
                        ref_img.append(row.refsubject)
                else:
                    ref_img.append(row.subject)

        inputnode.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
        inputnode.synchronize = True
    else:
        print("No csv-file specified. Don't know what data to process.")

    # Registration space determines which files to use
    if RegSpace == 'nmt':
        # use the warped files
        maskfld = 'transforms'
        maskfn = 'func2nmt_mask_res-1x1x1.nii.gz'
    elif RegSpace == 'native':
        # use the functional files
        maskfld = 'func'
        maskfn = 'ref_func_mask_res-1x1x1.nii.gz'
    else:
        raise RuntimeError('ERROR - Unknown reg-space "%s"' % RegSpace)

    if motion_outliers_type == 'selected':
        mofmod = 'selected/'
    elif motion_outliers_type == 'single' or motion_outliers_type == 'merged':
        mofmod = 'mo-' + motion_outliers_type + '/'
    else:
        raise RuntimeError('ERROR - Unknown motion_outliers_type "%s"' %
                           motion_outliers_type)

    templates = {
        'ref_funcmask':  # was: manualmask
        'reference-vols/sub-{refsubject_id}/' + maskfld +
        '/sub-{subject_id}_' + maskfn,
        'copes':
        'derivatives/modelfit/' + contrasts_name +'/' + RegSpace + '/level1/'
        + mofmod + 'sub-{subject_id}/ses-{session_id}/run-{run_id}/copes/cope*.nii.gz',
        'dof_file':
        'derivatives/modelfit/' + contrasts_name +'/' + RegSpace + '/level1/'
        + mofmod + 'sub-{subject_id}/ses-{session_id}/run-{run_id}/dof_files/dof',
        'roi_file':
        'derivatives/modelfit/' + contrasts_name +'/' + RegSpace + '/level1/'
        + mofmod + 'sub-{subject_id}/ses-{session_id}/run-{run_id}/roi_file/*.nii.gz',
        'varcopes':
        'derivatives/modelfit/' + contrasts_name +'/' + RegSpace + '/level1/'
        + mofmod + 'sub-{subject_id}/ses-{session_id}/run-{run_id}/varcopes/varcope*.nii.gz',
    }

    inputfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                         name='in_files')

    workflow.connect([
        (inputnode, inputfiles, [
            ('subject_id', 'subject_id'),
            ('session_id', 'session_id'),
            ('refsubject_id', 'refsubject_id'),
            ('run_id', 'run_id'),
        ]),
    ])

    join_input = pe.JoinNode(
        niu.IdentityInterface(fields=[
            'copes',
            'dof_file',
            'varcopes',
            'ref_funcmask',
        ]),
        joinsource='input',
        joinfield=[
            'copes',
            'dof_file',
            'varcopes',
        ],
        # unique=True,
        name='join_input')
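    # The JoinNode collapses the per-run iterations defined on `inputnode`
    # (joinsource='input') back into lists, so the level-2 model receives all
    # runs' copes/varcopes/dof files at once; ref_funcmask is not a joinfield
    # and remains a single file.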

    workflow.connect([
        (inputfiles, join_input, [
            ('copes', 'copes'),
            ('dof_file', 'dof_file'),
            ('varcopes', 'varcopes'),
            ('ref_funcmask', 'ref_funcmask'),
        ]),
        (join_input, modelfit, [
            ('copes', 'inputspec.copes'),
            ('dof_file', 'inputspec.dof_file'),
            ('varcopes', 'inputspec.varcopes'),
            ('ref_funcmask', 'inputspec.ref_funcmask'),
        ]),
    ])

    workflow.workflow_level = 'INFO'  # INFO/DEBUG
    # workflow.stop_on_first_crash = True
    workflow.keep_inputs = True
    workflow.remove_unnecessary_outputs = False
    workflow.write_graph(simple_form=True)
    workflow.write_graph(graph2use='colored', format='png', simple_form=True)
    workflow.run()

    # copy csv file for convenience
    basedir = './derivatives/modelfit/' + contrasts_name + '/' + RegSpace + '/level2/' + out_label
    shutil.copyfile(csv_file, basedir + '/included_files.csv')
Exemple #23
'''workflow'''
if __name__ == '__main__':
    wf = pe.Workflow(name="map_to_surface")
    wf.base_dir = workingDir
    wf.config['execution']['crashdump_dir'] = wf.base_dir + "/crash_files"

    infosource = pe.Node(util.IdentityInterface(fields=['subject_id', 'scan', 'hemi']), name="infosource")
    infosource.iterables = [('subject_id', subjects),
                            ('scan', ['1a', '1b', '2a', '2b']),
                            ('hemi', ['lh', 'rh'])]


    selectfiles_templates = {'resting_lsd': '{subject_id}/preprocessed/lsd_resting/rest{scan}/rest_preprocessed.nii.gz'}


    selectfiles = pe.Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocDir),
                       name="selectfiles")
    
    
    vol2surf = pe.Node(SampleToSurface(subjects_dir=freesurferDir,
                                       target_subject='fsaverage5',
                                       args='--surfreg sphere.reg',
                                       reg_header=True,
                                       cortex_mask=True,
                                       sampling_method="average",
                                       sampling_range=(0.2, 0.8, 0.1),
                                       sampling_units="frac",
                                       smooth_surf=6.0),
                       name='vol2surf_lsd')
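    # SampleToSurface projects each volume onto the fsaverage5 surface,
    # averaging samples taken along the cortical normal between 20% and 80%
    # of the thickness in 10% steps (fractional units), then smoothing on the
    # surface with a 6 mm kernel.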

    def gen_out_file(subject_id, scan, hemi):
def min_func_preproc(subject, sessions, data_dir, fs_dir, wd, sink, TR,
                     EPI_resolution):

    #initiate min func preproc workflow
    wf = pe.Workflow(name='MPP')
    wf.base_dir = wd
    wf.config['execution']['crashdump_dir'] = wf.base_dir + "/crash_files"

    ## set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # I/O nodes
    inputnode = pe.Node(util.IdentityInterface(fields=['subjid', 'fs_dir']),
                        name='inputnode')
    inputnode.inputs.subjid = subject
    inputnode.inputs.fs_dir = fs_dir

    ds = pe.Node(nio.DataSink(base_directory=sink, parameterization=False),
                 name='sink')

    ds.inputs.substitutions = [('moco.nii.gz.par', 'moco.par'),
                               ('moco.nii.gz_', 'moco_')]

    #infosource to iterate over sessions: COND, EXT1, EXT2
    sessions_infosource = pe.Node(util.IdentityInterface(fields=['session']),
                                  name='session')
    sessions_infosource.iterables = [('session', sessions)]

    #select files
    templates = {
        'func_data': '{session}/func_data.nii.gz',
        'T1_brain': 'T1/T1_brain.nii.gz',
        'wmedge': 'T1/MASKS/aparc_aseg.WMedge.nii.gz'
    }

    selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                          name='selectfiles')

    wf.connect(sessions_infosource, 'session', selectfiles, 'session')
    wf.connect(sessions_infosource, 'session', ds, 'container')

    ##########################################################################
    ########################    START   ######################################
    ##########################################################################

    ###########################################################################
    ########################    No. 3   ######################################

    #change the data type to float
    fsl_float = pe.Node(fsl.maths.MathsCommand(output_datatype='float'),
                        name='fsl_float')

    wf.connect(selectfiles, 'func_data', fsl_float, 'in_file')

    ###########################################################################
    ########################    No. 4   ######################################

    #get FD from fsl_motion_outliers
    FD = pe.Node(fsl.MotionOutliers(out_file='func_data_FD_outliers.txt',
                                    out_metric_values='func_data_FD.txt',
                                    metric='fd'),
                 name='FD')
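    # metric='fd' is framewise displacement; fsl_motion_outliers writes the
    # per-volume FD trace (out_metric_values) plus a confound matrix with one
    # column per flagged volume (out_file)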

    wf.connect(fsl_float, 'out_file', FD, 'in_file')
    wf.connect(FD, 'out_metric_values', ds, 'QC.@FD')
    wf.connect(FD, 'out_file', ds, 'QC.@FDoutliers')

    ###########################################################################
    ########################    No. 5   ######################################

    #slice timing correction: sequential ascending
    slicetimer = pe.Node(
        fsl.SliceTimer(
            index_dir=False,
            interleaved=False,
            #slice_direction=3, #z direction
            time_repetition=TR,
            out_file='func_data_stc.nii.gz'),
        name='slicetimer')

    wf.connect(fsl_float, 'out_file', slicetimer, 'in_file')
    wf.connect(slicetimer, 'slice_time_corrected_file', ds, 'TEMP.@slicetimer')

    ###########################################################################
    ########################    No. 6   ######################################
    #realign all volumes to a fixed reference volume (ref_vol=1)
    mcflirt = pe.Node(fsl.MCFLIRT(save_mats=True,
                                  save_plots=True,
                                  save_rms=True,
                                  ref_vol=1,
                                  out_file='func_data_stc_moco.nii.gz'),
                      name='mcflirt')

    wf.connect(slicetimer, 'slice_time_corrected_file', mcflirt, 'in_file')
    wf.connect(mcflirt, 'out_file', ds, 'TEMP.@mcflirt')
    wf.connect(mcflirt, 'par_file', ds, 'MOCO.@par_file')
    wf.connect(mcflirt, 'rms_files', ds, 'MOCO.@rms_files')
    wf.connect(mcflirt, 'mat_file', ds, 'MOCO_MAT.@mcflirt')

    # plot motion parameters
    rotplotter = pe.Node(fsl.PlotMotionParams(in_source='fsl',
                                              plot_type='rotations',
                                              out_file='rotation.png'),
                         name='rotplotter')

    transplotter = pe.Node(fsl.PlotMotionParams(in_source='fsl',
                                                plot_type='translations',
                                                out_file='translation.png'),
                           name='transplotter')

    dispplotter = pe.Node(
        interface=fsl.PlotMotionParams(in_source='fsl',
                                       plot_type='displacement',
                                       out_file='displacement.png'),
        name='dispplotter')

    wf.connect(mcflirt, 'par_file', rotplotter, 'in_file')
    wf.connect(mcflirt, 'par_file', transplotter, 'in_file')
    wf.connect(mcflirt, 'rms_files', dispplotter, 'in_file')
    wf.connect(rotplotter, 'out_file', ds, 'PLOTS.@rotplot')
    wf.connect(transplotter, 'out_file', ds, 'PLOTS.@transplot')
    wf.connect(dispplotter, 'out_file', ds, 'PLOTS.@disppplot')

    #calculate tSNR and the mean
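    # (tSNR = voxel-wise temporal mean divided by temporal standard deviation:
    # moco_Tmean is the in_file and moco_Tstd the '-div' operand below)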

    moco_Tmean = pe.Node(fsl.maths.MathsCommand(args='-Tmean',
                                                out_file='moco_Tmean.nii.gz'),
                         name='moco_Tmean')

    moco_Tstd = pe.Node(fsl.maths.MathsCommand(args='-Tstd',
                                               out_file='moco_Tstd.nii.gz'),
                        name='moco_Tstd')

    tSNR0 = pe.Node(fsl.maths.MultiImageMaths(op_string='-div %s',
                                              out_file='moco_tSNR.nii.gz'),
                    name='moco_tSNR')

    wf.connect(mcflirt, 'out_file', moco_Tmean, 'in_file')
    wf.connect(mcflirt, 'out_file', moco_Tstd, 'in_file')
    wf.connect(moco_Tmean, 'out_file', tSNR0, 'in_file')
    wf.connect(moco_Tstd, 'out_file', tSNR0, 'operand_files')
    wf.connect(moco_Tmean, 'out_file', ds, 'TEMP.@moco_Tmean')
    wf.connect(moco_Tstd, 'out_file', ds, 'TEMP.@moco_Tstd')
    wf.connect(tSNR0, 'out_file', ds, 'TEMP.@moco_Tsnr')

    ###########################################################################
    ########################    No. 7   ######################################

    #bias field correction of mean epi for better coregistration
    bias = pe.Node(
        fsl.FAST(
            img_type=2,
            #restored_image='epi_Tmeanrestored.nii.gz',
            output_biascorrected=True,
            out_basename='moco_Tmean',
            no_pve=True,
            probability_maps=False),
        name='bias')

    wf.connect(moco_Tmean, 'out_file', bias, 'in_files')
    wf.connect(bias, 'restored_image', ds, 'TEMP.@restored_image')

    #co-registration to anat using FS BBregister and mean EPI
    bbregister = pe.Node(fs.BBRegister(
        subject_id=subject,
        subjects_dir=fs_dir,
        contrast_type='t2',
        init='fsl',
        out_fsl_file='func2anat.mat',
        out_reg_file='func2anat.dat',
        registered_file='moco_Tmean_restored2anat.nii.gz',
        epi_mask=True),
                         name='bbregister')

    wf.connect(bias, 'restored_image', bbregister, 'source_file')
    wf.connect(bbregister, 'registered_file', ds, 'TEMP.@registered_file')
    wf.connect(bbregister, 'out_fsl_file', ds, 'COREG.@out_fsl_file')
    wf.connect(bbregister, 'out_reg_file', ds, 'COREG.@out_reg_file')
    wf.connect(bbregister, 'min_cost_file', ds, 'COREG.@min_cost_file')

    #inverse func2anat mat
    inverseXFM = pe.Node(fsl.ConvertXFM(invert_xfm=True,
                                        out_file='anat2func.mat'),
                         name='inverseXFM')

    wf.connect(bbregister, 'out_fsl_file', inverseXFM, 'in_file')
    wf.connect(inverseXFM, 'out_file', ds, 'COREG.@out_fsl_file_inv')

    #plot the co-registration quality
    slicer = pe.Node(fsl.Slicer(middle_slices=True, out_file='func2anat.png'),
                     name='slicer')

    wf.connect(selectfiles, 'wmedge', slicer, 'image_edges')
    wf.connect(bbregister, 'registered_file', slicer, 'in_file')
    wf.connect(slicer, 'out_file', ds, 'PLOTS.@func2anat')

    ###########################################################################
    ########################    No. 8   ######################################
    #MOCO and COREGISTRATION

    #resample T1 to EPI resolution to use it as a reference image
    resample_T1 = pe.Node(
        fsl.FLIRT(datatype='float',
                  apply_isoxfm=EPI_resolution,
                  out_file='T1_brain_EPI.nii.gz'),
        #interp='nearestneighbour'),  # keep default spline interpolation so the result looks smoother
        name='resample_T1')

    wf.connect(selectfiles, 'T1_brain', resample_T1, 'in_file')
    wf.connect(selectfiles, 'T1_brain', resample_T1, 'reference')
    wf.connect(resample_T1, 'out_file', ds, 'COREG.@resample_T1')

    #concatenate matrices (moco and func2anat) volume-wise
    concat_xfm = pe.MapNode(fsl.ConvertXFM(concat_xfm=True),
                            iterfield=['in_file'],
                            name='concat_xfm')

    wf.connect(mcflirt, 'mat_file', concat_xfm, 'in_file')
    wf.connect(bbregister, 'out_fsl_file', concat_xfm, 'in_file2')
    wf.connect(concat_xfm, 'out_file', ds, 'MOCO2ANAT_MAT.@concat_out')
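    # with concat_xfm=True, convert_xfm applies in_file first and in_file2
    # second, so each volume's motion matrix is composed with func2anat,
    # mapping raw EPI volumes directly into anatomical space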

    #split func_data
    split = pe.Node(fsl.Split(dimension='t'), name='split')

    wf.connect(slicetimer, 'slice_time_corrected_file', split, 'in_file')

    #motion correction and co-registration in one interpolation step
    flirt = pe.MapNode(fsl.FLIRT(apply_xfm=True,
                                 interp='spline',
                                 datatype='float'),
                       iterfield=['in_file', 'in_matrix_file'],
                       name='flirt')

    wf.connect(split, 'out_files', flirt, 'in_file')
    wf.connect(resample_T1, 'out_file', flirt, 'reference')
    wf.connect(concat_xfm, 'out_file', flirt, 'in_matrix_file')

    #merge the files into a 4D dataset, motion corrected and co-registered to T1
    merge = pe.Node(fsl.Merge(dimension='t',
                              merged_file='func_data_stc_moco2anat.nii.gz'),
                    name='merge')

    wf.connect(flirt, 'out_file', merge, 'in_files')
    wf.connect(merge, 'merged_file', ds, 'TEMP.@merged')

    ###########################################################################
    ########################    No. 9   ######################################

    #run BET on co-registered EPI in 1mm and get the mask
    bet = pe.Node(fsl.BET(mask=True,
                          functional=True,
                          out_file='moco_Tmean_restored2anat_BET.nii.gz'),
                  name='bet')

    wf.connect(bbregister, 'registered_file', bet, 'in_file')
    wf.connect(bet, 'out_file', ds, 'TEMP.@func_data_example')
    wf.connect(bet, 'mask_file', ds, 'TEMP.@func_data_mask')

    #resample BET mask to EPI resolution
    resample_mask = pe.Node(fsl.FLIRT(
        datatype='int',
        apply_isoxfm=EPI_resolution,
        interp='nearestneighbour',
        out_file='prefiltered_func_data_mask.nii.gz'),
                            name='resample_mask')

    wf.connect(bet, 'mask_file', resample_mask, 'in_file')
    wf.connect(resample_T1, 'out_file', resample_mask, 'reference')
    wf.connect(resample_mask, 'out_file', ds, '@mask')

    #apply the mask to 4D data to get rid of the "eyes and the rest"
    mask4D = pe.Node(fsl.maths.ApplyMask(), name='mask')

    wf.connect(merge, 'merged_file', mask4D, 'in_file')
    wf.connect(resample_mask, 'out_file', mask4D, 'mask_file')

    ###########################################################################
    ########################    No. 10   ######################################

    #get the values necessary for intensity normalization
    median = pe.Node(fsl.utils.ImageStats(op_string='-k %s -p 50'),
                     name='median')

    wf.connect(resample_mask, 'out_file', median, 'mask_file')
    wf.connect(mask4D, 'out_file', median, 'in_file')

    #compute the scaling factor
    def get_factor(val):

        factor = 10000 / val
        return factor
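    # e.g. a median intensity of 5000 yields factor 2.0; multiplying the
    # masked 4D data by it sets the global median to 10000 (grand-mean
    # scaling, as in FSL FEAT)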

    get_scaling_factor = pe.Node(util.Function(input_names=['val'],
                                               output_names=['out_val'],
                                               function=get_factor),
                                 name='scaling_factor')

    #normalize the 4D func data with one scaling factor
    multiplication = pe.Node(fsl.maths.BinaryMaths(
        operation='mul', out_file='prefiltered_func_data.nii.gz'),
                             name='multiplication')

    wf.connect(median, 'out_stat', get_scaling_factor, 'val')
    wf.connect(get_scaling_factor, 'out_val', multiplication, 'operand_value')
    wf.connect(mask4D, 'out_file', multiplication, 'in_file')
    wf.connect(multiplication, 'out_file', ds, '@prefiltered_func_data')

    ###########################################################################
    ########################    No. 11   ######################################

    #calculate tSNR and the mean of the new prefiltered and detrend dataset

    tsnr_detrend = pe.Node(misc.TSNR(
        regress_poly=1,
        detrended_file='prefiltered_func_data_detrend.nii.gz',
        mean_file='prefiltered_func_data_detrend_Tmean.nii.gz',
        tsnr_file='prefiltered_func_data_detrend_tSNR.nii.gz'),
                           name='tsnr_detrend')

    wf.connect(multiplication, 'out_file', tsnr_detrend, 'in_file')
    wf.connect(tsnr_detrend, 'tsnr_file', ds, 'QC.@tsnr_detrend')
    wf.connect(tsnr_detrend, 'mean_file', ds, 'QC.@detrend_mean_file')
    wf.connect(tsnr_detrend, 'detrended_file', ds, '@detrend_file')

    #resample the EPI mask to original EPI dimensions
    convert2func = pe.Node(fsl.FLIRT(apply_xfm=True,
                                     interp='nearestneighbour',
                                     out_file='func_data_mask2func.nii.gz'),
                           name='convert2func')

    wf.connect(resample_mask, 'out_file', convert2func, 'in_file')
    wf.connect(bias, 'restored_image', convert2func, 'reference')
    wf.connect(inverseXFM, 'out_file', convert2func, 'in_matrix_file')
    wf.connect(convert2func, 'out_file', ds, 'QC.@inv')

    ###########################################################################
    ########################    RUN   ######################################
    wf.write_graph(dotfilename='wf.dot',
                   graph2use='colored',
                   format='pdf',
                   simple_form=True)
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 2})
    #wf.run()
    return
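
# A minimal invocation sketch; the paths, TR, and resolution below are
# hypothetical placeholders, not values from this source:
# min_func_preproc(subject='sub-01', sessions=['COND', 'EXT1', 'EXT2'],
#                  data_dir='/data/raw', fs_dir='/data/freesurfer',
#                  wd='/scratch/wd', sink='/data/derivatives', TR=2.0,
#                  EPI_resolution=3.0)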
def run_workflow(session, csv_file, use_pbs, stop_on_first_crash,
                 ignore_events):
    import bids_templates as bt

    from nipype import config
    config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = ''
    working_dir = 'workingdirs/minimal_processing'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
    ]),
                      name="infosource")

    if csv_file is not None:
        reader = niu.CSVReader()
        reader.inputs.header = True
        reader.inputs.in_file = csv_file
        out = reader.run()
        subject_list = out.outputs.subject
        session_list = out.outputs.session
        infosource.iterables = [
            ('session_id', session_list),
            ('subject_id', subject_list),
        ]
        if 'run' in out.outputs.traits().keys():
            print('Ignoring the "run" field of %s.' % csv_file)

        infosource.synchronize = True
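
        # the csv is assumed to have a header such as `subject,session`
        # (optionally `run`, ignored here); synchronize=True pairs the two
        # columns row-by-row instead of forming their full product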
    else:  # fallback when no csv file is given
        if session is not None:
            session_list = [session]  # ['20170511']
        else:
            session_list = bt.session_list  # ['20170511']
        subject_list = bt.subject_list  # define for use further below

        infosource.iterables = [
            ('session_id', session_list),
            ('subject_id', subject_list),
        ]

    process_images = True

    if process_images:
        datatype_list = bt.datatype_list

        imgsource = Node(IdentityInterface(fields=[
            'subject_id',
            'session_id',
            'datatype',
        ]),
                         name="imgsource")
        imgsource.iterables = [
            ('session_id', session_list),
            ('subject_id', subject_list),
            ('datatype', datatype_list),
        ]

        # SelectFiles
        imgfiles = Node(nio.SelectFiles(
            {
                'images': 'sourcedata/%s' % bt.templates['images'],
            },
            base_directory=data_dir),
                        name="img_files")

    evsource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
    ]),
                    name="evsource")
    evsource.iterables = [
        ('session_id', session_list),
        ('subject_id', subject_list),
    ]
    evfiles = Node(nio.SelectFiles(
        {
            'csv_eventlogs':
            'sourcedata/sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}_*events/Log_*_eventlog.csv',
            'stim_dir':
            'sourcedata/sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}_*events/',
        },
        base_directory=data_dir),
                   name="evfiles")

    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(base_directory=ds_root,
                                    container=output_dir,
                                    parameterization=True),
                       name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [('subject_id_', 'sub-'),
                                        ('session_id_', 'ses-'),
                                        ('/minimal_processing/', '/'),
                                        ('_out_reoriented.nii.gz', '.nii.gz')]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_datatype_([a-z]*)_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)',
         r'sub-\3/ses-\2/\1'),
        (r'/_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)', r'/sub-\2/ses-\1/'),
        (r'/_ro[0-9]+/', r'/'),
        (r'/_csv2tsv[0-9]+/', r'/func/'),
    ]

    # -------------------------------------------- Create Pipeline
    workflow = Workflow(name='wrapper',
                        base_dir=os.path.join(ds_root, working_dir))

    if process_images:
        workflow.connect([(imgsource, imgfiles, [
            ('subject_id', 'subject_id'),
            ('session_id', 'session_id'),
            ('datatype', 'datatype'),
        ])])

    workflow.connect([
        (evsource, evfiles, [
            ('subject_id', 'subject_id'),
            ('session_id', 'session_id'),
        ]),
    ])

    if process_images:
        minproc = create_images_workflow()
        workflow.connect(imgfiles, 'images', minproc, 'in.images')
        workflow.connect(minproc, 'out.images', outputfiles,
                         'minimal_processing.@images')

    if not ignore_events:
        csv2tsv = MapNode(ConvertCSVEventLog(),
                          iterfield=['in_file', 'stim_dir'],
                          name='csv2tsv')
        workflow.connect(evfiles, 'csv_eventlogs', csv2tsv, 'in_file')
        workflow.connect(evfiles, 'stim_dir', csv2tsv, 'stim_dir')
        workflow.connect(csv2tsv, 'out_file', outputfiles,
                         'minimal_processing.@eventlogs')

    workflow.stop_on_first_crash = stop_on_first_crash
    workflow.keep_inputs = True
    workflow.remove_unnecessary_outputs = False
    workflow.write_graph()
    #workflow.run(plugin='MultiProc', plugin_args={'n_procs' : 10})
    workflow.run()
Exemple #26
# infosource to iterate over subjects
subject_infosource=Node(util.IdentityInterface(fields=['subject_id']),
                        name='subject_infosource')
subject_infosource.iterables=('subject_id', subjects)

# infosource to iterate over scans
scan_infosource=Node(util.IdentityInterface(fields=['scan']),
                        name='scan_infosource')
scan_infosource.iterables=('scan', scans)

# select files
templates_1={'tsnr': '{subject_id}/preprocessed/lsd_resting/{scan}/realign/*tsnr.nii.gz',
             'anat_resamp': '{subject_id}/preprocessed/lsd_resting/{scan}/coregister/T1_resampled.nii.gz',
             #'affine': '{subject_id}/preprocessed/anat/transforms2mni/transform0GenericAffine.mat',
             #'warp': '{subject_id}/preprocessed/anat/transforms2mni/transform1Warp.nii.gz',
             'func_warp': '{subject_id}/preprocessed/lsd_resting/{scan}/coregister/transforms2anat/fullwarpfield.nii.gz'
             }
selectfiles_1 = Node(nio.SelectFiles(templates_1,
                                   base_directory=afs_dir),
                   name="selectfiles_1")

mni.connect([(subject_infosource, selectfiles_1, [('subject_id', 'subject_id')]),
             (scan_infosource, selectfiles_1, [('scan', 'scan')])])

# select files
templates_2={'affine': '{subject_id}/preprocessed/anat/transforms2mni/transform0GenericAffine.mat',
             'warp': '{subject_id}/preprocessed/anat/transforms2mni/transform1Warp.nii.gz',
             }
selectfiles_2 = Node(nio.SelectFiles(templates_2,
                                   base_directory=ilz_dir),
                   name="selectfiles_2")

mni.connect([(subject_infosource, selectfiles_2, [('subject_id', 'subject_id')])])
Exemple #27
def calc_local_metrics(brain_mask, preprocessed_data_dir, subject_id,
                       parcellations_dict, bp_freq_list, TR,
                       selectfiles_templates, working_dir, ds_dir, use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.freesurfer.preprocess import MRIConvert

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils

    import utils as calc_metrics_utils
    from motion import calculate_FD_P, calculate_FD_J

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 15
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [
        ('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
        ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
    ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(
        util.IdentityInterface(fields=['parcellation']),
        name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation',
                                         parcellations_dict.keys())

    # BP FILTER ITERATOR
    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']),
                                name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name='selectfiles')
    selectfiles.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############
    parcellated_ts = Node(util.Function(
        input_names=[
            'in_data', 'parcellation_name', 'parcellations_dict', 'bp_freqs',
            'tr'
        ],
        output_names=[
            'parcellation_time_series', 'parcellation_time_series_file',
            'masker_file'
        ],
        function=calc_metrics_utils.extract_parcellation_time_series),
                          name='parcellated_ts')

    parcellated_ts.inputs.parcellations_dict = parcellations_dict
    parcellated_ts.inputs.tr = TR
    wf.connect(selectfiles, 'epi_MNI_fullspectrum', parcellated_ts, 'in_data')
    wf.connect(parcellation_infosource, 'parcellation', parcellated_ts,
               'parcellation_name')
    wf.connect(bp_filter_infosource, 'bp_freqs', parcellated_ts, 'bp_freqs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(
        input_names=['in_data', 'extraction_method'],
        output_names=['matrix', 'matrix_file'],
        function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts, 'parcellation_time_series', con_mat, 'in_data')
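
    # with extraction_method='correlation' the returned matrix is presumably
    # the parcel-by-parcel correlation of the extracted time series (one
    # row/column per parcel)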

    ##############
    ## ds
    ##############

    wf.connect(parcellated_ts, 'parcellation_time_series_file', ds,
               'con_mat.parcellated_time_series.@parc_ts')
    wf.connect(parcellated_ts, 'masker_file', ds,
               'con_mat.parcellated_time_series.@masker')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix.@mat')

    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name,
               plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
import nipype.pipeline.engine as pe
import nipype.interfaces.io as io
import nipype.interfaces.spm as spm
import nipype.interfaces.afni as afni
import nipype.interfaces.dipy as dipy
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds
import nipype.interfaces.ants as ants
import nipype.interfaces.camino as camino
import nipype.interfaces.mrtrix as mrtrix

#Generic datasink module to store structured outputs
io_DataSink = pe.Node(interface=io.DataSink(), name='io_DataSink')

#BIDS datagrabber module that wraps around pybids to allow arbitrary querying of BIDS datasets
io_BIDSDataGrabber = pe.Node(interface=io.BIDSDataGrabber(),
                             name='io_BIDSDataGrabber')

#Flexibly collect data from disk to feed into workflows.
io_SelectFiles = pe.Node(io.SelectFiles(templates={}), name='io_SelectFiles')

#Use spm_realign for estimating within modality rigid body alignment
spm_Realign = pe.Node(interface=spm.Realign(), name='spm_Realign')

#Use spm_smooth for 3D Gaussian smoothing of image volumes.
spm_Smooth = pe.Node(interface=spm.Smooth(), name='spm_Smooth')

#Use spm_coreg for estimating cross-modality rigid body alignment
spm_Coregister = pe.Node(interface=spm.Coregister(), name='spm_Coregister')

#Wraps the executable command ``3dSkullStrip``.
afni_SkullStrip = pe.Node(interface=afni.SkullStrip(), name='afni_SkullStrip')

#Wraps the executable command ``3dretroicor``.
afni_Retroicor = pe.Node(interface=afni.Retroicor(), name='afni_Retroicor')
def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'funcs',
        'subject_id',
        'session_id',
        'fwhm',  # smoothing
        'highpass'
    ]), name="inputspec")

    # SelectFiles
    templates = {
        'ref_manual_fmapmask':  # was: manual_fmapmask
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_manualmask.nii.gz',

        'ref_fmap_magnitude':
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_reference.nii.gz',

        'ref_fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        # 'manualweights':
        # 'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',

        'ref_func':  # was: manualmask_func_ref
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_reference.nii.gz',

        'ref_funcmask':  # was: manualmask
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_mask.nii.gz',

        'ref_t1':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_reference.nii.gz',

        'ref_t1mask':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_manualmask.nii.gz',

        # 'funcs':
        # 'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        #     # 'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
        #     'sub-{subject_id}_ses-{session_id}*run-01_bold_res-1x1x1_preproc'
        #     # '.nii.gz',
        #     '_nvol10.nii.gz',

        'fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        'fmap_magnitude':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
            '.nii.gz',

        # 'fmap_mask':
        # 'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = pe.Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    featpreproc.connect(
        [(inputnode, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # ------------------ Output Files
    # Datasink
    outputfiles = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container='derivatives/featpreproc',
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['motion_parameters',
                'motion_corrected',
                'motion_plots',
                'motion_outlier_files',
                'mask',
                'smoothed_files',
                'highpassed_files',
                'mean',
                'func_unwarp',
                'ref_func',
                'ref_funcmask',
                'ref_t1',
                'ref_t1mask',
                ]),
        name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputfiles, transmanmask_mc,
         [('subject_id', 'in.subject_id'),
          ('session_id', 'in.session_id'),
          ])])

    featpreproc.connect(inputfiles, 'ref_funcmask',
                        transmanmask_mc, 'in.manualmask')
    featpreproc.connect(inputnode, 'funcs',
                        transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func',
                        transmanmask_mc, 'in.manualmask_func_ref')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask',
                            trans_fmapmask, 'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude',
                            trans_fmapmask, 'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func',
                            trans_fmapmask, 'in.manualmask_func_ref')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect(
        [
         (inputnode, median_func,
          [
           ('funcs', 'in_file'),
           ]),
         (median_func, pre_mc,
          [
           ('out_file', 'in.funcs'),
           ]),
         (inputfiles, pre_mc,
          [
           # median func image will be used as a reference / base
           ('ref_func', 'in.ref_func'),
           ('ref_funcmask', 'in.ref_func_weights'),
          ]),
         (transmanmask_mc, pre_mc,
          [
           ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (pre_mc, outputnode,
          [
           ('mc.out_file', 'pre_motion_corrected'),
           ('mc.oned_file', 'pre_motion_parameters.oned_file'),
           ('mc.oned_matrix_save', 'pre_motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles,
          [
           ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
           ('pre_motion_parameters.oned_file', 'pre_motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
           ('pre_motion_parameters.oned_matrix_save', 'pre_motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))
    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputnode, mc,
          [
           ('funcs', 'in.funcs'),
           ]),
         (pre_mc, mc, [
             # the median image realigned to the reference functional will serve as reference
             #  this way motion correction is done to an image more similar to the functionals
             ('mc.out_file', 'in.ref_func'),
           ]),
         (inputfiles, mc, [
             # Check and make sure the ref func mask is close enough to the registered median
             # image.
             ('ref_funcmask', 'in.ref_func_weights'),
           ]),
         (transmanmask_mc, mc, [
             ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (mc, outputnode, [
             ('mc.out_file', 'motion_corrected'),
             ('mc.oned_file', 'motion_parameters.oned_file'),
             ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles, [
             ('motion_corrected', 'motion_corrected.out_file'),
             ('motion_parameters.oned_file', 'motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
             ('motion_parameters.oned_matrix_save', 'motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Performing motion correction to a reference that is undistorted,
    # so b0_unwarp is currently not needed
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect(
            [(inputfiles, b0_unwarp,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('fmap_phasediff', 'in.fmap_phasediff'),
               ('fmap_magnitude', 'in.fmap_magnitude'),
               ]),
             (mc, b0_unwarp,
              [('mc.out_file', 'in.funcs'),
               ]),
             (transmanmask_mc, b0_unwarp,
              [('funcreg.out_file', 'in.funcmasks'),
               ]),
             (trans_fmapmask, b0_unwarp,
              [('funcreg.out_file', 'in.fmap_mask')]),
             (b0_unwarp, outputfiles,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'func_unwarp.funcmasks'),
               ]),
             (b0_unwarp, outputnode,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'mask'),
               ]),
             ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect(
            [(inputfiles, b0_unwarp_ref,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('ref_fmap_phasediff', 'in.fmap_phasediff'),
               ('ref_fmap_magnitude', 'in.fmap_magnitude'),
               ('ref_manual_fmapmask', 'in.fmap_mask'),
               ('ref_func', 'in.funcs'),
               ('ref_funcmask', 'in.funcmasks'),
               ]),
             (b0_unwarp_ref, outputfiles,
              [('out.funcs', 'func_unwarp_ref.func'),
               ('out.funcmasks', 'func_unwarp_ref.funcmask'),
               ]),
             (b0_unwarp_ref, outputnode,
              [('out.funcs', 'ref_func'),
               ('out.funcmasks', 'ref_mask'),
               ]),
             ])
    else:
        featpreproc.connect(
            [(inputfiles, outputfiles,
              [('ref_func', 'reference/func'),
               ('ref_funcmask', 'reference/func_mask'),
               ]),
             (inputfiles, outputnode,
              [('ref_func', 'ref_func'),
               ('ref_funcmask', 'ref_funcmask'),
               ]),
             ])

    # |~) _  _ . __|_ _  _  _|_ _   |~) _  |` _  _ _  _  _ _ 
    # |~\(/_(_||_\ | (/_|    | (_)  |~\(/_~|~(/_| (/_| |(_(/_
    #        _|
    # Register all functionals to common reference
    # --------------------------------------------------------
    if False:  # this is now done during motion correction
        # FLIRT cost: intermodal: corratio, intramodal: least squares and normcorr
        reg_to_ref = pe.MapNode(  # intra-modal
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='normcorr'),
            name='reg_to_ref',
            iterfield=('in_file', 'in_weight'),
        )
        refEPI_to_refT1 = pe.Node(
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='corratio'),
            name='refEPI_to_refT1',
        )
        # combine func -> ref_func and ref_func -> ref_T1
        reg_to_refT1 = pe.MapNode(
            interface=fsl.ConvertXFM(concat_xfm=True),
            name='reg_to_refT1',
            iterfield=('in_file'),
        )

        reg_funcs = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcs',
            iterfield=('in_file', 'in_matrix_file'),
        )
        reg_funcmasks = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcmasks',
            iterfield=('in_file', 'in_matrix_file')
        )

        def deref_list(x):
            assert len(x)==1
            return x[0]

        featpreproc.connect(
            [
             (b0_unwarp, reg_to_ref,  # --> reg_to_ref, (A)
              [
               ('out.funcs', 'in_file'),
               ('out.funcmasks', 'in_weight'),
              ]),
             (b0_unwarp_ref, reg_to_ref,
              [
               (('out.funcs', deref_list), 'reference'),
               (('out.funcmasks', deref_list), 'ref_weight'),
              ]),

             (b0_unwarp_ref, refEPI_to_refT1,  # --> refEPI_to_refT1 (B)
              [
               (('out.funcs', deref_list), 'in_file'),
               (('out.funcmasks', deref_list), 'in_weight'),
              ]),
             (inputfiles, refEPI_to_refT1,
              [
               ('ref_t1', 'reference'),
               ('ref_t1mask', 'ref_weight'),
              ]),

             (reg_to_ref, reg_to_refT1,  # --> reg_to_refT1 (A*B)
              [
               ('out_matrix_file', 'in_file'),
              ]),
             (refEPI_to_refT1, reg_to_refT1,
              [
               ('out_matrix_file', 'in_file2'),
              ]),

             (reg_to_refT1, reg_funcs,  # --> reg_funcs
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcs,
              [
               ('out.funcs', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcs,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_to_refT1, reg_funcmasks,  # --> reg_funcmasks
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcmasks,
              [
               ('out.funcmasks', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcmasks,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_funcs, outputfiles,
              [
               ('out_file', 'common_ref.func'),
              ]),
             (reg_funcmasks, outputfiles,
              [
               ('out_file', 'common_ref.funcmask'),
              ]),
        ])


    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    """
    Dilate the mask
    """
    if False:
        dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                         op_string='-dilF'),
                                iterfield=['in_file'],
                                name='dilatemask')
        featpreproc.connect(reg_funcmasks, 'out_file', dilatemask, 'in_file')
    else:
        dilatemask = pe.Node(
            interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'),
            name='dilatemask')
        featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(
        fsl.BinaryMaths(operation='mul'),
        iterfield=('in_file', 'operand_file'),
        name='funcbrains'
    )

    featpreproc.connect(
        [(mc, funcbrains,
          [('mc.out_file', 'in_file'),
          ]),
         (dilatemask, funcbrains,
          [('out_file', 'operand_file'),
          ]),
         (funcbrains, outputfiles,
          [('out_file', 'funcbrains'),
           ]),
         ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (mc, outliers,
         [  # ('mc.par_file', 'realignment_parameters'),
             ('mc.oned_file', 'realignment_parameters'),
         ]),
        (funcbrains, outliers,
         [('out_file', 'realigned_files'),
          ]),
        (dilatemask, outliers,
         [('out_file', 'mask_file'),
          ]),
        (outliers, outputfiles,
         [('outlier_files', 'motion_outliers.@outlier_files'),
          ('plot_files', 'motion_outliers.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ]),
        (mc, outputnode,
         [('mc.oned_file', 'motion_parameters'),
          ]),
        (outliers, outputnode,
         [('outlier_files', 'motion_outlier_files'),
          ('plot_files', 'motion_plots.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ])
    ])

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')

    """
    Threshold the first run of functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]
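    # `thresh` is a list of [p2, p98] pairs (one per run) produced by the
    # '-p 2 -p 98' ImageStats call above; each op_string keeps voxels above
    # 10% of that run's 98th percentile and binarizes the result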

    featpreproc.connect(
        getthresh, ('out_stat', getthreshop),
        threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')

    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    # featpreproc.connect(b0_unwarp, 'out.funcs', smooth, 'inputnode.in_files')
    if False:
        featpreproc.connect(reg_funcs, 'out_file', smooth, 'inputnode.in_files')
    else:
        featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file',
                        smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(
        smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]

    # maskfunc2 is the functional data before SUSAN
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist), concatnode, 'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')

    """
    Scale the median value of the run is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(
        medianval, ('out_stat', getmeanscale),
        meanscale, 'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(
        inputnode, ('highpass', highpass_operand),
        highpass, 'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
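
    # Note (an assumption about units): fslmaths' -bptf expects the
    # high-pass cutoff as a sigma in *volumes*, so the 'highpass' input is
    # expected to already be converted from seconds, e.g.
    #   hp_sigma_vols = hp_cutoff_secs / (2.0 * TR)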

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(
            highpass, 'out_file', outputnode, 'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(
            addmean, 'out_file', outputnode, 'highpassed_files')

    """
    Generate a mean functional image from the first run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles, 'highpassed_files')

    return featpreproc
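
# A hedged usage sketch of the workflow returned above (the enclosing
# factory's name and the inputnode's node name are assumptions; 'fwhm' and
# 'highpass' are the fields connected in the workflow):
#
#   preproc_wf = create_featpreproc()            # hypothetical factory
#   preproc_wf.inputs.inputnode.fwhm = 6.0       # SUSAN kernel, in mm
#   preproc_wf.inputs.inputnode.highpass = 50.0  # high-pass sigma, volumes
#   preproc_wf.run()
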
def main(derivatives, ds):

    if ds == 'ds-01':
        subjects = ['{:02d}'.format(s) for s in range(1, 20)]
    elif ds == 'ds-02':
        subjects = ['{:02d}'.format(s) for s in range(1, 16)]
        subjects.pop(3)  # remove subject 04 (index 3)

    wf_folder = '/tmp/workflow_folders'

    templates = {'preproc': op.join(derivatives, ds, 'fmriprep', 'sub-{subject}', 'func',
                                    'sub-{subject}_task-randomdotmotion_run-*_space-T1w_desc-preproc_bold.nii.gz')}

    templates['individual_mask'] = op.join(derivatives, ds, 'pca_masks', 'sub-{subject}', 'anat',
                                           'sub-{subject}_desc-{mask}_space-T1w_subroi-{subroi}_roi.nii.gz')

    wf = pe.Workflow(name='extract_signal_submasks_{}'.format(ds),
                     base_dir=wf_folder)

    mask_identity = pe.Node(niu.IdentityInterface(fields=['mask', 'subroi']),
                            name='mask_identity')
    mask_identity.iterables = [('mask', ['stnl', 'stnr']), ('subroi', ['A', 'B', 'C'])]

    selector = pe.Node(nio.SelectFiles(templates),
                       name='selector')

    selector.iterables = [('subject', subjects)]
    wf.connect(mask_identity, 'mask', selector, 'mask')
    wf.connect(mask_identity, 'subroi', selector, 'subroi')
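
    # The 'preproc' template contains a 'run-*' wildcard, so SelectFiles
    # returns a list of runs per subject; extract_signal_node below maps
    # over 'preproc' while the single mask is broadcast to every run.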

    def extract_signal(preproc, mask):
        """Extract the within-mask voxel time series from `preproc`,
        standardized to percent signal change, and write them to a CSV
        (rows are timepoints, columns are voxels)."""
        from nilearn import input_data
        from nipype.utils.filemanip import split_filename
        import os.path as op
        import pandas as pd

        _, fn, _ = split_filename(preproc)
        masker = input_data.NiftiMasker(mask, standardize='psc')

        data = pd.DataFrame(masker.fit_transform(preproc))

        new_fn = op.abspath('{}_signal.csv'.format(fn))
        data.to_csv(new_fn)

        return new_fn

    extract_signal_node = pe.MapNode(niu.Function(function=extract_signal,
                                                  input_names=['preproc', 'mask'],
                                                  output_names=['signal']),
                                     iterfield=['preproc'],
                                     name='extract_signal_node')

    wf.connect(selector, 'preproc', extract_signal_node, 'preproc')
    wf.connect(selector, 'individual_mask', extract_signal_node, 'mask')

    datasink_signal = pe.MapNode(DerivativesDataSink(base_directory=op.join(derivatives, ds),
                                                     out_path_base='extracted_signal'),
                                 iterfield=['source_file', 'in_file'],
                                 name='datasink_signal')

    wf.connect(selector, 'preproc', datasink_signal, 'source_file')
    wf.connect(extract_signal_node, 'signal', datasink_signal, 'in_file')
    wf.connect(mask_identity, 'mask', datasink_signal, 'desc')

    def get_subroi_suffix(subroi):
        return 'subroi-{}_roi'.format(subroi)

    wf.connect(mask_identity, ('subroi', get_subroi_suffix), datasink_signal, 'suffix')


    wf.run(plugin='MultiProc',
           plugin_args={'n_procs': 4})
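

# A hypothetical entry point (the derivatives path and dataset label are
# assumptions; adjust them to your layout):
if __name__ == '__main__':
    main('/data/derivatives', 'ds-01')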