Example #1
def vol2png(qcname, tag="", overlay=True, overlayiterated=True):
    import PUMI.func_preproc.Onevol as onevol

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    inputspec = pe.Node(
        utility.IdentityInterface(fields=['bg_image', 'overlay_image']),
        name='inputspec')

    analysisflow = pe.Workflow(name=qcname + tag + '_qc')

    myonevol_bg = onevol.onevol_workflow(wf_name="onebg")
    analysisflow.connect(inputspec, 'bg_image', myonevol_bg, 'inputspec.func')

    if overlay and not overlayiterated:
        #myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        #analysisflow.connect(inputspec, 'overlay_image', myonevol_ol, 'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    # Create png images for quality check
    if overlay and overlayiterated:
        myonevol_ol = onevol.onevol_workflow(wf_name="oneol")
        analysisflow.connect(inputspec, 'overlay_image', myonevol_ol,
                             'inputspec.func')
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file', 'image_edges'],
                            name='slicer')
    if not overlay:
        slicer = pe.MapNode(interface=fsl.Slicer(),
                            iterfield=['in_file'],
                            name='slicer')

    slicer.inputs.image_width = 2000
    slicer.inputs.out_file = qcname
    # output every 5th axial slice into a single picture
    slicer.inputs.sample_axial = 5
    #slicer.inputs.middle_slices = True

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".ppm")]

    analysisflow.connect(myonevol_bg, 'outputspec.func1vol', slicer, 'in_file')
    if overlay and not overlayiterated:
        analysisflow.connect(inputspec, 'overlay_image', slicer, 'image_edges')
    if overlay and overlayiterated:
        analysisflow.connect(myonevol_ol, 'outputspec.func1vol', slicer,
                             'image_edges')
    analysisflow.connect(slicer, 'out_file', ds_qc, qcname)

    return analysisflow
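
A minimal usage sketch for this snippet, assuming the module-level imports these examples rely on (os, nipype.pipeline as pe, nipype.interfaces.utility/fsl/io, PUMI.utils.globals as globals) and that globals._SinkDir_ and globals._QCDir_ have been configured; the file names below are placeholders:

# Hedged example: paths and names are assumptions, not part of the source.
qc_wf = vol2png("func2anat", tag="example", overlay=True, overlayiterated=False)
qc_wf.base_dir = "."                                        # nipype working directory
qc_wf.inputs.inputspec.bg_image = "func.nii.gz"             # background volume
qc_wf.inputs.inputspec.overlay_image = "anat_edges.nii.gz"  # drawn as edges by fsl.Slicer
qc_wf.run()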
Example #2
def create_non_uniformity_correct_4D_file(auto_clip=False, clip_low=7,
                                          clip_high=200, n_procs=12):
    """non_uniformity_correct_4D_file corrects functional files for nonuniformity on a timepoint by timepoint way.
    Internally it implements a workflow to split the in_file, correct each separately and then merge them back together.
    This is an ugly workaround as we have to find the output of the workflow's datasink somewhere, but it should work.

    Parameters
    ----------
    in_file : str
        Absolute path to nifti-file.
    auto_clip : bool (default: False)
        whether to let 3dUniformize decide on clipping boundaries
    clip_low : float (default: 7),
        lower clipping bound for 3dUniformize
    clip_high : float (default: 200),
        higher clipping bound for 3dUniformize
    n_procs : int (default: 12),
        the number of processes to run the internal workflow with

    Returns
    -------
    out_file : non-uniformity corrected file
        List of absolute paths to nifti-files.    """

    # nodes
    input_node = pe.Node(IdentityInterface(
        fields=['in_file',
                'auto_clip',
                'clip_low',
                'clip_high',
                'output_directory',
                'sub_id']), name='inputspec')
    split = pe.Node(Function(input_names='in_file', output_names=['out_files'],
                             function=split_4D_to_3D), name='split')

    uniformer = pe.MapNode(
        Uniformize(clip_high=clip_high, clip_low=clip_low, auto_clip=auto_clip,
                   outputtype='NIFTI_GZ'), name='uniformer',
        iterfield=['in_file'])
    merge = pe.MapNode(fsl.Merge(dimension='t'), name='merge',
                       iterfield=['in_files'])

    datasink = pe.Node(nio.DataSink(infields=['topup'], container=''),
                       name='sinker')
    datasink.inputs.parameterization = False

    # workflow
    nuc_wf = pe.Workflow(name='nuc')
    nuc_wf.connect(input_node, 'sub_id', datasink, 'container')
    nuc_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    nuc_wf.connect(input_node, 'in_file', split, 'in_file')
    nuc_wf.connect(split, 'out_files', uniformer, 'in_file')
    nuc_wf.connect(uniformer, 'out_file', merge, 'in_files')
    nuc_wf.connect(merge, 'merged_file', datasink, 'uni')

    # nuc_wf.run('MultiProc', plugin_args={'n_procs': n_procs})
    # out_file = glob.glob(os.path.join(td, 'uni', fn_base + '_0000*.nii.gz'))[0]

    return nuc_wf
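
split_4D_to_3D is referenced above but not shown; a plausible sketch using nibabel (the real helper may differ), followed by the run call hinted at in the commented-out line:

def split_4D_to_3D(in_file):
    """Split a 4D nifti into one 3D file per timepoint (sketch, not the original)."""
    # Imports live inside the body because nipype Function nodes
    # execute the function in an isolated namespace.
    import os
    import nibabel as nib
    out_files = []
    base = os.path.basename(in_file).split('.')[0]
    for i, vol in enumerate(nib.four_to_three(nib.load(in_file))):
        fname = os.path.abspath('%s_%04d.nii.gz' % (base, i))
        vol.to_filename(fname)
        out_files.append(fname)
    return out_files

# Hypothetical run; the input path is a placeholder.
wf = create_non_uniformity_correct_4D_file()
wf.inputs.inputspec.in_file = 'func.nii.gz'
wf.inputs.inputspec.output_directory = '.'
wf.inputs.inputspec.sub_id = 'sub-001'
wf.run('MultiProc', plugin_args={'n_procs': 12})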
Example #3
def create_melodic_workflow(name='melodic', template=None, varnorm=True):

    input_node = pe.Node(IdentityInterface(fields=['in_file']),
                         name='inputspec')

    output_node = pe.Node(IdentityInterface(fields=['out_dir']),
                          name='outputspec')

    if template is None:
        template = op.join(op.dirname(op.dirname(op.abspath(__file__))),
                           'data', 'fsf_templates', 'melodic_template.fsf')

    melodic4fix_node = pe.MapNode(interface=Melodic4fix,
                                  iterfield=['in_file', 'out_dir'],
                                  name='melodic4fix')

    # Don't know if this works. Could also set these defaults inside the
    # melodic4fix node definition...
    melodic4fix_node.inputs.template = template
    melodic4fix_node.inputs.varnorm = varnorm

    rename_ica = pe.MapNode(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=extract_task),
                            name='rename_ica',
                            iterfield=['in_file'])

    mel4fix_workflow = pe.Workflow(name=name)

    mel4fix_workflow.connect(input_node, 'in_file', melodic4fix_node,
                             'in_file')

    mel4fix_workflow.connect(input_node, 'in_file', rename_ica, 'in_file')

    mel4fix_workflow.connect(rename_ica, 'out_file', melodic4fix_node,
                             'out_dir')

    mel4fix_workflow.connect(melodic4fix_node, 'out_dir', output_node,
                             'out_dir')

    return mel4fix_workflow
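
extract_task is used above to turn each input file name into a MELODIC output directory, but its definition is not part of this snippet; a hypothetical sketch:

def extract_task(in_file):
    # Hypothetical: derive an output directory name from the functional file,
    # e.g. sub-01_task-rest_bold.nii.gz -> /abs/path/sub-01_task-rest_bold.ica
    import os.path as op
    return op.abspath(op.basename(in_file).split('.')[0] + '.ica')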
Example #4
def create_motion_confound_workflow(order=2,
                                    fd_cutoff=.2,
                                    name='motion_confound'):

    input_node = pe.Node(interface=IdentityInterface(
        fields=['par_file', 'output_directory', 'sub_id']),
                         name='inputspec')

    output_node = pe.Node(
        interface=IdentityInterface(fields=['out_fd', 'out_ext_moco']),
        name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    extend_motion_parameters = pe.MapNode(Extend_motion_parameters,
                                          iterfield=['par_file'],
                                          name='extend_motion_parameters')
    extend_motion_parameters.inputs.order = order

    framewise_disp = pe.MapNode(FramewiseDisplacement(parameter_source='FSL'),
                                iterfield=['in_file'],
                                name='framewise_disp')

    mcf_wf = pe.Workflow(name=name)
    mcf_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    mcf_wf.connect(input_node, 'sub_id', datasink, 'container')
    mcf_wf.connect(input_node, 'par_file', extend_motion_parameters,
                   'par_file')
    mcf_wf.connect(input_node, 'par_file', framewise_disp, 'in_file')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', output_node,
                   'out_ext_moco')
    mcf_wf.connect(framewise_disp, 'out_file', output_node, 'out_fd')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', datasink, 'confounds')
    mcf_wf.connect(framewise_disp, 'out_file', datasink, 'confounds.@df')

    return mcf_wf
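
A usage sketch (paths are placeholders; note that fd_cutoff is accepted by the factory but not used in the snippet above):

mcf = create_motion_confound_workflow(order=2, fd_cutoff=.2)
mcf.inputs.inputspec.par_file = ['sub-001_bold_mcf.par']  # list: the inner nodes are MapNodes
mcf.inputs.inputspec.output_directory = '.'
mcf.inputs.inputspec.sub_id = 'sub-001'
mcf.run()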
Example #5
def create_confound_workflow(name='confound'):

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'par_file', 'fast_files', 'highres2epi_mat',
        'n_comp_tcompcor', 'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(fields=[
        'all_confounds',
    ]),
                          name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    compute_DVARS = pe.MapNode(ComputeDVARS(save_all=True,
                                            remove_zerovariance=True),
                               iterfield=['in_file', 'in_mask'],
                               name='compute_DVARS')

    motion_wf = create_motion_confound_workflow(order=2)

    confound_wf = pe.Workflow(name=name)
    confound_wf.connect(input_node, 'par_file', motion_wf,
                        'inputspec.par_file')
    confound_wf.connect(input_node, 'sub_id', motion_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', motion_wf,
                        'inputspec.output_directory')

    compcor_wf = create_compcor_workflow()
    confound_wf.connect(input_node, 'in_file', compcor_wf, 'inputspec.in_file')
    confound_wf.connect(input_node, 'fast_files', compcor_wf,
                        'inputspec.fast_files')
    confound_wf.connect(input_node, 'highres2epi_mat', compcor_wf,
                        'inputspec.highres2epi_mat')
    confound_wf.connect(input_node, 'n_comp_tcompcor', compcor_wf,
                        'inputspec.n_comp_tcompcor')
    confound_wf.connect(input_node, 'n_comp_acompcor', compcor_wf,
                        'inputspec.n_comp_acompcor')
    confound_wf.connect(input_node, 'sub_id', compcor_wf, 'inputspec.sub_id')
    confound_wf.connect(input_node, 'output_directory', compcor_wf,
                        'inputspec.output_directory')

    confound_wf.connect(compcor_wf, 'outputspec.epi_mask', compute_DVARS,
                        'in_mask')
    confound_wf.connect(input_node, 'in_file', compute_DVARS, 'in_file')

    concat = pe.MapNode(Concat_confound_files,
                        iterfield=['ext_par_file', 'fd_file', 'dvars_file'],
                        name='concat')

    confound_wf.connect(motion_wf, 'outputspec.out_ext_moco', concat,
                        'ext_par_file')
    confound_wf.connect(motion_wf, 'outputspec.out_fd', concat, 'fd_file')
    confound_wf.connect(compcor_wf, 'outputspec.acompcor_file', concat,
                        'acompcor_file')
    #confound_wf.connect(compcor_wf, 'outputspec.tcompcor_file', concat,
    #                    'tcompcor_file')
    confound_wf.connect(compute_DVARS, 'out_all', concat, 'dvars_file')
    confound_wf.connect(input_node, 'sub_id', datasink, 'container')
    confound_wf.connect(input_node, 'output_directory', datasink,
                        'base_directory')
    confound_wf.connect(concat, 'out_file', datasink, 'confounds')

    return confound_wf
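
Concat_confound_files is an externally defined Function interface; a minimal sketch of the column-wise concatenation it presumably performs, assuming plain numeric text files with matching row counts (names and format are assumptions):

def concat_confound_files(ext_par_file, fd_file, dvars_file, acompcor_file):
    # Hypothetical: stack all confound regressors column-wise into one file.
    import os
    import numpy as np
    arrays = [np.loadtxt(f) for f in (ext_par_file, fd_file,
                                      dvars_file, acompcor_file)]
    arrays = [a if a.ndim > 1 else a[:, np.newaxis] for a in arrays]
    out_file = os.path.abspath('all_confounds.tsv')
    np.savetxt(out_file, np.hstack(arrays), delimiter='\t')
    return out_file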
Example #6
def create_registration_workflow(analysis_info, name='reg'):
    """uses sub-workflows to perform different registration steps.
    Requires fsl and freesurfer tools
    Parameters
    ----------
    name : string
        name of workflow
    analysis_info : dict
        contains session information needed for workflow, such as
        whether to use FreeSurfer or FLIRT etc.
    Example
    -------
    >>> registration_workflow = create_registration_workflow(name = 'registration_workflow', analysis_info = {'use_FS':True})
    >>> registration_workflow.inputs.inputspec.output_directory = '/data/project/raw/BIDS/sj_1/'
    >>> registration_workflow.inputs.inputspec.EPI_space_file = 'example_func.nii.gz'
    >>> registration_workflow.inputs.inputspec.T1_file = 'T1.nii.gz' # if using freesurfer, this file will be created instead of used.
    >>> registration_workflow.inputs.inputspec.freesurfer_subject_ID = 'sub_01'
    >>> registration_workflow.inputs.inputspec.freesurfer_subject_dir = '$SUBJECTS_DIR'
    >>> registration_workflow.inputs.inputspec.standard_file = '/usr/local/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz'

    Inputs::
          inputspec.output_directory : directory in which to sink the result files
          inputspec.T1_file : T1 anatomy file
          inputspec.EPI_space_file : EPI session file
          inputspec.freesurfer_subject_ID : FS subject ID
          inputspec.freesurfer_subject_dir : $SUBJECTS_DIR
    Outputs::
           outputspec.out_reg_file : BBRegister registration file that maps EPI space to T1
           outputspec.out_matrix_file : FLIRT registration file that maps EPI space to T1
           outputspec.out_inv_matrix_file : FLIRT registration file that maps T1 space to EPI
    """

    ### NODES
    input_node = pe.Node(IdentityInterface(fields=[
        'EPI_space_file', 'output_directory', 'freesurfer_subject_ID',
        'freesurfer_subject_dir', 'T1_file', 'standard_file', 'sub_id'
    ]),
                         name='inputspec')

    ### Workflow to be returned
    registration_workflow = pe.Workflow(name=name)

    ### sub-workflows
    epi_2_T1 = create_epi_to_T1_workflow(name='epi',
                                         use_FS=analysis_info['use_FS'],
                                         do_FAST=analysis_info['do_FAST'])
    T1_to_standard = create_T1_to_standard_workflow(
        name='T1_to_standard',
        use_FS=analysis_info['use_FS'],
        do_fnirt=analysis_info['do_fnirt'],
        use_AFNI_ss=analysis_info['use_AFNI_ss'])
    concat_2_feat = create_concat_2_feat_workflow(name='concat_2_feat')

    output_node = pe.Node(IdentityInterface(
        fields=('EPI_T1_matrix_file', 'T1_EPI_matrix_file',
                'EPI_T1_register_file', 'T1_standard_matrix_file',
                'standard_T1_matrix_file', 'T1_file', 'standard_file',
                'EPI_space_file')),
                          name='outputspec')

    ###########################################################################
    # EPI to T1
    ###########################################################################

    registration_workflow.connect([(input_node, epi_2_T1, [
        ('EPI_space_file', 'inputspec.EPI_space_file'),
        ('output_directory', 'inputspec.output_directory'),
        ('freesurfer_subject_ID', 'inputspec.freesurfer_subject_ID'),
        ('freesurfer_subject_dir', 'inputspec.freesurfer_subject_dir'),
        ('T1_file', 'inputspec.T1_file')
    ])])

    ###########################################################################
    # T1 to standard
    ###########################################################################

    registration_workflow.connect([(input_node, T1_to_standard, [
        ('freesurfer_subject_ID', 'inputspec.freesurfer_subject_ID'),
        ('freesurfer_subject_dir', 'inputspec.freesurfer_subject_dir'),
        ('T1_file', 'inputspec.T1_file'),
        ('standard_file', 'inputspec.standard_file')
    ])])

    ###########################################################################
    # concatenation of all matrices
    ###########################################################################

    # then, the inputs from the previous sub-workflows
    registration_workflow.connect([(epi_2_T1, concat_2_feat, [
        ('outputspec.EPI_T1_matrix_file', 'inputspec.EPI_T1_matrix_file'),
    ])])

    registration_workflow.connect([(T1_to_standard, concat_2_feat, [
        ('outputspec.T1_standard_matrix_file',
         'inputspec.T1_standard_matrix_file'),
    ])])

    ###########################################################################
    # Rename nodes, for the datasink
    ###########################################################################

    if analysis_info['use_FS']:
        rename_register = pe.Node(Rename(format_string='register.dat',
                                         keep_ext=False),
                                  name='rename_register')

        registration_workflow.connect(epi_2_T1,
                                      'outputspec.EPI_T1_register_file',
                                      rename_register, 'in_file')

    rename_example_func = pe.Node(Rename(format_string='example_func',
                                         keep_ext=True),
                                  name='rename_example_func')

    registration_workflow.connect(input_node, 'EPI_space_file',
                                  rename_example_func, 'in_file')

    rename_highres = pe.Node(Rename(format_string='highres', keep_ext=True),
                             name='rename_highres')
    registration_workflow.connect(T1_to_standard, 'outputspec.T1_file',
                                  rename_highres, 'in_file')

    rename_standard = pe.Node(Rename(format_string='standard', keep_ext=True),
                              name='rename_standard')

    registration_workflow.connect(input_node, 'standard_file', rename_standard,
                                  'in_file')

    rename_example_func2standard = pe.Node(Rename(
        format_string='example_func2standard.mat', keep_ext=False),
                                           name='rename_example_func2standard')

    registration_workflow.connect(concat_2_feat,
                                  'outputspec.EPI_standard_matrix_file',
                                  rename_example_func2standard, 'in_file')

    rename_example_func2highres = pe.Node(Rename(
        format_string='example_func2highres.mat', keep_ext=False),
                                          name='rename_example_func2highres')

    registration_workflow.connect(epi_2_T1, 'outputspec.EPI_T1_matrix_file',
                                  rename_example_func2highres, 'in_file')

    rename_highres2standard = pe.Node(Rename(
        format_string='highres2standard.mat', keep_ext=False),
                                      name='rename_highres2standard')
    registration_workflow.connect(T1_to_standard,
                                  'outputspec.T1_standard_matrix_file',
                                  rename_highres2standard, 'in_file')

    rename_standard2example_func = pe.Node(Rename(
        format_string='standard2example_func.mat', keep_ext=False),
                                           name='rename_standard2example_func')

    registration_workflow.connect(concat_2_feat,
                                  'outputspec.standard_EPI_matrix_file',
                                  rename_standard2example_func, 'in_file')

    rename_highres2example_func = pe.Node(Rename(
        format_string='highres2example_func.mat', keep_ext=False),
                                          name='rename_highres2example_func')

    registration_workflow.connect(epi_2_T1, 'outputspec.T1_EPI_matrix_file',
                                  rename_highres2example_func, 'in_file')

    rename_standard2highres = pe.Node(Rename(
        format_string='standard2highres.mat', keep_ext=False),
                                      name='rename_standard2highres')
    registration_workflow.connect(T1_to_standard,
                                  'outputspec.standard_T1_matrix_file',
                                  rename_standard2highres, 'in_file')

    # outputs via datasink
    datasink = pe.Node(DataSink(infields=['reg']), name='sinker')
    datasink.inputs.parameterization = False
    registration_workflow.connect(input_node, 'output_directory', datasink,
                                  'base_directory')
    registration_workflow.connect(input_node, 'sub_id', datasink, 'container')

    # NEW SETUP WITH RENAME (WITHOUT MERGER)
    if analysis_info['use_FS']:
        registration_workflow.connect(rename_register, 'out_file', datasink,
                                      'reg.@dat')

    registration_workflow.connect(rename_example_func, 'out_file', datasink,
                                  'reg.@example_func')
    registration_workflow.connect(rename_standard, 'out_file', datasink,
                                  'reg.@standard')
    registration_workflow.connect(rename_highres, 'out_file', datasink,
                                  'reg.@highres')
    registration_workflow.connect(rename_example_func2highres, 'out_file',
                                  datasink, 'reg.@example_func2highres')
    registration_workflow.connect(rename_highres2example_func, 'out_file',
                                  datasink, 'reg.@highres2example_func')
    registration_workflow.connect(rename_highres2standard, 'out_file',
                                  datasink, 'reg.@highres2standard')
    registration_workflow.connect(rename_standard2highres, 'out_file',
                                  datasink, 'reg.@standard2highres')
    registration_workflow.connect(rename_standard2example_func, 'out_file',
                                  datasink, 'reg.@standard2example_func')
    registration_workflow.connect(rename_example_func2standard, 'out_file',
                                  datasink, 'reg.@example_func2standard')

    registration_workflow.connect(rename_highres, 'out_file', output_node,
                                  'T1_file')

    # put the nifti and mat files, renamed above, in the reg/feat directory.
    # don't yet know what's wrong with this merge to datasink
    # registration_workflow.connect(merge_for_reg_N, 'out', datasink, 'reg')

    return registration_workflow
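
The workflow reads four flags from analysis_info; a minimal configuration covering every key accessed above (the comments describe the likely meaning of each flag and are assumptions):

analysis_info = {
    'use_FS': True,        # use FreeSurfer (bbregister) for the EPI-to-T1 step
    'do_FAST': False,      # run FSL FAST in the epi_2_T1 sub-workflow
    'do_fnirt': False,     # nonlinear T1-to-standard registration with FNIRT
    'use_AFNI_ss': False,  # AFNI-based skull-stripping in T1_to_standard
}
registration_workflow = create_registration_workflow(analysis_info, name='reg')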
Example #7
def extract_timeseries(SinkTag="connectivity",
                       wf_name="extract_timeseries",
                       modularise=True):
    ########################################################################
    # Extract timeseries
    ########################################################################

    import nipype.interfaces.nilearn as learn
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc
    import os

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'std_func',
            'atlas_file',  # nii labelmap (or 4D probmaps)
            'labels',  # list of short names to regions
            'modules'  # list of modules of regions
        ]),
        name='inputspec')
    # re-label atlas, so that regions corresponding to the same modules follow each other
    if modularise:
        relabel_atls = pe.Node(interface=Function(
            input_names=['atlas_file', 'modules', 'labels'],
            output_names=[
                'relabelled_atlas_file', 'reordered_modules',
                'reordered_labels', 'newlabels_file'
            ],
            function=relabel_atlas),
                               name='relabel_atlas')
        # Save outputs which are important
        ds_nii = pe.Node(interface=io.DataSink(), name='ds_relabeled_atlas')
        ds_nii.inputs.base_directory = SinkDir
        ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

        # Save outputs which are important
        ds_newlabels = pe.Node(interface=io.DataSink(), name='ds_newlabels')
        ds_newlabels.inputs.base_directory = SinkDir
        ds_newlabels.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    extract_timeseries = pe.MapNode(
        interface=learn.SignalExtraction(detrend=False),
        iterfield=['in_file'],
        name='extract_timeseries')

    # Save outputs which are important
    ds_txt = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_txt.inputs.base_directory = SinkDir
    ds_txt.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".tsv")]

    #QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)

    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'timeseries_file', 'relabelled_atlas_file', 'reordered_modules',
        'reordered_labels'
    ]),
                         name='outputspec')

    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    analysisflow.connect(inputspec, 'std_func', extract_timeseries, 'in_file')
    if modularise:
        analysisflow.connect(inputspec, 'atlas_file', relabel_atls,
                             'atlas_file')
        analysisflow.connect(inputspec, 'modules', relabel_atls, 'modules')
        analysisflow.connect(inputspec, 'labels', relabel_atls, 'labels')

        analysisflow.connect(relabel_atls, 'relabelled_atlas_file',
                             extract_timeseries, 'label_files')
        analysisflow.connect(relabel_atls, 'reordered_labels',
                             extract_timeseries, 'class_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', timeseries_qc,
                             'inputspec.modules')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file',
                             timeseries_qc, 'inputspec.atlas')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', ds_nii,
                             'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'newlabels_file', ds_newlabels,
                             'atlas_relabeled')
        analysisflow.connect(relabel_atls, 'relabelled_atlas_file', outputspec,
                             'relabelled_atlas_file')
        analysisflow.connect(relabel_atls, 'reordered_labels', outputspec,
                             'reordered_labels')
        analysisflow.connect(relabel_atls, 'reordered_modules', outputspec,
                             'reordered_modules')
    else:
        analysisflow.connect(inputspec, 'atlas_file', extract_timeseries,
                             'label_files')
        analysisflow.connect(inputspec, 'labels', extract_timeseries,
                             'class_labels')
        analysisflow.connect(inputspec, 'modules', timeseries_qc,
                             'inputspec.modules')
        analysisflow.connect(inputspec, 'atlas_file', timeseries_qc,
                             'inputspec.atlas')
        analysisflow.connect(inputspec, 'atlas_file', outputspec,
                             'relabelled_atlas_file')
        analysisflow.connect(inputspec, 'labels', outputspec,
                             'reordered_labels')
        analysisflow.connect(inputspec, 'modules', outputspec,
                             'reordered_modules')

    analysisflow.connect(extract_timeseries, 'out_file', ds_txt,
                         'regional_timeseries')
    analysisflow.connect(extract_timeseries, 'out_file', timeseries_qc,
                         'inputspec.timeseries')

    analysisflow.connect(extract_timeseries, 'out_file', outputspec,
                         'timeseries_file')

    return analysisflow
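
A wiring sketch, assuming globals._SinkDir_ has been configured; paths and label names are placeholders, and labels/modules must have one entry per atlas region:

ts_wf = extract_timeseries(wf_name="extract_timeseries")
ts_wf.inputs.inputspec.std_func = ['func_std.nii.gz']       # list: MapNode input
ts_wf.inputs.inputspec.atlas_file = 'atlas_labelmap.nii.gz'
ts_wf.inputs.inputspec.labels = ['region-1', 'region-2']
ts_wf.inputs.inputspec.modules = ['module-A', 'module-A']
ts_wf.run()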
Example #8
def create_all_calcarine_reward_preprocessing_workflow(
        analysis_info, name='all_calcarine_reward'):
    import os.path as op
    import tempfile
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from spynoza.nodes.utils import dyns_min_1, topup_scan_params, apply_scan_params
    from nipype.interfaces.io import SelectFiles, DataSink

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.nodes.filtering import savgol_filter
    from spynoza.nodes.utils import get_scaninfo, pickfirst, percent_signal_change, average_over_runs, pickle_to_json, set_nifti_intercept_slope
    from spynoza.workflows.topup_unwarping import create_topup_workflow
    from spynoza.workflows.B0_unwarping import create_B0_workflow
    from spynoza.workflows.registration import create_registration_workflow
    from spynoza.workflows.retroicor import create_retroicor_workflow
    from spynoza.workflows.sub_workflows.masks import create_masks_from_surface_workflow
    from spynoza.nodes.fit_nuisances import fit_nuisances

    from motion_correction import create_motion_correction_workflow
    from utils.utils import convert_edf_2_hdf5, mask_nii_2_hdf5
    from utils.utils import convert_hdf_eye_to_tsv

    ########################################################################################
    # nodes
    ########################################################################################

    input_node = pe.Node(IdentityInterface(fields=[
        'raw_directory', 'output_directory', 'FS_ID', 'FS_subject_dir',
        'sub_id', 'sess_id', 'which_file_is_EPI_space', 'standard_file',
        'psc_func', 'MB_factor', 'tr', 'slice_direction', 'phys_sample_rate',
        'slice_timing', 'slice_order', 'nr_dummies', 'wfs', 'epi_factor',
        'acceleration', 'te_diff', 'echo_time', 'phase_encoding_direction'
    ]),
                         name='inputspec')

    # i/o node
    datasource_templates = dict(
        func='{sub_id}/{sess_id}/func/*bold.nii.gz',
        physio='{sub_id}/{sess_id}/func/*.log',
        events='{sub_id}/{sess_id}/func/*.pickle',
        eye='{sub_id}/{sess_id}/func/*.edf',
        anat='{sub_id}/{sess_id}/anat/*_inplaneT2.nii.gz',
        reg='{sub_id}/{sess_id}/anat/*_inplaneT2.mat')
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    output_node = pe.Node(IdentityInterface(
        fields=(['temporal_filtered_files', 'percent_signal_change_files'])),
                          name='outputspec')

    # node for temporal filtering
    sgfilter = pe.MapNode(Function(input_names=['in_file'],
                                   output_names=['out_file'],
                                   function=savgol_filter),
                          name='sgfilter',
                          iterfield=['in_file'])

    # node for converting pickle files to json
    pj = pe.MapNode(Function(input_names=['in_file'],
                             output_names=['out_file'],
                             function=pickle_to_json),
                    name='pj',
                    iterfield=['in_file'])

    # node for percent signal change
    psc = pe.MapNode(Function(input_names=['in_file', 'func'],
                              output_names=['out_file'],
                              function=percent_signal_change),
                     name='percent_signal_change',
                     iterfield=['in_file'])

    # node to select the nii files that have physio information
    physio_for_niis = pe.Node(Function(
        input_names=['all_input_files', 'all_physio_files'],
        output_names=['files_with_physio'],
        function=which_files_have_physio),
                              name='physio_for_niis')

    physio_for_mocos = pe.Node(Function(
        input_names=['all_input_files', 'all_physio_files', 'input_extension'],
        output_names=['files_with_physio'],
        function=which_files_have_physio),
                               name='physio_for_mocos')
    physio_for_mocos.inputs.input_extension = '_bold_brain_mcf.niiext_moco_pars.par'

    # node for nuisance regression
    fit_nuis = pe.MapNode(
        Function(
            input_names=['in_file', 'slice_regressor_list', 'vol_regressors'],
            output_names=['res_file', 'rsq_file', 'beta_file'],
            function=fit_nuisances),
        name='fit_nuisances',
        iterfield=['in_file', 'slice_regressor_list', 'vol_regressors'])

    edf_converter = pe.MapNode(Function(input_names=['edf_file'],
                                        output_names=['hdf5_file'],
                                        function=convert_edf_2_hdf5),
                               name='edf_converter',
                               iterfield=['edf_file'])

    hdf_tsv_converter = pe.MapNode(Function(input_names=['hdf5_file'],
                                            output_names=['tsv_file'],
                                            function=convert_hdf_eye_to_tsv),
                                   name='hdf_tsv_converter',
                                   iterfield=['hdf5_file'])

    behavior_tsv_converter = pe.Node(Function(
        input_names=['hdf5_files', 'reward_signal_unpredictable'],
        output_names=['tsv_files'],
        function=convert_behavior),
                                     name='behavior_tsv_converter')
    behavior_tsv_converter.inputs.reward_signal_unpredictable = analysis_info[
        'which_reward_sound_unpredictable']

    # node for datasinking
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    all_calcarine_reward_workflow = pe.Workflow(name=name)

    all_calcarine_reward_workflow.connect(input_node, 'raw_directory',
                                          datasource, 'base_directory')
    all_calcarine_reward_workflow.connect(input_node, 'sub_id', datasource,
                                          'sub_id')
    all_calcarine_reward_workflow.connect(input_node, 'sess_id', datasource,
                                          'sess_id')

    # behavioral pickle to json
    all_calcarine_reward_workflow.connect(datasource, 'events', pj, 'in_file')
    all_calcarine_reward_workflow.connect(datasource, 'eye', edf_converter,
                                          'edf_file')

    all_calcarine_reward_workflow.connect(edf_converter, 'hdf5_file',
                                          hdf_tsv_converter, 'hdf5_file')
    all_calcarine_reward_workflow.connect(edf_converter, 'hdf5_file',
                                          behavior_tsv_converter, 'hdf5_files')

    # motion correction, using T2 inplane anatomicals to prime
    # the motion correction to the standard EPI space
    motion_proc = create_motion_correction_workflow(analysis_info, 'moco')
    all_calcarine_reward_workflow.connect(input_node, 'tr', motion_proc,
                                          'inputspec.tr')
    all_calcarine_reward_workflow.connect(input_node, 'output_directory',
                                          motion_proc,
                                          'inputspec.output_directory')
    all_calcarine_reward_workflow.connect(input_node,
                                          'which_file_is_EPI_space',
                                          motion_proc,
                                          'inputspec.which_file_is_EPI_space')
    all_calcarine_reward_workflow.connect(datasource, 'func', motion_proc,
                                          'inputspec.in_files')
    all_calcarine_reward_workflow.connect(datasource, 'anat', motion_proc,
                                          'inputspec.inplane_T2_files')
    all_calcarine_reward_workflow.connect(datasource, 'reg', motion_proc,
                                          'inputspec.T2_files_reg_matrices')

    # registration
    reg = create_registration_workflow(analysis_info, name='reg')
    all_calcarine_reward_workflow.connect(input_node, 'output_directory', reg,
                                          'inputspec.output_directory')
    all_calcarine_reward_workflow.connect(motion_proc,
                                          'outputspec.EPI_space_file', reg,
                                          'inputspec.EPI_space_file')
    all_calcarine_reward_workflow.connect(input_node, 'FS_ID', reg,
                                          'inputspec.freesurfer_subject_ID')
    all_calcarine_reward_workflow.connect(input_node, 'FS_subject_dir', reg,
                                          'inputspec.freesurfer_subject_dir')
    all_calcarine_reward_workflow.connect(input_node, 'standard_file', reg,
                                          'inputspec.standard_file')
    # the T1_file entry could be empty sometimes, depending on the output of the
    # datasource. Check this.
    # all_calcarine_reward_workflow.connect(reg, 'outputspec.T1_file', reg, 'inputspec.T1_file')

    # temporal filtering
    all_calcarine_reward_workflow.connect(motion_proc,
                                          'outputspec.motion_corrected_files',
                                          sgfilter, 'in_file')

    # node for percent signal change
    all_calcarine_reward_workflow.connect(input_node, 'psc_func', psc, 'func')
    all_calcarine_reward_workflow.connect(sgfilter, 'out_file', psc, 'in_file')

    # connect filtering and psc results to output node
    all_calcarine_reward_workflow.connect(sgfilter, 'out_file', output_node,
                                          'temporal_filtered_files')
    all_calcarine_reward_workflow.connect(psc, 'out_file', output_node,
                                          'percent_signal_change_files')

    # retroicor functionality
    retr = create_retroicor_workflow(
        name='retroicor',
        order_or_timing=analysis_info['retroicor_order_or_timing'])

    # select those nii files with physio
    all_calcarine_reward_workflow.connect(datasource, 'func', physio_for_niis,
                                          'all_input_files')
    all_calcarine_reward_workflow.connect(datasource, 'physio',
                                          physio_for_niis, 'all_physio_files')
    all_calcarine_reward_workflow.connect(physio_for_niis, 'files_with_physio',
                                          retr, 'inputspec.in_files')

    all_calcarine_reward_workflow.connect(datasource, 'physio', retr,
                                          'inputspec.phys_files')
    all_calcarine_reward_workflow.connect(input_node, 'nr_dummies', retr,
                                          'inputspec.nr_dummies')
    all_calcarine_reward_workflow.connect(input_node, 'MB_factor', retr,
                                          'inputspec.MB_factor')
    all_calcarine_reward_workflow.connect(input_node, 'tr', retr,
                                          'inputspec.tr')
    all_calcarine_reward_workflow.connect(input_node, 'slice_direction', retr,
                                          'inputspec.slice_direction')
    all_calcarine_reward_workflow.connect(input_node, 'slice_timing', retr,
                                          'inputspec.slice_timing')
    all_calcarine_reward_workflow.connect(input_node, 'slice_order', retr,
                                          'inputspec.slice_order')
    all_calcarine_reward_workflow.connect(input_node, 'phys_sample_rate', retr,
                                          'inputspec.phys_sample_rate')

    # fit nuisances from retroicor
    # all_calcarine_reward_workflow.connect(retr, 'outputspec.evs', fit_nuis, 'slice_regressor_list')
    # select the relevant motion correction files, using selection function
    # all_calcarine_reward_workflow.connect(motion_proc, 'outputspec.extended_motion_correction_parameters', physio_for_mocos, 'all_input_files')
    # all_calcarine_reward_workflow.connect(datasource, 'physio', physio_for_mocos, 'all_physio_files')
    # all_calcarine_reward_workflow.connect(physio_for_mocos, 'files_with_physio', fit_nuis, 'vol_regressors')

    # all_calcarine_reward_workflow.connect(physio_for_niis, 'files_with_physio', fit_nuis, 'in_file')

    # surface-based label import in to EPI space
    masks_from_surface = create_masks_from_surface_workflow(
        name='masks_from_surface')
    masks_from_surface.inputs.inputspec.label_directory = 'retmap'
    masks_from_surface.inputs.inputspec.fill_thresh = 0.005
    masks_from_surface.inputs.inputspec.re = '*.label'

    all_calcarine_reward_workflow.connect(motion_proc,
                                          'outputspec.EPI_space_file',
                                          masks_from_surface,
                                          'inputspec.EPI_space_file')
    all_calcarine_reward_workflow.connect(input_node, 'output_directory',
                                          masks_from_surface,
                                          'inputspec.output_directory')
    all_calcarine_reward_workflow.connect(input_node, 'FS_subject_dir',
                                          masks_from_surface,
                                          'inputspec.freesurfer_subject_dir')
    all_calcarine_reward_workflow.connect(input_node, 'FS_ID',
                                          masks_from_surface,
                                          'inputspec.freesurfer_subject_ID')
    all_calcarine_reward_workflow.connect(reg, 'rename_register.out_file',
                                          masks_from_surface,
                                          'inputspec.reg_file')

    # ########################################################################################
    # # outputs via datasink
    # ########################################################################################

    all_calcarine_reward_workflow.connect(input_node, 'output_directory',
                                          datasink, 'base_directory')

    # # sink out events and eyelink files
    all_calcarine_reward_workflow.connect(pj, 'out_file', datasink, 'events')

    all_calcarine_reward_workflow.connect(sgfilter, 'out_file', datasink, 'tf')
    all_calcarine_reward_workflow.connect(psc, 'out_file', datasink, 'psc')

    all_calcarine_reward_workflow.connect(retr, 'outputspec.new_phys',
                                          datasink, 'phys.log')
    all_calcarine_reward_workflow.connect(retr, 'outputspec.fig_file',
                                          datasink, 'phys.figs')
    all_calcarine_reward_workflow.connect(retr, 'outputspec.evs', datasink,
                                          'phys.evs')

    # all_calcarine_reward_workflow.connect(fit_nuis, 'res_file', datasink, 'phys.res')
    # all_calcarine_reward_workflow.connect(fit_nuis, 'rsq_file', datasink, 'phys.rsq')
    # all_calcarine_reward_workflow.connect(fit_nuis, 'beta_file', datasink, 'phys.betas')

    all_calcarine_reward_workflow.connect(masks_from_surface,
                                          'outputspec.masks', datasink,
                                          'masks')

    all_calcarine_reward_workflow.connect(datasource, 'eye', datasink, 'eye')
    all_calcarine_reward_workflow.connect(edf_converter, 'hdf5_file', datasink,
                                          'eye.h5')
    all_calcarine_reward_workflow.connect(hdf_tsv_converter, 'tsv_file',
                                          datasink, 'eye.tsv')
    all_calcarine_reward_workflow.connect(behavior_tsv_converter, 'tsv_files',
                                          datasink, 'events.tsv')

    return all_calcarine_reward_workflow
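
A run sketch for this top-level workflow; only a subset of the many inputspec fields is shown and every value is a placeholder:

wf = create_all_calcarine_reward_preprocessing_workflow(analysis_info,
                                                        name='all_calcarine_reward')
wf.inputs.inputspec.raw_directory = '/data/raw'
wf.inputs.inputspec.output_directory = '/data/derived'
wf.inputs.inputspec.sub_id = 'sub-001'
wf.inputs.inputspec.sess_id = 'ses-1'
wf.inputs.inputspec.which_file_is_EPI_space = 'middle'
# ... the remaining fields (tr, MB_factor, slice_timing, nr_dummies, etc.)
# must be set as well before running.
wf.run('MultiProc', plugin_args={'n_procs': 8})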
Example #9
def create_extended_susan_workflow(name='extended_susan', separate_masks=True):

    input_node = pe.Node(IdentityInterface(fields=['in_file',
                                                   'fwhm',
                                                   'EPI_session_space',
                                                   'output_directory',
                                                   'sub_id']), name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(fields=['smoothed_files',
                                                              'mask',
                                                              'mean']), name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    # first link the workflow's output_directory into the datasink.

    esw = pe.Workflow(name=name)

    esw.connect(input_node, 'output_directory', datasink, 'base_directory')
    esw.connect(input_node, 'sub_id', datasink, 'container')

    meanfuncmask = pe.Node(interface=fsl.BET(mask=True,
                                             no_output=True,
                                             frac=0.3),
                           name='meanfuncmask')

    esw.connect(input_node, 'EPI_session_space', meanfuncmask, 'in_file')

    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name='maskfunc')

    esw.connect(input_node, 'in_file', maskfunc, 'in_file')
    esw.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2')

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """

    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    esw.connect(maskfunc, 'out_file', getthresh, 'in_file')

    """
    Threshold each functional run at 10% of its 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')

    esw.connect(maskfunc, 'out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """

    esw.connect(getthresh, ('out_stat', getthreshop), threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    esw.connect(input_node, 'in_file', medianval, 'in_file')
    esw.connect(threshold, 'out_file', medianval, 'mask_file')

    """
    Dilate the mask
    """

    dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                     op_string='-dilF'),
                            iterfield=['in_file'],
                            name='dilatemask')
    esw.connect(threshold, 'out_file', dilatemask, 'in_file')
    esw.connect(dilatemask, 'out_file', output_node, 'mask')

    """
    Mask the motion corrected functional runs with the dilated mask
    """

    maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc2')
    esw.connect(input_node, 'in_file', maskfunc2, 'in_file')
    esw.connect(dilatemask, 'out_file', maskfunc2, 'in_file2')

    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth(separate_masks=separate_masks)

    esw.connect(input_node, 'fwhm', smooth, 'inputnode.fwhm')
    esw.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files')
    esw.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    esw.connect(smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    esw.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=Merge(2),
                         name='concat')
    esw.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1')
    esw.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=Select(), name='select')

    esw.connect(concatnode, 'out', selectnode, 'inlist')

    esw.connect(input_node, ('fwhm', chooseindex), selectnode, 'index')
    esw.connect(selectnode, 'out', output_node, 'smoothed_files')

    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    esw.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    esw.connect(medianval, ('out_stat', getmeanscale), meanscale, 'op_string')

    """
    Generate a mean functional image from the first run
    """

    meanfunc3 = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean',
                                                 suffix='_mean'),
                        name='meanfunc3')

    esw.connect(meanscale, ('out_file', pickfirst), meanfunc3, 'in_file')
    esw.connect(meanfunc3, 'out_file', output_node, 'mean')

    # Datasink
    esw.connect(meanscale, 'out_file', datasink, 'filtering')
    esw.connect(selectnode, 'out', datasink, 'filtering.@smoothed')
    esw.connect(dilatemask, 'out_file', datasink, 'filtering.@mask')

    return esw
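
getthreshop, getmeanscale, chooseindex, tolist and pickfirst are not defined in this snippet. They follow the pattern of nipype's FSL preprocessing examples, so sketches along these lines are plausible (treat them as assumptions):

def getthreshop(thresh):
    # one '-thr ... -Tmin -bin' op_string per run, at 10% of the 98th percentile
    return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]

def getmeanscale(medianvals):
    # one '-mul ...' op_string per run, scaling its median to 10000
    return ['-mul %.10f' % (10000. / val) for val in medianvals]

def chooseindex(fwhm):
    # pick the unsmoothed (0) or smoothed (1) branch from the Merge node
    return [0] if fwhm < 1 else [1]

def tolist(x):
    return x if isinstance(x, list) else [x]

def pickfirst(files):
    return files[0] if isinstance(files, list) else files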
Example #10
def create_motion_correction_workflow(name='moco',
                                      method='AFNI',
                                      extend_moco_params=False):
    """uses sub-workflows to perform different registration steps.
    Requires fsl and freesurfer tools
    Parameters
    ----------
    name : string
        name of workflow

    Example
    -------
    >>> motion_correction_workflow = create_motion_correction_workflow('motion_correction_workflow')
    >>> motion_correction_workflow.inputs.inputspec.output_directory = '/data/project/raw/BIDS/sj_1/'
    >>> motion_correction_workflow.inputs.inputspec.in_files = ['sub-001.nii.gz','sub-002.nii.gz']
    >>> motion_correction_workflow.inputs.inputspec.which_file_is_EPI_space = 'middle'

    Inputs::
          inputspec.output_directory : directory in which to sink the result files
          inputspec.in_files : list of functional files
          inputspec.which_file_is_EPI_space : determines which file is the 'standard EPI space'
    Outputs::
           outputspec.EPI_space_file : standard EPI space file, one timepoint
           outputspec.motion_corrected_files : motion corrected files
           outputspec.motion_correction_plots : motion correction plots
           outputspec.motion_correction_parameters : motion correction parameters
    """

    ### NODES
    input_node = pe.Node(IdentityInterface(fields=[
        'in_files', 'output_directory', 'which_file_is_EPI_space', 'sub_id',
        'tr'
    ]),
                         name='inputspec')
    output_node = pe.Node(IdentityInterface(fields=([
        'motion_corrected_files', 'EPI_space_file', 'mask_EPI_space_file',
        'motion_correction_plots', 'motion_correction_parameters',
        'extended_motion_correction_parameters',
        'new_motion_correction_parameters'
    ])),
                          name='outputspec')

    ########################################################################################
    # Invariant nodes
    ########################################################################################

    EPI_file_selector_node = pe.Node(interface=EPI_file_selector,
                                     name='EPI_file_selector_node')
    mean_bold = pe.Node(interface=fsl.maths.MeanImage(dimension='T'),
                        name='mean_space')
    rename_mean_bold = pe.Node(niu.Rename(format_string='session_EPI_space',
                                          keep_ext=True),
                               name='rename_mean_bold')

    ########################################################################################
    # Workflow
    ########################################################################################

    motion_correction_workflow = pe.Workflow(name=name)
    motion_correction_workflow.connect(input_node, 'which_file_is_EPI_space',
                                       EPI_file_selector_node, 'which_file')
    motion_correction_workflow.connect(input_node, 'in_files',
                                       EPI_file_selector_node, 'in_files')

    ########################################################################################
    # outputs via datasink
    ########################################################################################
    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    # first link the workflow's output_directory into the datasink.
    motion_correction_workflow.connect(input_node, 'output_directory',
                                       datasink, 'base_directory')
    motion_correction_workflow.connect(input_node, 'sub_id', datasink,
                                       'container')

    ########################################################################################
    # FSL MCFlirt
    ########################################################################################
    # new approach, which should aid in the joint motion correction of
    # multiple sessions together, by pre-registering each run.
    # the strategy would be to, for each run, take the first TR
    # and FLIRT-align (6dof) it to the EPI_space file.
    # then we can use this as an --infile argument to mcflirt.

    if method == 'FSL':

        rename_motion_files = pe.MapNode(
            niu.Rename(keep_ext=False),
            name='rename_motion_files',
            iterfield=['in_file', 'format_string'])

        remove_niigz_ext = pe.MapNode(interface=Remove_extension,
                                      name='remove_niigz_ext',
                                      iterfield=['in_file'])

        motion_correct_EPI_space = pe.Node(interface=fsl.MCFLIRT(
            cost='normcorr', interpolation='sinc', mean_vol=True),
                                           name='motion_correct_EPI_space')

        motion_correct_all = pe.MapNode(interface=fsl.MCFLIRT(
            save_mats=True,
            save_plots=True,
            cost='normcorr',
            interpolation='sinc',
            stats_imgs=True),
                                        name='motion_correct_all',
                                        iterfield=['in_file'])

        plot_motion = pe.MapNode(
            interface=fsl.PlotMotionParams(in_source='fsl'),
            name='plot_motion',
            iterfield=['in_file'])

        if extend_moco_params:
            # make extend_motion_pars node here
            # extend_motion_pars = pe.MapNode(Function(input_names=['moco_par_file', 'tr'], output_names=['new_out_file', 'ext_out_file'],
            # function=_extend_motion_parameters), name='extend_motion_pars', iterfield = ['moco_par_file'])
            pass

        # create reference:
        motion_correction_workflow.connect(EPI_file_selector_node, 'out_file',
                                           motion_correct_EPI_space, 'in_file')
        motion_correction_workflow.connect(motion_correct_EPI_space,
                                           'out_file', mean_bold, 'in_file')
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           motion_correct_all, 'ref_file')

        # motion correction across runs
        motion_correction_workflow.connect(input_node, 'in_files',
                                           motion_correct_all, 'in_file')
        #motion_correction_workflow.connect(motion_correct_all, 'out_file', output_node, 'motion_corrected_files')
        # motion_correction_workflow.connect(motion_correct_all, 'par_file', extend_motion_pars, 'moco_par_file')
        # motion_correction_workflow.connect(input_node, 'tr', extend_motion_pars, 'tr')
        # motion_correction_workflow.connect(extend_motion_pars, 'ext_out_file', output_node, 'extended_motion_correction_parameters')
        # motion_correction_workflow.connect(extend_motion_pars, 'new_out_file', output_node, 'new_motion_correction_parameters')

        ########################################################################################
        # Plot the estimated motion parameters
        ########################################################################################

        # rename:
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           rename_mean_bold, 'in_file')
        motion_correction_workflow.connect(motion_correct_all, 'par_file',
                                           rename_motion_files, 'in_file')
        motion_correction_workflow.connect(motion_correct_all, 'par_file',
                                           remove_niigz_ext, 'in_file')
        motion_correction_workflow.connect(remove_niigz_ext, 'out_file',
                                           rename_motion_files,
                                           'format_string')

        # plots:
        plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           plot_motion, 'in_file')
        motion_correction_workflow.connect(plot_motion, 'out_file',
                                           output_node,
                                           'motion_correction_plots')

        # output node:
        motion_correction_workflow.connect(mean_bold, 'out_file', output_node,
                                           'EPI_space_file')
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           output_node,
                                           'motion_correction_parameters')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           output_node,
                                           'motion_corrected_files')

        # datasink:
        motion_correction_workflow.connect(rename_mean_bold, 'out_file',
                                           datasink, 'reg')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           datasink, 'mcf')
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           datasink, 'mcf.motion_pars')
        motion_correction_workflow.connect(plot_motion, 'out_file', datasink,
                                           'mcf.motion_plots')
        # motion_correction_workflow.connect(extend_motion_pars, 'ext_out_file', datasink, 'mcf.ext_motion_pars')
        # motion_correction_workflow.connect(extend_motion_pars, 'new_out_file', datasink, 'mcf.new_motion_pars')

    ########################################################################################
    # AFNI 3DVolReg
    ########################################################################################
    # For speed, we use AFNI's 3dvolreg here. This loses the plotting of
    # motion parameters but is considerably faster. We keep the same setup:
    # first motion-correct the selected run, then motion-correct everything
    # to that image, but without the intermediate FLIRT step.

    if method == 'AFNI':
        motion_correct_EPI_space = pe.Node(
            interface=afni.Volreg(
                outputtype='NIFTI_GZ',
                zpad=5,
                args=' -cubic '  # -twopass -Fourier
            ),
            name='motion_correct_EPI_space')

        motion_correct_all = pe.MapNode(
            interface=afni.Volreg(
                outputtype='NIFTI_GZ',
                zpad=5,
                args=' -cubic '  # -twopass
            ),
            name='motion_correct_all',
            iterfield=['in_file'])

        # for renaming *_volreg.nii.gz to *_mcf.nii.gz
        set_postfix_mcf = pe.MapNode(interface=Set_postfix,
                                     name='set_postfix_mcf',
                                     iterfield=['in_file'])
        set_postfix_mcf.inputs.postfix = 'mcf'

        rename_volreg = pe.MapNode(interface=Rename(keep_ext=True),
                                   name='rename_volreg',
                                   iterfield=['in_file', 'format_string'])

        # curate for moco between sessions
        motion_correction_workflow.connect(EPI_file_selector_node, 'out_file',
                                           motion_correct_EPI_space, 'in_file')
        motion_correction_workflow.connect(motion_correct_EPI_space,
                                           'out_file', mean_bold, 'in_file')

        # motion correction across runs
        motion_correction_workflow.connect(input_node, 'in_files',
                                           motion_correct_all, 'in_file')
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           motion_correct_all, 'basefile')
        # motion_correction_workflow.connect(mean_bold, 'out_file', motion_correct_all, 'rotparent')
        # motion_correction_workflow.connect(mean_bold, 'out_file', motion_correct_all, 'gridparent')

        # output node:
        motion_correction_workflow.connect(mean_bold, 'out_file', output_node,
                                           'EPI_space_file')
        motion_correction_workflow.connect(motion_correct_all, 'md1d_file',
                                           output_node,
                                           'max_displacement_info')
        motion_correction_workflow.connect(motion_correct_all, 'oned_file',
                                           output_node,
                                           'motion_correction_parameter_info')
        motion_correction_workflow.connect(
            motion_correct_all, 'oned_matrix_save', output_node,
            'motion_correction_parameter_matrix')
        motion_correction_workflow.connect(input_node, 'in_files',
                                           set_postfix_mcf, 'in_file')
        motion_correction_workflow.connect(set_postfix_mcf, 'out_file',
                                           rename_volreg, 'format_string')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           rename_volreg, 'in_file')
        motion_correction_workflow.connect(rename_volreg, 'out_file',
                                           output_node,
                                           'motion_corrected_files')

        # datasink:
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           rename_mean_bold, 'in_file')
        motion_correction_workflow.connect(rename_mean_bold, 'out_file',
                                           datasink, 'reg')
        motion_correction_workflow.connect(rename_volreg, 'out_file', datasink,
                                           'mcf')
        motion_correction_workflow.connect(motion_correct_all, 'md1d_file',
                                           datasink,
                                           'mcf.max_displacement_info')
        motion_correction_workflow.connect(motion_correct_all, 'oned_file',
                                           datasink, 'mcf.parameter_info')
        motion_correction_workflow.connect(motion_correct_all,
                                           'oned_matrix_save', datasink,
                                           'mcf.motion_pars')

    return motion_correction_workflow
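
A minimal usage sketch of the workflow above. The factory name and the 'inputspec' node name are assumptions here (the def sits above this excerpt); the field names come from the connections shown above.

# Hypothetical usage sketch; 'create_motion_correction_workflow' is an assumed name.
moco_wf = create_motion_correction_workflow(name='moco', method='FSL')
moco_wf.inputs.inputspec.in_files = ['sub-01_run-1_bold.nii.gz',
                                     'sub-01_run-2_bold.nii.gz']
moco_wf.inputs.inputspec.output_directory = '/derivatives'
moco_wf.inputs.inputspec.sub_id = 'sub-01'
moco_wf.run('MultiProc', plugin_args={'n_procs': 4})
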
Exemplo n.º 11
0
def create_T1_to_standard_workflow(name='T1_to_standard', use_FS=True,
                                   do_fnirt=False, use_AFNI_ss=False):
    """Registers subject's T1 to standard space using FLIRT and FNIRT.
    Requires fsl tools
    Parameters
    ----------
    name : string
        name of workflow
    use_FS : bool
        whether to use freesurfer's T1
    Example
    -------
    >>> T1_to_standard = create_T1_to_standard_workflow()
    >>> T1_to_standard.inputs.inputspec.T1_file = 'T1.nii.gz'
    >>> T1_to_standard.inputs.inputspec.standard_file = 'standard.nii.gz'
    >>> T1_to_standard.inputs.inputspec.freesurfer_subject_ID = 'sub_01'
    >>> T1_to_standard.inputs.inputspec.freesurfer_subject_dir = '$SUBJECTS_DIR'

    Inputs::
          inputspec.T1_file : T1 anatomy file
          inputspec.standard_file : standard-space (e.g. MNI) template file
          inputspec.freesurfer_subject_ID : FS subject ID
          inputspec.freesurfer_subject_dir : $SUBJECTS_DIR
    Outputs::
           outputspec.T1_MNI_file : T1 converted to standard
           outputspec.out_matrix_file : mat file specifying how to convert T1 to standard
           outputspec.out_inv_matrix_file : mat file specifying how to convert standard to T1
           outputspec.warp_field_file : FNIRT warp field
           outputspec.warp_fieldcoeff_file : FNIRT warp coeff field
           outputspec.warped_file : FNIRT warped T1
           outputspec.out_intensitymap_file : FNIRT intensity map

    """

    ### NODES
    input_node = pe.Node(IdentityInterface(
      fields=['freesurfer_subject_ID', 'freesurfer_subject_dir', 'T1_file', 'standard_file']), name='inputspec')

    # still have to choose which of these two output methods to use.

    datasink = pe.Node(nio.DataSink(), name='sinker')
    output_node = pe.Node(IdentityInterface(fields=['T1_standard_file',
                    'T1_standard_matrix_file',
                    'standard_T1_matrix_file',
                    'warp_field_file',
                    'warp_fieldcoeff_file',
                    'warped_file',
                    'modulatedref_file',
                    'out_intensitymap_file',
                    'T1_file'
                    ]), name='outputspec')

    # housekeeping function for finding T1 file in FS directory
    def FS_T1_file(freesurfer_subject_ID, freesurfer_subject_dir):
        import os.path as op
        return op.join(freesurfer_subject_dir, freesurfer_subject_ID, 'mri', 'T1.mgz')

    FS_T1_file_node = pe.Node(Function(input_names=('freesurfer_subject_ID', 'freesurfer_subject_dir'), output_names='T1_mgz_path',
                                     function=FS_T1_file), name='FS_T1_file_node')  

    T1_to_standard_workflow = pe.Workflow(name=name)

    # immediately attempt to datasink the standard file
    T1_to_standard_workflow.connect(input_node, 'standard_file', datasink, 'reg.feat.standard.@nii.@gz')

    ########################################################################################
    # create FLIRT/FNIRT nodes
    ########################################################################################
    if use_AFNI_ss:
        bet_N = pe.Node(interface=SkullStrip(args='-orig_vol', outputtype='NIFTI_GZ'), name='bet_N_afni')
    else:
        bet_N = pe.Node(interface=fsl.BET(vertical_gradient=-0.1, functional=False, mask=True), name='bet_N_fsl')

    flirt_t2s = pe.Node(fsl.FLIRT(cost_func='normmi', output_type='NIFTI_GZ', dof=12, interp='sinc'),
                        name='flirt_t2s')
    if do_fnirt:
        fnirt_N = pe.Node(fsl.FNIRT(in_fwhm=[8, 4, 2, 2],
                                    subsampling_scheme=[4, 2, 1, 1],
                                    warp_resolution=(6, 6, 6),
                                    output_type='NIFTI_GZ'),
                          name='fnirt_N')

    ########################################################################################
    # first take file from freesurfer subject directory, if necessary
    # in which case we assume that there is no T1_file at present and overwrite it
    ########################################################################################
    if use_FS:
        mriConvert_N = pe.Node(freesurfer.MRIConvert(out_type='niigz'),
                               name='mriConvert_N')

        T1_to_standard_workflow.connect(input_node, 'freesurfer_subject_ID', FS_T1_file_node, 'freesurfer_subject_ID')
        T1_to_standard_workflow.connect(input_node, 'freesurfer_subject_dir', FS_T1_file_node, 'freesurfer_subject_dir')

        T1_to_standard_workflow.connect(FS_T1_file_node, 'T1_mgz_path', mriConvert_N, 'in_file')

        # and these are input into the flirt and fnirt operators, as below.
        T1_to_standard_workflow.connect(mriConvert_N, 'out_file', bet_N, 'in_file')
        T1_to_standard_workflow.connect(bet_N, 'out_file', flirt_t2s, 'in_file')
        T1_to_standard_workflow.connect(mriConvert_N, 'out_file', output_node, 'T1_file')
        if do_fnirt:
            T1_to_standard_workflow.connect(bet_N, 'out_file', fnirt_N, 'in_file')

    else:
        T1_to_standard_workflow.connect(input_node, 'T1_file', bet_N, 'in_file')
        T1_to_standard_workflow.connect(bet_N, 'out_file', flirt_t2s, 'in_file')
        T1_to_standard_workflow.connect(input_node, 'T1_file', output_node, 'T1_file')

        if do_fnirt:
            T1_to_standard_workflow.connect(bet_N, 'out_file', fnirt_N, 'in_file')

    ########################################################################################
    # continue with FLIRT step
    ########################################################################################
    T1_to_standard_workflow.connect(input_node, 'standard_file', flirt_t2s, 'reference')

    T1_to_standard_workflow.connect(flirt_t2s, 'out_matrix_file', output_node, 'T1_standard_matrix_file')
    T1_to_standard_workflow.connect(flirt_t2s, 'out_file', output_node, 'T1_standard_file')

    ########################################################################################
    # invert step
    ########################################################################################
    invert_N = pe.Node(fsl.ConvertXFM(invert_xfm=True), name='invert_N')
    T1_to_standard_workflow.connect(flirt_t2s, 'out_matrix_file', invert_N, 'in_file')
    T1_to_standard_workflow.connect(invert_N, 'out_file', output_node, 'standard_T1_matrix_file')

    if do_fnirt:
        ########################################################################################
        # FNIRT step
        ########################################################################################

        T1_to_standard_workflow.connect(flirt_t2s, 'out_matrix_file', fnirt_N, 'affine_file')
        T1_to_standard_workflow.connect(input_node, 'standard_file', fnirt_N, 'ref_file')

        ########################################################################################
        # output node
        ########################################################################################

        T1_to_standard_workflow.connect(fnirt_N, 'field_file', output_node, 'warp_field_file')
        T1_to_standard_workflow.connect(fnirt_N, 'fieldcoeff_file', output_node, 'warp_fieldcoeff_file')
        T1_to_standard_workflow.connect(fnirt_N, 'warped_file', output_node, 'warped_file')
        T1_to_standard_workflow.connect(fnirt_N, 'modulatedref_file', output_node, 'modulatedref_file')
        T1_to_standard_workflow.connect(fnirt_N, 'out_intensitymap_file', output_node, 'out_intensitymap_file')

    return T1_to_standard_workflow
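
The affine produced by flirt_t2s can later be applied to any other volume in T1 space. A minimal standalone sketch with nipype's fsl.ApplyXFM; all file names are placeholders.

import nipype.interfaces.fsl as fsl

# Apply the T1->standard affine (flirt_t2s' out_matrix_file) to another
# image in T1 space; file names are placeholders.
apply_reg = fsl.ApplyXFM(interp='sinc')
apply_reg.inputs.in_file = 'roi_in_T1_space.nii.gz'
apply_reg.inputs.reference = 'standard.nii.gz'
apply_reg.inputs.in_matrix_file = 'T1_to_standard.mat'
apply_reg.inputs.out_file = 'roi_in_standard_space.nii.gz'
apply_reg.run()
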
Exemplo n.º 12
0
def spikereg_workflow(SinkTag="func_preproc",
                      wf_name="data_censoring_despike"):
    """

    Description:
        Calculates volumes to be excluded, creates the despike regressor matrix

    Workflow inputs:
        :param FD: the framewise displacement calculated by the MotionCorrecter.py script
        :param threshold: upper percentage of FD values to exclude (default: 5%)
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found, in a subdirectory specific to this workflow.

    Workflow outputs:

        :return: spikereg_workflow - workflow

    Tamas Spisak
    [email protected]
    2018


    References
    ----------

    .. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
           but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
           2142-2154. doi:10.1016/j.neuroimage.2011.10.018

    .. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
           toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
           NeuroImage. doi:10.1016/j.neuroimage.2012.03.017

    .. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
           and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.

    """
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'func',
        'FD',
        'threshold',
    ]),
                        name='inputspec')
    inputspec.inputs.threshold = 5

    # TODO_ready: check the CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threshold limit, no matter what we set.
    # - we use the Power method to calculate FD
    # Determine the indices of the volumes in the upper part of the FD distribution (defined by the threshold, default 5%)
    calc_upprperc = pe.MapNode(utility.Function(
        input_names=['in_file', 'threshold'],
        output_names=[
            'frames_in_idx', 'frames_out_idx', 'percentFD', 'out_file', 'nvol'
        ],
        function=calculate_upperpercent),
                               iterfield=['in_file'],
                               name='calculate_upperpercent')

    # create the despiking matrix, to be included in the nuisance correction
    despike_matrix = pe.MapNode(utility.Function(
        input_names=['frames_excluded', 'total_vols'],
        output_names=['despike_mat'],
        function=create_despike_regressor_matrix),
                                iterfield=['frames_excluded', 'total_vols'],
                                name='create_despike_matrix')

    outputspec = pe.Node(
        utility.IdentityInterface(fields=['despike_mat', 'FD']),
        name='outputspec')

    # save data out with Datasink
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir

    # TODO_ready: some plot for quality checking

    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    ### Calculate framewise displacement (FD) as in Power et al., 2012
    # Calculating frames to exclude and include after scrubbing
    analysisflow.connect(inputspec, 'FD', calc_upprperc, 'in_file')
    analysisflow.connect(inputspec, 'threshold', calc_upprperc, 'threshold')
    # Create the proper format for the scrubbing procedure
    analysisflow.connect(calc_upprperc, 'frames_out_idx', despike_matrix,
                         'frames_excluded')
    analysisflow.connect(calc_upprperc, 'nvol', despike_matrix, 'total_vols')
    analysisflow.connect(
        calc_upprperc, 'out_file', ds,
        'percentFD')  # TODO: save this in a separate folder for QC
    # Output
    analysisflow.connect(despike_matrix, 'despike_mat', outputspec,
                         'despike_mat')
    analysisflow.connect(inputspec, 'FD', outputspec, 'FD')
    return analysisflow
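
create_despike_regressor_matrix and calculate_upperpercent are defined elsewhere in the module. For orientation, a minimal sketch of the standard spike-regressor construction (one indicator column per excluded volume), assuming that is what create_despike_regressor_matrix implements:

import numpy as np

def create_despike_regressor_matrix_sketch(frames_excluded, total_vols):
    # One column per censored volume: 1 at the spike, 0 elsewhere.
    # Including these columns in the nuisance regression effectively
    # removes the flagged volumes from the fit.
    mat = np.zeros((total_vols, len(frames_excluded)))
    for col, vol_idx in enumerate(frames_excluded):
        mat[vol_idx, col] = 1
    out_file = 'despike_regressors.txt'
    np.savetxt(out_file, mat, fmt='%d')
    return out_file
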
Exemplo n.º 13
0
def create_B0_workflow(name='b0_unwarping', scanner='philips'):
    """ Does B0 field unwarping

    Example
    -------
    >>> nipype_epicorrect = create_B0_workflow(name='b0_unwarping')
    >>> nipype_epicorrect.inputs.inputspec.in_files = 'subj1_run1_bold.nii.gz'
    >>> nipype_epicorrect.inputs.inputspec.fieldmap_mag = 'subj1_run1_mag.nii.gz'
    >>> nipype_epicorrect.inputs.inputspec.fieldmap_pha = 'subj1_run1_phas.nii.gz'
    >>> nipype_epicorrect.inputs.inputspec.wfs = 12.223
    >>> nipype_epicorrect.inputs.inputspec.epi_factor = 35.0
    >>> nipype_epicorrect.inputs.inputspec.acceleration = 3.0
    >>> nipype_epicorrect.inputs.inputspec.te_diff = 0.005
    >>> nipype_epicorrect.inputs.inputspec.phase_encoding_direction = 'y'
    >>> nipype_epicorrect.run()

    Inputs::
        inputspec.in_files - Volume(s) acquired with an EPI sequence
        inputspec.fieldmap_mag - Magnitude of the fieldmap
        inputspec.fieldmap_pha - Phase difference of the fieldmap
        inputspec.wfs - Water-fat shift in pixels
        inputspec.epi_factor - EPI factor
        inputspec.acceleration - Acceleration factor used for EPI parallel imaging (SENSE)
        inputspec.te_diff - Time difference between the two echo times, in seconds
        inputspec.phase_encoding_direction - Unwarp direction (default should be "y")
    Outputs::
        outputspec.out_files - Unwarped EPI files
        outputspec.field_coefs - Fieldmaps (rad/s) registered to the EPI files
    """

    # Nodes:
    # ------

    # Define input and workflow:
    input_node = pe.Node(name='inputspec',
                         interface=IdentityInterface(fields=[
                             'in_files', 'fieldmap_mag', 'fieldmap_pha', 'wfs',
                             'epi_factor', 'acceleration', 'echo_spacing',
                             'te_diff', 'phase_encoding_direction'
                         ]))

    # Normalize the fieldmap phase difference to [-pi, pi)
    norm_pha = pe.Node(interface=Prepare_phasediff, name='normalize_phasediff')

    # Mask the magnitude of the fieldmap
    mask_mag = pe.Node(fsl.BET(mask=True), name='mask_magnitude')
    mask_mag_dil = pe.Node(interface=Dilate_mask, name='mask_dilate')

    # Unwrap fieldmap phase using FSL PRELUDE
    prelude = pe.Node(fsl.PRELUDE(process3d=True), name='phase_unwrap')

    # Convert unwrapped fieldmap phase to radians per second:
    radials_per_second = pe.Node(interface=Radials_per_second,
                                 name='radials_ps')

    # in case of a Siemens scanner:
    prepare_fieldmap = pe.Node(PrepareFieldmap(), name='prepare_fieldmap')

    # Register unwrapped fieldmap (rad/s) to epi, using the magnitude of the fieldmap
    registration = pe.MapNode(fsl.FLIRT(bins=256,
                                        cost='corratio',
                                        dof=6,
                                        interp='trilinear',
                                        searchr_x=[-10, 10],
                                        searchr_y=[-10, 10],
                                        searchr_z=[-10, 10]),
                              iterfield=['reference'],
                              name='registration')

    # transform unwrapped fieldmap (rad/s)
    applyxfm = pe.MapNode(fsl.ApplyXFM(interp='trilinear'),
                          iterfield=['reference', 'in_matrix_file'],
                          name='apply_xfm')

    # compute effective echo spacing:
    echo_spacing_philips = pe.Node(interface=Compute_echo_spacing_philips,
                                   name='echo_spacing_philips')
    echo_spacing_siemens = pe.Node(interface=Compute_echo_spacing_siemens,
                                   name='echo_spacing_siemens')
    te_diff_in_ms = pe.Node(interface=TE_diff_ms, name='te_diff_in_ms')

    # Unwarp with FSL Fugue
    fugue = pe.MapNode(interface=fsl.FUGUE(median_2dfilter=True),
                       iterfield=['in_file', 'unwarped_file', 'fmap_in_file'],
                       name='fugue')

    # Construct the output filename for each unwarped file:
    out_file = pe.MapNode(interface=Make_output_filename,
                          iterfield=['in_file'],
                          name='out_file')

    # Define output node
    outputnode = pe.Node(
        IdentityInterface(fields=['out_files', 'field_coefs']),
        name='outputspec')

    # Workflow:
    # ---------

    unwarp_workflow = pe.Workflow(name=name)
    unwarp_workflow.connect(input_node, 'in_files', out_file, 'in_file')

    # registration:
    unwarp_workflow.connect(input_node, 'fieldmap_mag', mask_mag, 'in_file')
    unwarp_workflow.connect(mask_mag, 'mask_file', mask_mag_dil, 'in_file')
    unwarp_workflow.connect(mask_mag, 'out_file', registration, 'in_file')
    unwarp_workflow.connect(input_node, 'in_files', registration, 'reference')

    if scanner == 'philips':

        # prepare fieldmap:
        unwarp_workflow.connect(input_node, 'fieldmap_pha', norm_pha,
                                'in_file')
        unwarp_workflow.connect(input_node, 'fieldmap_mag', prelude,
                                'magnitude_file')
        unwarp_workflow.connect(norm_pha, 'out_file', prelude, 'phase_file')
        unwarp_workflow.connect(mask_mag_dil, 'out_file', prelude, 'mask_file')
        unwarp_workflow.connect(prelude, 'unwrapped_phase_file',
                                radials_per_second, 'in_file')
        unwarp_workflow.connect(input_node, 'te_diff', radials_per_second,
                                'asym')

        # transform fieldmap:
        unwarp_workflow.connect(radials_per_second, 'out_file', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'wfs', echo_spacing_philips, 'wfs')
        unwarp_workflow.connect(input_node, 'epi_factor', echo_spacing_philips,
                                'epi_factor')
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_philips, 'acceleration')
        unwarp_workflow.connect(echo_spacing_philips, 'echo_spacing', fugue,
                                'dwell_time')

    elif scanner == 'siemens':

        unwarp_workflow.connect(input_node, 'te_diff', te_diff_in_ms,
                                'te_diff')

        # prepare fieldmap:
        unwarp_workflow.connect(mask_mag, 'out_file', prepare_fieldmap,
                                'in_magnitude')
        unwarp_workflow.connect(input_node, 'fieldmap_pha', prepare_fieldmap,
                                'in_phase')
        unwarp_workflow.connect(te_diff_in_ms, 'te_diff', prepare_fieldmap,
                                'delta_TE')

        # transform fieldmap:
        unwarp_workflow.connect(prepare_fieldmap, 'out_fieldmap', applyxfm,
                                'in_file')
        unwarp_workflow.connect(registration, 'out_matrix_file', applyxfm,
                                'in_matrix_file')
        unwarp_workflow.connect(input_node, 'in_files', applyxfm, 'reference')

        # compute echo spacing:
        unwarp_workflow.connect(input_node, 'acceleration',
                                echo_spacing_siemens, 'acceleration')
        unwarp_workflow.connect(input_node, 'echo_spacing',
                                echo_spacing_siemens, 'echo_spacing')
        unwarp_workflow.connect(echo_spacing_siemens, 'echo_spacing', fugue,
                                'dwell_time')

    unwarp_workflow.connect(input_node, 'in_files', fugue, 'in_file')
    unwarp_workflow.connect(out_file, 'out_file', fugue, 'unwarped_file')
    unwarp_workflow.connect(applyxfm, 'out_file', fugue, 'fmap_in_file')
    unwarp_workflow.connect(input_node, 'te_diff', fugue, 'asym_se_time')
    unwarp_workflow.connect(input_node, 'phase_encoding_direction', fugue,
                            'unwarp_direction')
    unwarp_workflow.connect(fugue, 'unwarped_file', outputnode, 'out_files')
    unwarp_workflow.connect(applyxfm, 'out_file', outputnode, 'field_coefs')

    return unwarp_workflow
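
Compute_echo_spacing_philips is defined elsewhere. For orientation, a sketch of the conversion commonly used for Philips data from water-fat shift and EPI factor to effective echo spacing; the 434.215 Hz water-fat shift (3T) and the exact formula are assumptions here, not taken from this module:

def compute_echo_spacing_philips_sketch(wfs, epi_factor, acceleration):
    # Water-fat shift per pixel (wfs) and the EPI factor determine the
    # bandwidth per pixel in the phase-encoding direction; dividing by the
    # SENSE acceleration yields the effective echo spacing in seconds.
    water_fat_shift_hz = 434.215  # at 3T; assumption
    echo_spacing = wfs / (water_fat_shift_hz * (epi_factor + 1.0))
    return echo_spacing / acceleration
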
Exemplo n.º 14
0
def create_pupil_workflow(analysis_info, name='pupil'):
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink

    from utils.pupil import fit_FIR_pupil_files

    imports = [
        'from utils.behavior import behavior_timing',
        'from utils.plotting import plot_fir_results_unpredictable',
        'from utils.plotting import plot_fir_results_predictable',
        'from utils.plotting import plot_fir_results_variable',
    ]

    input_node = pe.Node(
        IdentityInterface(fields=['preprocessed_directory', 'sub_id']),
        name='inputspec')

    # i/o node
    datasource_templates = dict(
        all_roi_file='{sub_id}/h5/roi.h5',
        # predictable reward experiment needs behavior files and moco but no physio
        predictable_in_files='{sub_id}/psc/*-predictable_reward_*.nii.gz',
        predictable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-predictable_reward_*.tsv',
        predictable_eye_h5_files='{sub_id}/eye/h5/*-predictable_reward_*.h5',
        # unpredictable reward experiment needs behavior files, moco and physio
        unpredictable_in_files='{sub_id}/psc/*-unpredictable_reward_*.nii.gz',
        unpredictable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-unpredictable_reward_*.tsv',
        unpredictable_eye_h5_files=
        '{sub_id}/eye/h5/*-unpredictable_reward_*.h5',
        # variable reward experiment needs behavior files, moco and physio
        variable_in_files='{sub_id}/psc/*-variable_*_reward_*.nii.gz',
        variable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-variable_*_reward_*.tsv',
        variable_eye_h5_files='{sub_id}/eye/h5/*-variable_*_reward_*.h5',
    )
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    predictable_pupil_FIR = pe.Node(Function(input_names=[
        'experiment', 'eye_h5_file_list', 'behavior_file_list', 'h5_file',
        'in_files', 'fir_frequency', 'fir_interval', 'data_type',
        'lost_signal_rate_threshold'
    ],
                                             output_names=['out_figures'],
                                             function=fit_FIR_pupil_files,
                                             imports=imports),
                                    name='predictable_pupil_FIR')
    predictable_pupil_FIR.inputs.fir_frequency = analysis_info[
        'pupil_fir_frequency']
    predictable_pupil_FIR.inputs.fir_interval = analysis_info[
        'pupil_fir_interval']
    predictable_pupil_FIR.inputs.experiment = 'predictable'
    predictable_pupil_FIR.inputs.data_type = analysis_info['pupil_data_type']
    predictable_pupil_FIR.inputs.lost_signal_rate_threshold = analysis_info[
        'pupil_lost_signal_rate_threshold']

    unpredictable_pupil_FIR = pe.Node(Function(input_names=[
        'experiment', 'eye_h5_file_list', 'behavior_file_list', 'h5_file',
        'in_files', 'fir_frequency', 'fir_interval', 'data_type',
        'lost_signal_rate_threshold'
    ],
                                               output_names=['out_figures'],
                                               function=fit_FIR_pupil_files,
                                               imports=imports),
                                      name='unpredictable_pupil_FIR')
    unpredictable_pupil_FIR.inputs.fir_frequency = analysis_info[
        'pupil_fir_frequency']
    unpredictable_pupil_FIR.inputs.fir_interval = analysis_info[
        'pupil_fir_interval']
    unpredictable_pupil_FIR.inputs.experiment = 'unpredictable'
    unpredictable_pupil_FIR.inputs.data_type = analysis_info['pupil_data_type']
    unpredictable_pupil_FIR.inputs.lost_signal_rate_threshold = analysis_info[
        'pupil_lost_signal_rate_threshold']

    variable_pupil_FIR = pe.Node(Function(input_names=[
        'experiment', 'eye_h5_file_list', 'behavior_file_list', 'h5_file',
        'in_files', 'fir_frequency', 'fir_interval', 'data_type',
        'lost_signal_rate_threshold'
    ],
                                          output_names=['out_figures'],
                                          function=fit_FIR_pupil_files,
                                          imports=imports),
                                 name='variable_pupil_FIR')
    variable_pupil_FIR.inputs.fir_frequency = analysis_info[
        'pupil_fir_frequency']
    variable_pupil_FIR.inputs.fir_interval = analysis_info[
        'pupil_fir_interval']
    variable_pupil_FIR.inputs.experiment = 'variable'
    variable_pupil_FIR.inputs.data_type = analysis_info['pupil_data_type']
    variable_pupil_FIR.inputs.lost_signal_rate_threshold = analysis_info[
        'pupil_lost_signal_rate_threshold']

    # the actual top-level workflow
    pupil_analysis_workflow = pe.Workflow(name=name)

    pupil_analysis_workflow.connect(input_node, 'preprocessed_directory',
                                    datasource, 'base_directory')
    pupil_analysis_workflow.connect(input_node, 'sub_id', datasource, 'sub_id')

    # variable reward pupil FIR
    pupil_analysis_workflow.connect(datasource, 'variable_eye_h5_files',
                                    variable_pupil_FIR, 'eye_h5_file_list')
    pupil_analysis_workflow.connect(datasource, 'variable_behavior_tsv_files',
                                    variable_pupil_FIR, 'behavior_file_list')
    pupil_analysis_workflow.connect(datasource, 'all_roi_file',
                                    variable_pupil_FIR, 'h5_file')
    pupil_analysis_workflow.connect(datasource, 'variable_in_files',
                                    variable_pupil_FIR, 'in_files')

    # predictable reward pupil FIR
    pupil_analysis_workflow.connect(datasource, 'predictable_eye_h5_files',
                                    predictable_pupil_FIR, 'eye_h5_file_list')
    pupil_analysis_workflow.connect(datasource,
                                    'predictable_behavior_tsv_files',
                                    predictable_pupil_FIR,
                                    'behavior_file_list')
    pupil_analysis_workflow.connect(datasource, 'all_roi_file',
                                    predictable_pupil_FIR, 'h5_file')
    pupil_analysis_workflow.connect(datasource, 'predictable_in_files',
                                    predictable_pupil_FIR, 'in_files')

    # unpredictable reward pupil FIR
    pupil_analysis_workflow.connect(datasource, 'unpredictable_eye_h5_files',
                                    unpredictable_pupil_FIR,
                                    'eye_h5_file_list')
    pupil_analysis_workflow.connect(datasource,
                                    'unpredictable_behavior_tsv_files',
                                    unpredictable_pupil_FIR,
                                    'behavior_file_list')
    pupil_analysis_workflow.connect(datasource, 'all_roi_file',
                                    unpredictable_pupil_FIR, 'h5_file')
    pupil_analysis_workflow.connect(datasource, 'unpredictable_in_files',
                                    unpredictable_pupil_FIR, 'in_files')

    # datasink
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    pupil_analysis_workflow.connect(input_node, 'preprocessed_directory',
                                    datasink, 'base_directory')
    pupil_analysis_workflow.connect(input_node, 'sub_id', datasink,
                                    'container')

    pupil_analysis_workflow.connect(unpredictable_pupil_FIR, 'out_figures',
                                    datasink, 'pupil.@unpredictable_pupil_FIR')
    pupil_analysis_workflow.connect(predictable_pupil_FIR, 'out_figures',
                                    datasink, 'pupil.@predictable_pupil_FIR')
    pupil_analysis_workflow.connect(variable_pupil_FIR, 'out_figures',
                                    datasink, 'pupil.@variable_pupil_FIR')

    return pupil_analysis_workflow
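
A minimal usage sketch; the two inputspec fields are those connected above, and analysis_info is the same dict the factory receives.

pupil_wf = create_pupil_workflow(analysis_info, name='pupil')
pupil_wf.inputs.inputspec.preprocessed_directory = '/derivatives'
pupil_wf.inputs.inputspec.sub_id = 'sub-01'
pupil_wf.run()
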
Exemplo n.º 15
0
def datacens_workflow_threshold(SinkTag="func_preproc",
                                wf_name="data_censoring",
                                ex_before=1,
                                ex_after=2):
    """

        Modified version of CPAC.scrubbing.scrubbing +
                            CPAC.generate_motion_statistics.generate_motion_statistics +
                            CPAC.func_preproc.func_preproc

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/scrubbing/scrubbing.html`
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/generate_motion_statistics/generate_motion_statistics.html`
    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/func_preproc/func_preproc.html`

    Description:
        Perform data censoring on the 4D functional data. First, it calculates the framewise displacement (FD) according to Power's method. Second,
        it indexes the volumes whose FD exceeds the threshold (0.2 mm by default). Third, it excludes those volumes, plus one volume
        before and two volumes after each indexed volume (configurable via ex_before and ex_after). The workflow returns a 4D scrubbed functional dataset.

    Workflow inputs:
        :param func: The reoriented, motion-corrected, nuisance-removed and bandpass-filtered functional file.
        :param FD: the framewise displacement calculated by the MotionCorrecter.py script
        :param threshold: FD threshold (in mm) above which volumes are excluded
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found, in a subdirectory specific to this workflow.

    Workflow outputs:

        :return: datacens_workflow - workflow




    Balint Kincses
    [email protected]
    2018


    References
    ----------

    .. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
           but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
           2142-2154. doi:10.1016/j.neuroimage.2011.10.018

    .. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
           toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
           NeuroImage. doi:10.1016/j.neuroimage.2012.03.017

    .. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
           and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.

    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    import PUMI.utils.utils_convert as utils_convert
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'FD', 'threshold']),
        name='inputspec')
    inputspec.inputs.threshold = 0.2  #mm

    # TODO_ready: check the CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threshold limit, no matter what we set.
    # - we use the Power method to calculate FD
    above_thr = pe.MapNode(utility.Function(
        input_names=['in_file', 'threshold', 'frames_before', 'frames_after'],
        output_names=[
            'frames_in_idx', 'frames_out_idx', 'percentFD',
            'percent_scrubbed_file', 'fd_scrubbed_file', 'nvol'
        ],
        function=above_threshold),
                           iterfield=['in_file'],
                           name='above_threshold')
    above_thr.inputs.frames_before = ex_before
    above_thr.inputs.frames_after = ex_after

    # Save outputs which are important
    ds_fd_scrub = pe.Node(interface=io.DataSink(), name='ds_fd_scrub')
    ds_fd_scrub.inputs.base_directory = SinkDir
    ds_fd_scrub.inputs.regexp_substitutions = [("(\/)[^\/]*$",
                                                "FD_scrubbed.csv")]
    pop_perc_scrub = pe.Node(interface=utils_convert.List2TxtFileOpen,
                             name='pop_perc_scrub')

    # save data out with Datasink
    ds_pop_perc_scrub = pe.Node(interface=io.DataSink(),
                                name='ds_pop_perc_scrub')
    ds_pop_perc_scrub.inputs.regexp_substitutions = [
        ("(\/)[^\/]*$", "pop_percent_scrubbed.txt")
    ]
    ds_pop_perc_scrub.inputs.base_directory = SinkDir

    # Generate the input string for the scrubbing procedure (performed in AFNI)
    craft_scrub_input = pe.MapNode(
        utility.Function(input_names=['scrub_input', 'frames_in_1D_file'],
                         output_names=['scrub_input_string'],
                         function=get_indx),
        iterfield=['scrub_input', 'frames_in_1D_file'],
        name='scrubbing_craft_input_string')
    # Scrub the image
    scrubbed_preprocessed = pe.MapNode(utility.Function(
        input_names=['scrub_input'],
        output_names=['scrubbed_image'],
        function=scrub_image),
                                       iterfield=['scrub_input'],
                                       name='scrubbed_preprocessed')

    myqc = qc.timecourse2png("timeseries", tag="040_censored")

    outputspec = pe.Node(
        utility.IdentityInterface(fields=['scrubbed_image', 'FD_scrubbed']),
        name='outputspec')

    # save data out with Datasink
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir

    # TODO_ready: some plot for quality checking

    # Create workflow
    analysisflow = pe.Workflow(wf_name)
    ### Calculate framewise displacement (FD) as in Power et al., 2012
    # Calculating frames to exclude and include after scrubbing
    analysisflow.connect(inputspec, 'FD', above_thr, 'in_file')
    analysisflow.connect(inputspec, 'threshold', above_thr, 'threshold')
    # Create the proper format for the scrubbing procedure
    analysisflow.connect(above_thr, 'frames_in_idx', craft_scrub_input,
                         'frames_in_1D_file')
    analysisflow.connect(
        above_thr, 'percent_scrubbed_file', ds,
        'percentFD')  # TODO save this in separate folder for QC
    analysisflow.connect(inputspec, 'func', craft_scrub_input, 'scrub_input')
    # Do the scrubbing
    analysisflow.connect(craft_scrub_input, 'scrub_input_string',
                         scrubbed_preprocessed, 'scrub_input')
    # Output
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', outputspec,
                         'scrubbed_image')
    analysisflow.connect(above_thr, 'fd_scrubbed_file', outputspec,
                         'FD_scrubbed')  #TODO_ready: scrub FD file, as well
    analysisflow.connect(above_thr, 'fd_scrubbed_file', ds_fd_scrub,
                         'FD_scrubbed')

    analysisflow.connect(above_thr, 'percent_scrubbed_file', pop_perc_scrub,
                         'in_list')
    analysisflow.connect(pop_perc_scrub, 'txt_file', ds_pop_perc_scrub, 'pop')

    # Save a few files
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', ds,
                         'scrubbed_image')
    #analysisflow.connect(above_thr, 'percentFD', ds, 'scrubbed_image.@numberofvols')
    analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', myqc,
                         'inputspec.func')

    return analysisflow
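
above_threshold, get_indx and scrub_image are defined elsewhere in the module. For orientation, a minimal sketch of the indexing logic the docstring describes (flag volumes whose FD exceeds the threshold, then widen the exclusion window by ex_before/ex_after); the FD file layout assumed here is one header line followed by one value per volume transition:

import numpy as np

def frames_to_exclude_sketch(fd_file, threshold=0.2,
                             frames_before=1, frames_after=2):
    fd = np.loadtxt(fd_file, skiprows=1)  # assumed: one header line
    nvol = len(fd) + 1                    # FD is defined between volumes
    spikes = np.where(np.insert(fd, 0, 0) > threshold)[0]
    excluded = set()
    for idx in spikes:
        lo = max(idx - frames_before, 0)
        hi = min(idx + frames_after, nvol - 1)
        excluded.update(range(lo, hi + 1))
    frames_out = sorted(excluded)
    frames_in = sorted(set(range(nvol)) - excluded)
    return frames_in, frames_out
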
Exemplo n.º 16
0
def aroma_workflow(fwhm=0,  # in mm
                   SinkTag="func_preproc", wf_name="ICA_AROMA"):

    """
    ICA-AROMA method embedded into PUMI
    https://github.com/rhr-pruim/ICA-AROMA

    Function input: fwhm: smoothing FWHM in mm; fwhm=0 means no smoothing.

    Workflow inputs:
        :param mc_func: The reoriented and motion-corrected functional file.
        :param mc_params: motion parameters file from mcflirt
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found, in a subdirectory specific to this workflow.

    Workflow outputs:




        :return: aroma_workflow - workflow

    Tamas Spisak
    [email protected]
    2018


    """
    from nipype.interfaces.fsl import ICA_AROMA
    import nipype.pipeline as pe
    from nipype.interfaces import utility
    from nipype.interfaces.utility import Function
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    from nipype.interfaces.fsl import Smooth
    import os
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=['mc_func',
                                                          'mc_par',
                                                          'fnirt_warp_file',
                                                          'mat_file',
                                                          'mask',
                                                          'qc_mask'
                                                          ]),
                            name='inputspec')

    # build the actual pipeline
    if fwhm != 0:
        smoother = pe.MapNode(interface=Smooth(fwhm=fwhm),
                              iterfield=['in_file'],
                              name="smoother")
    myqc_before = qc.timecourse2png("timeseries", tag="1_original")

    aroma = pe.MapNode(interface=ICA_AROMA(denoise_type='both'),
                       iterfield=['in_file',
                                  'motion_parameters',
                                  'mat_file',
                                  'fnirt_warp_file',
                                  'mask'],
                       name="ICA_AROMA")
    aroma.inputs.out_dir = 'AROMA_out'

    myqc_after_nonaggr = qc.timecourse2png("timeseries", tag="2_nonaggressive")
    myqc_after_aggr = qc.timecourse2png("timeseries", tag="3_aggressive")  # put these in the same QC dir

    getMotICs = pe.MapNode(interface=Function(input_names=['aroma_dir'],
                                              output_names=['motion_ICs'],
                                              function=extract_motionICs),
                           iterfield=['aroma_dir'],
                           name="get_motion_ICs")

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(),
                 name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    ds_txt = pe.Node(interface=io.DataSink(),
                     name='ds_txt')
    ds_txt.inputs.base_directory = SinkDir
    ds_txt.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".txt")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=['aggr_denoised_file',
                                                           'nonaggr_denoised_file',
                                                           'motion_ICs',
                                                           'out_dir',
                                                           'fwhm']),
                         name='outputspec')
    outputspec.inputs.fwhm = fwhm

    analysisflow = pe.Workflow(name=wf_name)
    if fwhm != 0:
        analysisflow.connect(inputspec, 'mc_func', smoother, 'in_file')
        analysisflow.connect(smoother, 'smoothed_file', aroma, 'in_file')
        analysisflow.connect(smoother, 'smoothed_file', myqc_before, 'inputspec.func')
    else:
        analysisflow.connect(inputspec, 'mc_func', aroma, 'in_file')
        analysisflow.connect(inputspec, 'mc_func', myqc_before, 'inputspec.func')
    analysisflow.connect(inputspec, 'mc_par', aroma, 'motion_parameters')
    analysisflow.connect(inputspec, 'mat_file', aroma, 'mat_file')
    analysisflow.connect(inputspec, 'fnirt_warp_file', aroma, 'fnirt_warp_file')
    analysisflow.connect(inputspec, 'mask', aroma, 'mask')
    analysisflow.connect(aroma, 'out_dir', getMotICs, 'aroma_dir')
    analysisflow.connect(getMotICs, 'motion_ICs', ds_txt, 'motion_ICs')
    analysisflow.connect(aroma, 'aggr_denoised_file', ds_nii, 'AROMA_aggr_denoised')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', ds_nii, 'AROMA_nonaggr_denoised')

    analysisflow.connect(inputspec, 'qc_mask', myqc_before, 'inputspec.mask')
    analysisflow.connect(aroma, 'aggr_denoised_file', myqc_after_aggr, 'inputspec.func')
    #analysisflow.connect(inputspec, 'qc_mask', myqc_after_aggr, 'inputspec.mask')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', myqc_after_nonaggr, 'inputspec.func')
    #analysisflow.connect(inputspec, 'qc_mask', myqc_after_nonaggr, 'inputspec.mask')

    analysisflow.connect(aroma, 'aggr_denoised_file', outputspec, 'aggr_denoised_file')
    analysisflow.connect(aroma, 'nonaggr_denoised_file', outputspec, 'nonaggr_denoised_file')
    analysisflow.connect(aroma, 'out_dir', outputspec, 'out_dir')
    analysisflow.connect(getMotICs, 'motion_ICs', outputspec, 'motion_ICs')

    return analysisflow
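
extract_motionICs is defined elsewhere; a minimal sketch, assuming it reads the classified_motion_ICs.txt file that ICA-AROMA writes into its output directory:

import os

def extract_motionICs_sketch(aroma_dir):
    # ICA-AROMA writes the comma-separated indices of the components it
    # classified as motion-related into classified_motion_ICs.txt.
    ics_file = os.path.join(aroma_dir, 'classified_motion_ICs.txt')
    with open(ics_file) as f:
        motion_ICs = [int(ic) for ic in f.read().strip().split(',')]
    return motion_ICs
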
Exemplo n.º 17
0
def create_epi_to_T1_workflow(name='epi_to_T1', use_FS=True, do_FAST=True):
    """Registers session's EPI space to subject's T1 space
    uses either FLIRT or, when a FS segmentation is present, BBRegister
    Requires fsl and freesurfer tools

    Parameters
    ----------
    name : string
        name of workflow
    use_FS : bool
        whether to use freesurfer's segmentation and BBRegister
    Example
    -------
    >>> epi_to_T1 = create_epi_to_T1_workflow('epi_to_T1', use_FS = True)
    >>> epi_to_T1.inputs.inputspec.EPI_space_file = 'example_Func.nii.gz'
    >>> epi_to_T1.inputs.inputspec.T1_file = 'T1.nii.gz'
    >>> epi_to_T1.inputs.inputspec.freesurfer_subject_ID = 'sub_01'
    >>> epi_to_T1.inputs.inputspec.freesurfer_subject_dir = '$SUBJECTS_DIR'
 
    Inputs::
          inputspec.T1_file : T1 anatomy file
          inputspec.EPI_space_file : EPI session file
          inputspec.freesurfer_subject_ID : FS subject ID
          inputspec.freesurfer_subject_dir : $SUBJECTS_DIR
    Outputs::
           outputspec.EPI_T1_register_file : BBRegister registration file that maps EPI space to T1
           outputspec.EPI_T1_matrix_file : FLIRT registration file that maps EPI space to T1
           outputspec.T1_EPI_matrix_file : FLIRT registration file that maps T1 space to EPI
    """

    input_node = pe.Node(IdentityInterface(fields=[
        'EPI_space_file', 'output_directory', 'freesurfer_subject_ID',
        'freesurfer_subject_dir', 'T1_file'
    ]),
                         name='inputspec')

    # Idea: also output FAST outputs for later use?
    output_node = pe.Node(IdentityInterface(fields=('EPI_T1_matrix_file',
                                                    'T1_EPI_matrix_file',
                                                    'EPI_T1_register_file')),
                          name='outputspec')

    epi_to_T1_workflow = pe.Workflow(name=name)

    if use_FS:  # do BBRegister
        bbregister_N = pe.Node(freesurfer.BBRegister(init='fsl',
                                                     contrast_type='t2',
                                                     out_fsl_file=True),
                               name='bbregister_N')

        epi_to_T1_workflow.connect(input_node, 'EPI_space_file', bbregister_N,
                                   'source_file')
        epi_to_T1_workflow.connect(input_node, 'freesurfer_subject_ID',
                                   bbregister_N, 'subject_id')
        epi_to_T1_workflow.connect(input_node, 'freesurfer_subject_dir',
                                   bbregister_N, 'subjects_dir')

        epi_to_T1_workflow.connect(bbregister_N, 'out_fsl_file', output_node,
                                   'EPI_T1_matrix_file')
        epi_to_T1_workflow.connect(bbregister_N, 'out_reg_file', output_node,
                                   'EPI_T1_register_file')

        # the final invert node
        invert_EPI_N = pe.Node(fsl.ConvertXFM(invert_xfm=True),
                               name='invert_EPI_N')
        epi_to_T1_workflow.connect(bbregister_N, 'out_fsl_file', invert_EPI_N,
                                   'in_file')
        epi_to_T1_workflow.connect(invert_EPI_N, 'out_file', output_node,
                                   'T1_EPI_matrix_file')

    else:  # do FAST + FLIRT

        flirt_e2t = pe.Node(fsl.FLIRT(cost_func='bbr',
                                      output_type='NIFTI_GZ',
                                      dof=12,
                                      interp='sinc'),
                            name='flirt_e2t')

        epi_to_T1_workflow.connect(input_node, 'EPI_space_file', flirt_e2t,
                                   'in_file')

        if do_FAST:
            fast = pe.Node(fsl.FAST(no_pve=True, img_type=1, segments=True),
                           name='fast')

            epi_to_T1_workflow.connect(input_node, 'T1_file', fast, 'in_files')
            epi_to_T1_workflow.connect(fast, ('tissue_class_files', pick_last),
                                       flirt_e2t, 'wm_seg')
        elif not do_FAST and flirt_e2t.inputs.cost_func == 'bbr':
            print(
                'You indicated not wanting to do FAST, but still wanting to do a'
                ' BBR epi-to-T1 registration. That is probably not going to work ...'
            )

        epi_to_T1_workflow.connect(input_node, 'T1_file', flirt_e2t,
                                   'reference')
        epi_to_T1_workflow.connect(flirt_e2t, 'out_matrix_file', output_node,
                                   'EPI_T1_matrix_file')

        # the final invert node
        invert_EPI_N = pe.Node(fsl.ConvertXFM(invert_xfm=True),
                               name='invert_EPI_N')
        epi_to_T1_workflow.connect(flirt_e2t, 'out_matrix_file', invert_EPI_N,
                                   'in_file')
        epi_to_T1_workflow.connect(invert_EPI_N, 'out_file', output_node,
                                   'T1_EPI_matrix_file')

    return epi_to_T1_workflow
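
A minimal usage sketch for the FLIRT+FAST branch; the field names follow the docstring above.

epi_to_T1 = create_epi_to_T1_workflow('epi_to_T1', use_FS=False, do_FAST=True)
epi_to_T1.inputs.inputspec.EPI_space_file = 'example_func.nii.gz'
epi_to_T1.inputs.inputspec.T1_file = 'T1.nii.gz'
epi_to_T1.run()
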
Exemplo n.º 18
0
def create_whole_brain_GLM_workflow(analysis_info, name='GLM'):
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink

    from utils.GLM import fit_glm_nuisances_single_file, fit_FIR_nuisances_all_files
    imports = ['from utils.behavior import behavior_timing']

    input_node = pe.Node(
        IdentityInterface(fields=['preprocessed_directory', 'sub_id']),
        name='inputspec')

    # i/o node
    datasource_templates = dict(
        example_func='{sub_id}/reg/example_func.nii.gz',
        # predictable experiment has no physiology
        predictable_mapper_in_file='{sub_id}/psc/*-predictable_mapper_1_*.nii.gz',
        predictable_mapper_tsv_file='{sub_id}/events/tsv/*-predictable_mapper_1_*.tsv',
        predictable_mapper_mcf_par='{sub_id}/mcf/ext_motion_pars/*-predictable_mapper_1_*.par',
        # predictable reward experiment needs behavior files and moco but no physio
        predictable_in_files='{sub_id}/psc/*-predictable_reward_*.nii.gz',
        predictable_behavior_tsv_file='{sub_id}/events/tsv/*-predictable_reward_*.tsv',
        predictable_mcf_pars='{sub_id}/mcf/ext_motion_pars/*-predictable_reward_*.par',
        # unpredictable experiment has physiology but no behavior because: block design
        unpredictable_mapper_in_file='{sub_id}/psc/*-unpredictable_mapper_1_*.nii.gz',
        unpredictable_mapper_physio_files='{sub_id}/phys/evs/*-unpredictable_mapper_1_*.nii.gz',
        unpredictable_mapper_mcf_par='{sub_id}/mcf/ext_motion_pars/*-unpredictable_mapper_1_*.par',
        # unpredictable reward experiment needs behavior files, moco and physio
        unpredictable_in_files='{sub_id}/psc/*-unpredictable_reward_*.nii.gz',
        unpredictable_behavior_tsv_file='{sub_id}/events/tsv/*-unpredictable_reward_*.tsv',
        unpredictable_physio_files='{sub_id}/phys/evs/*-unpredictable_reward_*.nii.gz',
        unpredictable_mcf_pars='{sub_id}/mcf/ext_motion_pars/*-unpredictable_reward_*.par',
        # variable reward experiment needs behavior files, moco and physio
        variable_in_files='{sub_id}/psc/*-variable_*_reward_*.nii.gz',
        variable_behavior_tsv_file='{sub_id}/events/tsv/*-variable_*_reward_*.tsv',
        variable_physio_files='{sub_id}/phys/evs/*-variable_*_reward_*.nii.gz',
        variable_mcf_pars='{sub_id}/mcf/ext_motion_pars/*-variable_*_reward_*.par')
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    unpredictable_split_phys_list = pe.Node(
        Function(input_names=['slice_regressor_list', 'in_files'],
                 output_names=['slice_regressor_lists'],
                 function=sublists_for_phys),
        name='unpredictable_split_phys_list')

    variable_split_phys_list = pe.Node(Function(
        input_names=['slice_regressor_list', 'in_files'],
        output_names=['slice_regressor_lists'],
        function=sublists_for_phys),
                                       name='variable_split_phys_list')

    unpredictable_GLM = pe.Node(Function(
        input_names=[
            'in_file', 'slice_regressor_list', 'vol_regressors',
            'num_components', 'method', 'mapper', 'dm_upscale_factor',
            'tsv_behavior_file'
        ],
        output_names=['out_files'],
        function=fit_glm_nuisances_single_file),
                                name='unpredictable_GLM')
    unpredictable_GLM.inputs.mapper = 'unpredictable'
    unpredictable_GLM.inputs.num_components = 6
    unpredictable_GLM.inputs.method = 'PCA'
    unpredictable_GLM.inputs.dm_upscale_factor = 10

    predictable_GLM = pe.Node(Function(input_names=[
        'in_file', 'slice_regressor_list', 'vol_regressors', 'num_components',
        'method', 'mapper', 'dm_upscale_factor', 'tsv_behavior_file'
    ],
                                       output_names=['out_files'],
                                       function=fit_glm_nuisances_single_file),
                              name='predictable_GLM')
    predictable_GLM.inputs.mapper = 'predictable'
    predictable_GLM.inputs.num_components = 4  # no physio, just motion correction nuisances
    predictable_GLM.inputs.method = 'PCA'
    predictable_GLM.inputs.dm_upscale_factor = 10

    unpredictable_FIR = pe.Node(
        Function(input_names=[
            'experiment', 'example_func', 'in_files', 'slice_regressor_lists',
            'vol_regressor_list', 'behavior_file_list', 'fir_frequency',
            'fir_interval', 'num_components', 'method'
        ],
                 output_names=['out_files'],
                 function=fit_FIR_nuisances_all_files,
                 imports=imports),
        name='unpredictable_FIR',
    )
    unpredictable_FIR.inputs.fir_frequency = analysis_info['fir_frequency']
    unpredictable_FIR.inputs.fir_interval = analysis_info['fir_interval']
    unpredictable_FIR.inputs.num_components = 6
    unpredictable_FIR.inputs.method = 'PCA'
    unpredictable_FIR.inputs.experiment = 'unpredictable'

    predictable_FIR = pe.Node(Function(input_names=[
        'experiment', 'example_func', 'in_files', 'slice_regressor_lists',
        'vol_regressor_list', 'behavior_file_list', 'fir_frequency',
        'fir_interval', 'num_components', 'method'
    ],
                                       output_names=['out_files'],
                                       function=fit_FIR_nuisances_all_files,
                                       imports=imports),
                              name='predictable_FIR')
    predictable_FIR.inputs.fir_frequency = analysis_info['fir_frequency']
    predictable_FIR.inputs.fir_interval = analysis_info['fir_interval']
    predictable_FIR.inputs.num_components = 6
    predictable_FIR.inputs.method = 'PCA'
    predictable_FIR.inputs.experiment = 'predictable'
    predictable_FIR.inputs.slice_regressor_lists = [[]]  # no physio regressors

    variable_FIR = pe.Node(Function(input_names=[
        'experiment', 'example_func', 'in_files', 'slice_regressor_lists',
        'vol_regressor_list', 'behavior_file_list', 'fir_frequency',
        'fir_interval', 'num_components', 'method'
    ],
                                    output_names=['out_files'],
                                    function=fit_FIR_nuisances_all_files,
                                    imports=imports),
                           name='variable_FIR')
    variable_FIR.inputs.fir_frequency = analysis_info['fir_frequency']
    variable_FIR.inputs.fir_interval = analysis_info['fir_interval']
    variable_FIR.inputs.num_components = 6
    variable_FIR.inputs.method = 'PCA'
    variable_FIR.inputs.experiment = 'variable'

    # the actual top-level workflow
    whole_brain_analysis_workflow = pe.Workflow(name=name)

    whole_brain_analysis_workflow.connect(input_node, 'preprocessed_directory',
                                          datasource, 'base_directory')
    whole_brain_analysis_workflow.connect(input_node, 'sub_id', datasource,
                                          'sub_id')

    # predictable mapper GLM
    whole_brain_analysis_workflow.connect(datasource,
                                          'predictable_mapper_in_file',
                                          predictable_GLM, 'in_file')
    whole_brain_analysis_workflow.connect(datasource,
                                          'predictable_mapper_mcf_par',
                                          predictable_GLM, 'vol_regressors')
    whole_brain_analysis_workflow.connect(datasource,
                                          'predictable_mapper_tsv_file',
                                          predictable_GLM, 'tsv_behavior_file')

    # predictable reward FIR
    whole_brain_analysis_workflow.connect(datasource, 'predictable_in_files',
                                          predictable_FIR, 'in_files')
    whole_brain_analysis_workflow.connect(datasource, 'predictable_mcf_pars',
                                          predictable_FIR,
                                          'vol_regressor_list')
    whole_brain_analysis_workflow.connect(datasource,
                                          'predictable_behavior_tsv_file',
                                          predictable_FIR,
                                          'behavior_file_list')
    whole_brain_analysis_workflow.connect(datasource, 'example_func',
                                          predictable_FIR, 'example_func')

    # unpredictable mapper GLM
    whole_brain_analysis_workflow.connect(datasource,
                                          'unpredictable_mapper_in_file',
                                          unpredictable_GLM, 'in_file')
    whole_brain_analysis_workflow.connect(datasource,
                                          'unpredictable_mapper_mcf_par',
                                          unpredictable_GLM, 'vol_regressors')
    whole_brain_analysis_workflow.connect(datasource,
                                          'unpredictable_mapper_physio_files',
                                          unpredictable_GLM,
                                          'slice_regressor_list')

    # unpredictable reward FIR; first split the 1D slice regressor list to 2D
    whole_brain_analysis_workflow.connect(datasource,
                                          'unpredictable_physio_files',
                                          unpredictable_split_phys_list,
                                          'slice_regressor_list')
    whole_brain_analysis_workflow.connect(datasource, 'unpredictable_in_files',
                                          unpredictable_split_phys_list,
                                          'in_files')
    whole_brain_analysis_workflow.connect(unpredictable_split_phys_list,
                                          'slice_regressor_lists',
                                          unpredictable_FIR,
                                          'slice_regressor_lists')

    whole_brain_analysis_workflow.connect(datasource, 'unpredictable_in_files',
                                          unpredictable_FIR, 'in_files')
    whole_brain_analysis_workflow.connect(datasource, 'unpredictable_mcf_pars',
                                          unpredictable_FIR,
                                          'vol_regressor_list')
    whole_brain_analysis_workflow.connect(datasource,
                                          'unpredictable_behavior_tsv_file',
                                          unpredictable_FIR,
                                          'behavior_file_list')
    whole_brain_analysis_workflow.connect(datasource, 'example_func',
                                          unpredictable_FIR, 'example_func')

    # variable reward FIR; first split the 1D slice regressor list to 2D
    whole_brain_analysis_workflow.connect(datasource, 'variable_physio_files',
                                          variable_split_phys_list,
                                          'slice_regressor_list')
    whole_brain_analysis_workflow.connect(datasource, 'variable_in_files',
                                          variable_split_phys_list, 'in_files')
    whole_brain_analysis_workflow.connect(variable_split_phys_list,
                                          'slice_regressor_lists',
                                          variable_FIR,
                                          'slice_regressor_lists')

    whole_brain_analysis_workflow.connect(datasource, 'variable_in_files',
                                          variable_FIR, 'in_files')
    whole_brain_analysis_workflow.connect(datasource, 'variable_mcf_pars',
                                          variable_FIR, 'vol_regressor_list')
    whole_brain_analysis_workflow.connect(datasource,
                                          'variable_behavior_tsv_file',
                                          variable_FIR, 'behavior_file_list')
    whole_brain_analysis_workflow.connect(datasource, 'example_func',
                                          variable_FIR, 'example_func')

    # datasink
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    whole_brain_analysis_workflow.connect(input_node, 'preprocessed_directory',
                                          datasink, 'base_directory')
    whole_brain_analysis_workflow.connect(input_node, 'sub_id', datasink,
                                          'container')

    whole_brain_analysis_workflow.connect(predictable_GLM, 'out_files',
                                          datasink, 'GLM.@predictable_GLM')
    whole_brain_analysis_workflow.connect(predictable_FIR, 'out_files',
                                          datasink, 'GLM.@predictable_FIR')
    whole_brain_analysis_workflow.connect(unpredictable_GLM, 'out_files',
                                          datasink, 'GLM.@unpredictable_GLM')
    whole_brain_analysis_workflow.connect(unpredictable_FIR, 'out_files',
                                          datasink, 'GLM.@unpredictable_FIR')
    whole_brain_analysis_workflow.connect(variable_FIR, 'out_files', datasink,
                                          'GLM.@variable_FIR')

    return whole_brain_analysis_workflow
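
A minimal usage sketch (not part of the original source): analysis_info must at least carry the FIR keys read above; all values and paths are hypothetical placeholders.

analysis_info = {'fir_frequency': 4, 'fir_interval': [-2.0, 15.0]}  # hypothetical values
glm_wf = create_whole_brain_GLM_workflow(analysis_info, name='GLM')
glm_wf.inputs.inputspec.preprocessed_directory = '/data/preprocessed'  # hypothetical path
glm_wf.inputs.inputspec.sub_id = 'sub-001'                             # hypothetical ID
# glm_wf.run('MultiProc', plugin_args={'n_procs': 4})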
Exemplo n.º 19
def build_netmat(SinkTag="connectivity", wf_name="build_network"):
    ########################################################################
    # Extract timeseries
    ########################################################################

    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    from nipype.interfaces.utility import Function
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    import os

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Identity mapping for input variables
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'timeseries',  #contains labels
            'modules',  # optional
            'measure',
            'atlas'  # optional, only for plotting purposes
        ]),
        name='inputspec')
    inputspec.inputs.atlas = False  # default value
    inputspec.inputs.measure = "partial correlation"

    # This is not a MapNode: it takes all the subject-level regional timeseries
    # in a list and does population-level modelling (e.g. if measure == "tangent")
    estimate_network_mtx = pe.Node(interface=Function(
        input_names=['timeseries_list', 'modules', 'measure'],
        output_names=['mean_mtx', 'subject_matrix_list'],
        function=netmat),
                                   name='estimate_network_mtx')

    matrix_qc_mean = qc.matrixQC("group_mean_matrix", tag=wf_name + "_")
    matrix_qc = qc.matrixQC("matrices", tag=wf_name)

    # Save outputs which are important
    ds_meanmat = pe.Node(interface=io.DataSink(), name='ds_meanmat')
    ds_meanmat.inputs.base_directory = SinkDir
    ds_meanmat.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    # Save outputs which are important
    ds_mat = pe.Node(interface=io.DataSink(), name='ds_mats')
    ds_mat.inputs.base_directory = SinkDir
    ds_mat.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    analysisflow = pe.Workflow(wf_name)
    analysisflow.connect(inputspec, 'timeseries', estimate_network_mtx,
                         'timeseries_list')
    analysisflow.connect(inputspec, 'measure', estimate_network_mtx, 'measure')

    analysisflow.connect(estimate_network_mtx, 'mean_mtx', ds_meanmat,
                         'mean_connectivity_mat')
    analysisflow.connect(estimate_network_mtx, 'subject_matrix_list', ds_mat,
                         'connectivity_matrices')

    analysisflow.connect(estimate_network_mtx, 'mean_mtx', matrix_qc_mean,
                         'inputspec.matrix_file')
    analysisflow.connect(inputspec, 'modules', matrix_qc_mean,
                         'inputspec.modules')
    analysisflow.connect(inputspec, 'atlas', matrix_qc_mean, 'inputspec.atlas')

    analysisflow.connect(estimate_network_mtx, 'subject_matrix_list',
                         matrix_qc, 'inputspec.matrix_file')
    analysisflow.connect(inputspec, 'modules', matrix_qc, 'inputspec.modules')
    analysisflow.connect(inputspec, 'atlas', matrix_qc, 'inputspec.atlas')

    return analysisflow
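
A minimal usage sketch (not part of the original source); the timeseries file names below are hypothetical placeholders.

netmat_wf = build_netmat(SinkTag="connectivity", wf_name="build_network")
netmat_wf.inputs.inputspec.timeseries = ['sub-001_ts.tsv', 'sub-002_ts.tsv']  # hypothetical paths
netmat_wf.inputs.inputspec.measure = 'tangent'  # overrides the "partial correlation" default
# netmat_wf.run()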
Exemplo n.º 20
def func2mni(stdreg,
             carpet_plot="",
             wf_name='func2mni',
             SinkTag="func_preproc"):
    """
    stdreg: either globals._RegType_.ANTS or globals._RegType_.FSL (do default value to make sure the user has to decide explicitly)

    Transaform 4D functional image to MNI space.

    carpet_plot: string specifying the tag parameter for carpet plot of the standardized MRI measurement
            (default is "": no carpet plot)
            if not "", inputs atlaslabels and confounds should be defined (it might work with defaults, though)

    Workflow inputs:
    :param func
    :param linear_reg_mtrx
    :param nonlinear_reg_mtrx
    :param reference_brain
    :param atlas (optional)
    :param confounds (optional)
    :param confound_names (optional)


    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Balint Kincses
    [email protected]
    2018


    """
    import os
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.ants as ants
    from nipype.interfaces.c3 import C3dAffineTool
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'func',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'linear_reg_mtrx',
            'nonlinear_reg_mtrx',
            'reference_brain',
            'atlas',
            'confounds',
            'confound_names'
        ]),
        name='inputspec')

    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz'

    inputspec.inputs.reference_brain = globals._FSLDIR_ + "/data/standard/MNI152_T1_3mm_brain.nii.gz"  #3mm by default
    # TODO: this does not work with the iterfield definition for ref_file below:
    # TODO: it should be specified in a function argument whether it should be iterated
    # TODO_ready: ANTS
    # TODO: make the resampling voxel size for func parametrizable

    # apply transformation matrices
    if stdreg == globals._RegType_.FSL:
        applywarp = pe.MapNode(interface=fsl.ApplyWarp(interp="spline"),
                               iterfield=['in_file', 'field_file', 'premat'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni", wf_name + "_FSL", overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # for the 2mm template
    else:  #ANTs
        # the source file for C3dAffineTool must not be 4D, so we extract one example volume
        myonevol = onevol.onevol_workflow()
        # concat premat and ants transform
        bbr2ants = pe.MapNode(
            interface=C3dAffineTool(fsl2ras=True, itk_transform=True),
            iterfield=['source_file', 'transform_file',
                       'reference_file'],  # output: 'itk_transform'
            name="bbr2ants")
        # concatenate the transforms into a list
        trflist = pe.MapNode(interface=Function(
            input_names=['trf_first', 'trf_second'],
            output_names=['trflist'],
            function=transformlist),
                             iterfield=['trf_first', 'trf_second'],
                             name="collect_trf")

        applywarp = pe.MapNode(interface=ants.ApplyTransforms(
            interpolation="BSpline", input_image_type=3),
                               iterfield=['input_image', 'transforms'],
                               name='applywarp')
        myqc = qc.vol2png("func2mni",
                          wf_name + "_ANTS3",
                          overlayiterated=False)
        myqc.inputs.slicer.image_width = 500  # for the 2mm template
        myqc.inputs.slicer.threshold_edges = 0.1  # for the 2mm template

    if carpet_plot:
        fmri_qc = qc.fMRI2QC("carpet_plots", tag=carpet_plot)

    outputspec = pe.Node(utility.IdentityInterface(fields=['func_std']),
                         name='outputspec')

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", wf_name + ".nii.gz")]

    analysisflow = pe.Workflow(wf_name)
    analysisflow.base_dir = '.'
    if stdreg == globals._RegType_.FSL:
        analysisflow.connect(inputspec, 'func', applywarp, 'in_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', applywarp, 'premat')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', applywarp,
                             'field_file')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'ref_file')
        analysisflow.connect(applywarp, 'out_file', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'out_file', myqc, 'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'out_file', ds_nii, 'func2mni')
    else:  # ANTs
        analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', bbr2ants,
                             'source_file')
        analysisflow.connect(inputspec, 'linear_reg_mtrx', bbr2ants,
                             'transform_file')
        analysisflow.connect(inputspec, 'anat', bbr2ants, 'reference_file')
        analysisflow.connect(bbr2ants, 'itk_transform', trflist, 'trf_first')
        analysisflow.connect(inputspec, 'nonlinear_reg_mtrx', trflist,
                             'trf_second')
        analysisflow.connect(trflist, 'trflist', applywarp, 'transforms')
        analysisflow.connect(inputspec, 'func', applywarp, 'input_image')
        analysisflow.connect(inputspec, 'reference_brain', applywarp,
                             'reference_image')

        analysisflow.connect(applywarp, 'output_image', outputspec, 'func_std')
        analysisflow.connect(applywarp, 'output_image', myqc,
                             'inputspec.bg_image')
        analysisflow.connect(inputspec, 'reference_brain', myqc,
                             'inputspec.overlay_image')
        analysisflow.connect(applywarp, 'output_image', ds_nii, 'func2mni')

    if carpet_plot:
        if stdreg == globals._RegType_.FSL:
            analysisflow.connect(applywarp, 'out_file', fmri_qc,
                                 'inputspec.func')
        else:  # ANTs
            analysisflow.connect(applywarp, 'output_image', fmri_qc,
                                 'inputspec.func')

        analysisflow.connect(inputspec, 'atlas', fmri_qc, 'inputspec.atlas')
        analysisflow.connect(inputspec, 'confounds', fmri_qc,
                             'inputspec.confounds')

    return analysisflow
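
A minimal usage sketch (not part of the original source): stdreg must be chosen explicitly, as the docstring demands; all paths are hypothetical placeholders.

import PUMI.utils.globals as globals

f2m = func2mni(stdreg=globals._RegType_.FSL, wf_name='func2mni')
f2m.inputs.inputspec.func = ['func.nii.gz']                         # hypothetical path
f2m.inputs.inputspec.linear_reg_mtrx = ['func2anat.mat']            # hypothetical path
f2m.inputs.inputspec.nonlinear_reg_mtrx = ['anat2mni_warp.nii.gz']  # hypothetical path
# reference_brain and atlas fall back to the FSL defaults set in inputspec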
Exemplo n.º 21
def create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti'):
    """Summary
    
    Parameters
    ----------
    analysis_info : TYPE
        Description
    name : str, optional
        Description
    
    Returns
    -------
    TYPE
        Description
    """
    import os.path as op
    import nipype.pipeline as pe
    import tempfile
    import glob
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from nipype.interfaces.ants import ApplyTransforms
    from bids.grabbids import BIDSLayout

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.filtering.nodes import Savgol_filter, Savgol_filter_confounds
    from spynoza.conversion.nodes import psc
    from spynoza.utils import get_scaninfo, pickfirst
    from utils import mask_nii_2_hdf5, nistats_confound_glm, mask_to_tsv

    input_node = pe.Node(IdentityInterface(
        fields=['bids_directory', 'fmriprep_directory', 'output_directory', 'mask_directory', 'sub_id']), name='inputspec')

    BIDSNiiGrabber = pe.Node(Function(function=get_niftis, input_names=["subject_id",
                                                                        "data_dir", "task", "space"],
                                      output_names=["nii_files"]), name="BIDSNiiGrabber")
    BIDSNiiGrabber.inputs.space = 'mni'

    BIDSEventsGrabber = pe.Node(Function(function=get_events, input_names=["subject_id",
                                                                           "data_dir", "task"],
                                         output_names=["event_files"]), name="BIDSEventsGrabber")
    
    BIDSConfoundsGrabber = pe.Node(Function(function=get_confounds, input_names=["subject_id",
                                                                                 "data_dir", "task"],
                                            output_names=["confounds_tsv_files"]), name="BIDSConfoundsGrabber")
    
    MaskGrabber = pe.Node(Function(function=get_masks, input_names=["mask_directory"],
                                   output_names=["mask_files"]), name="MaskGrabber")

    HDF5PSCMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_psc_masker')
    HDF5PSCMasker.inputs.folder_alias = 'psc'
    HDF5PSCMasker.inputs.hdf5_file = op.join(tempfile.mkdtemp(), 'roi.h5')

    HDF5PSCNuisMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                         function=mask_nii_2_hdf5),
                                name='hdf5_psc_nuis_masker')
    HDF5PSCNuisMasker.inputs.folder_alias = 'psc_nuis'

    # HDF5StatsMasker = pe.Node(Function(input_names = ['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names = ['hdf5_file'],
    #                             function = mask_nii_2_hdf5),
    #                             name = 'hdf5_stats_masker')
    # HDF5StatsMasker.inputs.folder_alias = 'stats'

    HDF5ROIMasker = pe.Node(Function(input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'], output_names=['hdf5_file'],
                                     function=mask_nii_2_hdf5),
                            name='hdf5_roi_masker')
    HDF5ROIMasker.inputs.folder_alias = 'rois'

    ConfoundGLM = pe.MapNode(Function(input_names=['nifti_file', 'confounds_file', 'which_confounds'], output_names=['output_pdf', 'output_nifti'],
                                      function=nistats_confound_glm),
                             name='nistats_confound_glm', iterfield=["nifti_file", "confounds_file"])
    ConfoundGLM.inputs.which_confounds = analysis_info['nuisance_columns']

    # VolTransNode = pe.MapNode(interface=fsl.preprocess.ApplyXFM(apply_xfm=False, apply_isoxfm=True, interp='sinc'),
    #                                                     name='vol_trans', iterfield = ['in_file'])

    # VolTransNode = pe.MapNode(interface=ApplyTransforms(transforms='identity', interpolation='LanczosWindowedSinc'),
    #                                                     name='vol_trans', iterfield = ['input_image'])

    ThreshNode = pe.MapNode(fsl.Threshold(thresh=analysis_info['MNI_mask_threshold'], args='-bin', output_datatype='int'),
                            name='thresh', iterfield=['in_file'])

    TSVMasker = pe.MapNode(Function(input_names=['in_file', 'mask_files'],
                                    output_names=['out_file'],
                                    function=mask_to_tsv),
                           iterfield=['in_file'],
                           name='tsv_masker')

    ROIResampler = pe.Node(Function(input_names=['mni_roi_files', 'mni_epi_space_file'], output_names=['output_roi_files'],
                                    function=resample_rois),
                           name='roi_resampler')

    sgfilter = pe.MapNode(interface=Savgol_filter,
                          name='sgfilter',
                          iterfield=['in_file'])
    sgfilter_confounds = pe.MapNode(interface=Savgol_filter_confounds,
                                    name='sgfilter_confounds',
                                    iterfield=['confounds'])

    # Both fmri data and nuisances are filtered with identical parameters
    sgfilter.inputs.polyorder = sgfilter_confounds.inputs.polyorder = analysis_info[
        'sgfilter_polyorder']
    sgfilter.inputs.deriv = sgfilter_confounds.inputs.deriv = analysis_info['sgfilter_deriv']
    sgfilter.inputs.window_length = sgfilter_confounds.inputs.window_length = analysis_info[
        'sgfilter_window_length']
    sgfilter.inputs.tr = sgfilter_confounds.inputs.tr = analysis_info['RepetitionTime']

    # set the psc function
    psc.inputs.func = analysis_info['psc_function']

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    VWM_anti_pp_workflow = pe.Workflow(name=name)

    # data source
    VWM_anti_pp_workflow.connect(
        input_node, 'bids_directory', BIDSEventsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSEventsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSNiiGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSNiiGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'fmriprep_directory', BIDSConfoundsGrabber, 'data_dir')
    VWM_anti_pp_workflow.connect(input_node, 'sub_id',
                              BIDSConfoundsGrabber, 'subject_id')
    VWM_anti_pp_workflow.connect(
        input_node, 'mask_directory', MaskGrabber, 'mask_directory')

    # filter and psc
    VWM_anti_pp_workflow.connect(BIDSNiiGrabber, 'nii_files', sgfilter, 'in_file')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    # do the same filtering on confounds
    VWM_anti_pp_workflow.connect(BIDSConfoundsGrabber, 'confounds_tsv_files', sgfilter_confounds, 'confounds')

    # cleanup GLM
    VWM_anti_pp_workflow.connect(psc, 'out_file', ConfoundGLM, 'nifti_file')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', ConfoundGLM, 'confounds_file')

    # preparing masks; the ANTs and fsl alternatives below are not working correctly
    # ANTs
    # VWM_anti_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference_image')
    # VWM_anti_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'input_image')
    # fsl
    # VWM_anti_pp_workflow.connect(BIDSNiiGrabber, ('nii_files', pickfirst), VolTransNode, 'reference')
    # VWM_anti_pp_workflow.connect(MaskGrabber, 'mask_files', VolTransNode, 'in_file')
    # VWM_anti_pp_workflow.connect(VolTransNode, 'output_image', ThreshNode, 'in_file')

    VWM_anti_pp_workflow.connect(
        BIDSNiiGrabber, ('nii_files', pickfirst), ROIResampler, 'mni_epi_space_file')
    VWM_anti_pp_workflow.connect(
        MaskGrabber, 'mask_files', ROIResampler, 'mni_roi_files')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', ThreshNode, 'in_file')

    # masking data
    VWM_anti_pp_workflow.connect(psc, 'out_file', HDF5PSCMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5PSCMasker, 'mask_files')

    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', HDF5PSCNuisMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5PSCNuisMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCMasker, 'hdf5_file', HDF5PSCNuisMasker, 'hdf5_file')

    # needs stats before we can do a masker....
    # VWM_anti_pp_workflow.connect(VolTransNode, 'out_file', HDF5StatsMasker, 'in_files')
    # VWM_anti_pp_workflow.connect(ThreshNode, 'out_file', HDF5StatsMasker, 'mask_files')
    # VWM_anti_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', HDF5StatsMasker, 'hdf5_file')

    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', HDF5ROIMasker, 'in_files')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file',
                              HDF5ROIMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        HDF5PSCNuisMasker, 'hdf5_file', HDF5ROIMasker, 'hdf5_file')

    # mask to .tsv, for one timecourse per roi
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', TSVMasker, 'mask_files')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', TSVMasker, 'in_file')

    # set up output folder
    VWM_anti_pp_workflow.connect(
        input_node, 'output_directory', datasink, 'base_directory')

    # connect all outputs to datasink
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_nifti', datasink, 'confound_glm')
    VWM_anti_pp_workflow.connect(
        BIDSEventsGrabber, 'event_files', datasink, 'events')
    VWM_anti_pp_workflow.connect(sgfilter, 'out_file', datasink, 'sg_filter')
    VWM_anti_pp_workflow.connect(
        sgfilter_confounds, 'out_file', datasink, 'sg_filter_confound')
    VWM_anti_pp_workflow.connect(psc, 'out_file', datasink, 'psc')
    VWM_anti_pp_workflow.connect(
        ROIResampler, 'output_roi_files', datasink, 'masks_f')
    VWM_anti_pp_workflow.connect(ThreshNode, 'out_file', datasink, 'masks_b')
    VWM_anti_pp_workflow.connect(TSVMasker, 'out_file', datasink, 'tsv')
    VWM_anti_pp_workflow.connect(HDF5PSCNuisMasker, 'hdf5_file', datasink, 'h5')
    VWM_anti_pp_workflow.connect(
        ConfoundGLM, 'output_pdf', datasink, 'confound_glm_report')

    return VWM_anti_pp_workflow
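
A minimal usage sketch (not part of the original source): the dict keys below are exactly the ones the nodes above read; all values are hypothetical placeholders.

analysis_info = {
    'nuisance_columns': ['X', 'Y', 'Z', 'RotX', 'RotY', 'RotZ'],  # hypothetical confound names
    'MNI_mask_threshold': 0.5,
    'sgfilter_polyorder': 3,
    'sgfilter_deriv': 0,
    'sgfilter_window_length': 120,
    'RepetitionTime': 2.0,
    'psc_function': 'median',
}
vwm_wf = create_VWM_anti_pp_workflow(analysis_info, name='VWM-anti')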
Exemplo n.º 22
def create_retroicor_workflow(name='retroicor', order_or_timing='order'):
    """
    Creates RETROICOR physiological noise regressors (see the usage sketch
    after this function).

    Inputs::
        inputspec.in_files - the EPI nifti files
        inputspec.phys_files - the physiology .log files acquired together with the EPI sequence
    Outputs::
        outputspec.evs - the regressor (EV) files, one set per EPI run
    """
    
    # Define nodes:
    input_node = pe.Node(niu.IdentityInterface(fields=['in_files',
                                                    'phys_files',
                                                    'nr_dummies',
                                                    'MB_factor', 
                                                    'tr',
                                                    'slice_direction',
                                                    'phys_sample_rate',
                                                    'slice_timing',
                                                    'slice_order',
                                                    'hr_rvt',
                                                    ]), name='inputspec')

    # the slice time preprocessing node before we go into popp (PreparePNM)
    slice_times_from_gradients = pe.MapNode(niu.Function(input_names=['in_file', 'phys_file', 'nr_dummies', 'MB_factor', 'sample_rate'], 
                        output_names=['out_file', 'fig_file'], 
                        function=_distill_slice_times_from_gradients), name='slice_times_from_gradients', iterfield = ['in_file','phys_file'])
    
    slice_times_to_txt_file = pe.Node(niu.Function(input_names=['slice_times'], 
                        output_names=['out_file'], 
                        function=_slice_times_to_txt_file), name='slice_times_to_txt_file')

    pnm_prefixer = pe.MapNode(niu.Function(input_names=['filename'], 
                        output_names=['out_string'], 
                        function=_preprocess_nii_files_to_pnm_evs_prefix), name='pnm_prefixer', iterfield = ['filename'])

    prepare_pnm = pe.MapNode(PreparePNM(), name='prepare_pnm', iterfield = ['in_file'])

    pnm_evs = pe.MapNode(PNMtoEVs(), name='pnm_evs', iterfield = ['functional_epi', 'cardiac', 'resp', 'hr', 'rvt', 'prefix'])

    # Define output node
    output_node = pe.Node(niu.IdentityInterface(fields=['new_phys', 'fig_file', 'evs']), name='outputspec')

    ########################################################################################
    # workflow
    ########################################################################################

    retroicor_workflow = pe.Workflow(name=name)
    
    # align phys-log data to nifti 
    retroicor_workflow.connect(input_node, 'in_files', slice_times_from_gradients, 'in_file')
    retroicor_workflow.connect(input_node, 'phys_files', slice_times_from_gradients, 'phys_file')
    retroicor_workflow.connect(input_node, 'nr_dummies', slice_times_from_gradients, 'nr_dummies')
    retroicor_workflow.connect(input_node, 'MB_factor', slice_times_from_gradients, 'MB_factor')
    retroicor_workflow.connect(input_node, 'phys_sample_rate', slice_times_from_gradients, 'sample_rate')

    # conditional here, for the creation of a separate slice timing file if order_or_timing is 'timing'
    # order_or_timing can also be 'order'
    if order_or_timing == 'timing':
        retroicor_workflow.connect(input_node, 'slice_timing', slice_times_to_txt_file, 'slice_times')
    
    # prepare pnm:
    retroicor_workflow.connect(input_node, 'phys_sample_rate', prepare_pnm, 'sampling_rate')
    retroicor_workflow.connect(input_node, 'tr', prepare_pnm, 'tr')
    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', prepare_pnm, 'in_file')
    retroicor_workflow.connect(input_node, 'hr_rvt', prepare_pnm, 'hr_rvt')
    
    # pnm evs:
    retroicor_workflow.connect(input_node, 'in_files', pnm_prefixer, 'filename')
    retroicor_workflow.connect(pnm_prefixer, 'out_string', pnm_evs, 'prefix')
    retroicor_workflow.connect(input_node, 'in_files', pnm_evs, 'functional_epi')
    retroicor_workflow.connect(input_node, 'slice_direction', pnm_evs, 'slice_dir')
    retroicor_workflow.connect(input_node, 'tr', pnm_evs, 'tr')
    if order_or_timing == 'timing':
        retroicor_workflow.connect(slice_times_to_txt_file, 'out_file', pnm_evs, 'slice_timing')
    elif order_or_timing == 'order':
        retroicor_workflow.connect(input_node, 'slice_order', pnm_evs, 'slice_order')
    retroicor_workflow.connect(prepare_pnm, 'card', pnm_evs, 'cardiac')
    retroicor_workflow.connect(prepare_pnm, 'resp', pnm_evs, 'resp')
    retroicor_workflow.connect(prepare_pnm, 'hr', pnm_evs, 'hr')
    retroicor_workflow.connect(prepare_pnm, 'rvt', pnm_evs, 'rvt')

    retroicor_workflow.connect(slice_times_from_gradients, 'out_file', output_node, 'new_phys')
    retroicor_workflow.connect(slice_times_from_gradients, 'fig_file', output_node, 'fig_file')
    retroicor_workflow.connect(pnm_evs, 'evs', output_node, 'evs')

    return retroicor_workflow
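
A minimal usage sketch (not part of the original source), standing in for the docstring's example; all values are hypothetical placeholders.

retro = create_retroicor_workflow(name='retroicor', order_or_timing='order')
retro.inputs.inputspec.in_files = ['run1_bold.nii.gz']   # hypothetical path
retro.inputs.inputspec.phys_files = ['run1_physio.log']  # hypothetical path
retro.inputs.inputspec.nr_dummies = 5                    # hypothetical value
retro.inputs.inputspec.MB_factor = 3                     # hypothetical value
retro.inputs.inputspec.tr = 1.5                          # hypothetical value
retro.inputs.inputspec.phys_sample_rate = 496            # hypothetical value
retro.inputs.inputspec.slice_order = 'up'                # hypothetical value
retro.inputs.inputspec.hr_rvt = True
# retro.run()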
Exemplo n.º 23
def create_preprocessing_workflow(analysis_params, name='yesno_3T'):
    import os.path as op
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink
    from IPython import embed as shell

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from spynoza.utils import get_scaninfo, pickfirst, average_over_runs, set_nifti_intercept_slope
    from spynoza.uniformization.workflows import create_non_uniformity_correct_4D_file
    from spynoza.unwarping.b0.workflows import create_B0_workflow
    from spynoza.motion_correction.workflows import create_motion_correction_workflow
    from spynoza.registration.workflows import create_registration_workflow
    from spynoza.filtering.nodes import sgfilter
    from spynoza.conversion.nodes import psc
    from spynoza.denoising.retroicor.workflows import create_retroicor_workflow
    from spynoza.masking.workflows import create_masks_from_surface_workflow
    from spynoza.glm.nodes import fit_nuisances

    ########################################################################################
    # nodes
    ########################################################################################

    input_node = pe.Node(
        IdentityInterface(fields=[
            'task',  # main
            'sub_id',  # main
            'ses_id',  # main
            'raw_data_dir',  # main
            'output_directory',  # main
            'sub_FS_id',  # main
            'FS_subject_dir',  # motion correction
            'RepetitionTime',  # motion correction
            'which_file_is_EPI_space',  # motion correction
            'standard_file',  # registration
            'topup_conf_file',  # unwarping
            'EchoTimeDiff',  # unwarping
            'EpiFactor',  # unwarping
            'SenseFactor',  # unwarping
            'WaterFatShift',  # unwarping
            'PhaseEncodingDirection',  # unwarping
            'EchoSpacing',  # unwarping
            'psc_func',  # percent signal change
            'sg_filter_window_length',  # temporal filtering
            'sg_filter_order',  # temporal filtering
            'SliceEncodingDirection',  # retroicor
            'PhysiologySampleRate',  # retroicor
            'SliceTiming',  # retroicor
            'SliceOrder',  # retroicor
            'NumberDummyScans',  # retroicor
            'MultiBandFactor',  # retroicor
            'hr_rvt',  # retroicor
            'av_func',  # extra
            'EchoTime',  # extra
            'bd_design_matrix_file',  # extra
        ]),
        name='inputspec')

    for param in analysis_params:
        setattr(input_node.inputs, param, analysis_params[param])

    # i/o node
    datasource_templates = dict(
        func='{sub_id}/{ses_id}/func/{sub_id}_{ses_id}_task-{task}*_bold.nii.gz',
        magnitude='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*magnitude.nii.gz',
        phasediff='{sub_id}/{ses_id}/fmap/{sub_id}_{ses_id}*phasediff.nii.gz',
        # physio='{sub_id}/{ses_id}/func/*{task}*physio.*',
        # events='{sub_id}/{ses_id}/func/*{task}*_events.pickle',
        # eye='{sub_id}/{ses_id}/func/*{task}*_eyedata.edf'
    )
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    output_node = pe.Node(IdentityInterface(
        fields=(['temporal_filtered_files', 'percent_signal_change_files'])),
                          name='outputspec')

    # nodes for setting the slope/intercept of incoming niftis to (1, 0)
    # this is apparently necessary for the B0 map files
    int_slope_B0_magnitude = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_magnitude')
    int_slope_B0_phasediff = pe.Node(Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=set_nifti_intercept_slope),
                                     name='int_slope_B0_phasediff')

    # reorient nodes
    reorient_epi = pe.MapNode(interface=fsl.Reorient2Std(),
                              name='reorient_epi',
                              iterfield=['in_file'])
    reorient_B0_magnitude = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_magnitude')
    reorient_B0_phasediff = pe.Node(interface=fsl.Reorient2Std(),
                                    name='reorient_B0_phasediff')

    # bet_epi = pe.MapNode(interface=
    #     fsl.BET(frac=analysis_parameters['bet_f_value'], vertical_gradient = analysis_parameters['bet_g_value'],
    #             functional=True, mask = True), name='bet_epi', iterfield=['in_file'])

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    ########################################################################################
    # workflow
    ########################################################################################

    # the actual top-level workflow
    preprocessing_workflow = pe.Workflow(name=name)
    preprocessing_workflow.base_dir = op.join(analysis_params['base_dir'],
                                              'temp/')

    # data source
    preprocessing_workflow.connect(input_node, 'raw_data_dir', datasource,
                                   'base_directory')
    preprocessing_workflow.connect(input_node, 'sub_id', datasource, 'sub_id')
    preprocessing_workflow.connect(input_node, 'ses_id', datasource, 'ses_id')
    preprocessing_workflow.connect(input_node, 'task', datasource, 'task')

    # and data sink
    preprocessing_workflow.connect(input_node, 'output_directory', datasink,
                                   'base_directory')

    # BET (we don't do this, because we expect the raw data in the bids folder to be betted
    # already for anonymization purposes)
    # preprocessing_workflow.connect(datasource, 'func', bet_epi, 'in_file')

    # non-uniformity correction
    # preprocessing_workflow.connect(bet_epi, 'out_file', nuc, 'in_file')
    # preprocessing_workflow.connect(datasource, 'func', nuc, 'in_file')

    # reorient images
    preprocessing_workflow.connect(datasource, 'func', reorient_epi, 'in_file')
    preprocessing_workflow.connect(datasource, 'magnitude',
                                   reorient_B0_magnitude, 'in_file')
    preprocessing_workflow.connect(datasource, 'phasediff',
                                   reorient_B0_phasediff, 'in_file')
    preprocessing_workflow.connect(reorient_epi, 'out_file', datasink,
                                   'reorient')

    #B0 field correction:
    if analysis_params['B0_or_topup'] == 'B0':
        # set slope/intercept to unity for B0 map
        preprocessing_workflow.connect(reorient_B0_magnitude, 'out_file',
                                       int_slope_B0_magnitude, 'in_file')
        preprocessing_workflow.connect(reorient_B0_phasediff, 'out_file',
                                       int_slope_B0_phasediff, 'in_file')
        #B0 field correction:
        if 'EchoSpacing' in analysis_params:
            B0_wf = create_B0_workflow(name='B0', scanner='siemens')
            preprocessing_workflow.connect(input_node, 'EchoSpacing', B0_wf,
                                           'inputspec.echo_spacing')
        else:
            B0_wf = create_B0_workflow(name='B0', scanner='philips')
            preprocessing_workflow.connect(input_node, 'WaterFatShift', B0_wf,
                                           'inputspec.wfs')
            preprocessing_workflow.connect(input_node, 'EpiFactor', B0_wf,
                                           'inputspec.epi_factor')
        preprocessing_workflow.connect(input_node, 'SenseFactor', B0_wf,
                                       'inputspec.acceleration')
        preprocessing_workflow.connect(reorient_epi, 'out_file', B0_wf,
                                       'inputspec.in_files')
        preprocessing_workflow.connect(int_slope_B0_magnitude, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_mag')
        preprocessing_workflow.connect(int_slope_B0_phasediff, 'out_file',
                                       B0_wf, 'inputspec.fieldmap_pha')
        preprocessing_workflow.connect(input_node, 'EchoTimeDiff', B0_wf,
                                       'inputspec.te_diff')
        preprocessing_workflow.connect(input_node, 'PhaseEncodingDirection',
                                       B0_wf,
                                       'inputspec.phase_encoding_direction')
        preprocessing_workflow.connect(B0_wf, 'outputspec.field_coefs',
                                       datasink, 'B0.fieldcoef')
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files', datasink,
                                       'B0')

    # motion correction
    motion_proc = create_motion_correction_workflow(
        'moco', method=analysis_params['moco_method'])
    if analysis_params['B0_or_topup'] == 'B0':
        preprocessing_workflow.connect(B0_wf, 'outputspec.out_files',
                                       motion_proc, 'inputspec.in_files')
    elif analysis_params['B0_or_topup'] == 'neither':
        # bet_epi is commented out above (the raw data are assumed to be betted
        # already), so the reoriented EPIs go straight into motion correction
        preprocessing_workflow.connect(reorient_epi, 'out_file', motion_proc,
                                       'inputspec.in_files')
    preprocessing_workflow.connect(input_node, 'RepetitionTime', motion_proc,
                                   'inputspec.tr')
    preprocessing_workflow.connect(input_node, 'output_directory', motion_proc,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(input_node, 'which_file_is_EPI_space',
                                   motion_proc,
                                   'inputspec.which_file_is_EPI_space')

    # registration
    reg = create_registration_workflow(analysis_params, name='reg')
    preprocessing_workflow.connect(input_node, 'output_directory', reg,
                                   'inputspec.output_directory')
    preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file',
                                   reg, 'inputspec.EPI_space_file')
    preprocessing_workflow.connect(input_node, 'sub_FS_id', reg,
                                   'inputspec.freesurfer_subject_ID')
    preprocessing_workflow.connect(input_node, 'FS_subject_dir', reg,
                                   'inputspec.freesurfer_subject_dir')
    preprocessing_workflow.connect(input_node, 'standard_file', reg,
                                   'inputspec.standard_file')

    # temporal filtering
    preprocessing_workflow.connect(input_node, 'sg_filter_window_length',
                                   sgfilter, 'window_length')
    preprocessing_workflow.connect(input_node, 'sg_filter_order', sgfilter,
                                   'polyorder')
    preprocessing_workflow.connect(motion_proc,
                                   'outputspec.motion_corrected_files',
                                   sgfilter, 'in_file')
    preprocessing_workflow.connect(sgfilter, 'out_file', datasink, 'tf')

    # node for percent signal change
    preprocessing_workflow.connect(input_node, 'psc_func', psc, 'func')
    preprocessing_workflow.connect(sgfilter, 'out_file', psc, 'in_file')
    preprocessing_workflow.connect(psc, 'out_file', datasink, 'psc')

    # # retroicor functionality
    # if analysis_params['perform_physio'] == 1:
    #     retr = create_retroicor_workflow(name = 'retroicor', order_or_timing = analysis_params['retroicor_order_or_timing'])
    #
    #     # # retroicor can take the crudest form of epi file, so that it proceeds quickly
    #     preprocessing_workflow.connect(datasource, 'func', retr, 'inputspec.in_files')
    #     preprocessing_workflow.connect(datasource, 'physio', retr, 'inputspec.phys_files')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.nr_dummies', retr, 'inputspec.nr_dummies')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.MultiBandFactor', retr, 'inputspec.MB_factor')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.tr', retr, 'inputspec.tr')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceEncodingDirection', retr, 'inputspec.slice_direction')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceTiming', retr, 'inputspec.slice_timing')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.SliceOrder', retr, 'inputspec.slice_order')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.PhysiologySampleRate', retr, 'inputspec.phys_sample_rate')
    #     preprocessing_workflow.connect(input_node, 'analysis_params.hr_rvt', retr, 'inputspec.hr_rvt')
    #
    #     # fit nuisances from retroicor
    #     # preprocessing_workflow.connect(retr, 'outputspec.evs', fit_nuis, 'slice_regressor_list')
    #     # preprocessing_workflow.connect(motion_proc, 'outputspec.extended_motion_correction_parameters', fit_nuis, 'vol_regressors')
    #     # preprocessing_workflow.connect(psc, 'out_file', fit_nuis, 'in_file')
    #
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', av_r, 'in_files')
    #
    #     preprocessing_workflow.connect(retr, 'outputspec.new_phys', datasink, 'phys.log')
    #     preprocessing_workflow.connect(retr, 'outputspec.fig_file', datasink, 'phys.figs')
    #     preprocessing_workflow.connect(retr, 'outputspec.evs', datasink, 'phys.evs')
    #     # preprocessing_workflow.connect(fit_nuis, 'res_file', datasink, 'phys.res')
    #     # preprocessing_workflow.connect(fit_nuis, 'rsq_file', datasink, 'phys.rsq')
    #     # preprocessing_workflow.connect(fit_nuis, 'beta_file', datasink, 'phys.betas')
    #
    #     # preprocessing_workflow.connect(av_r, 'out_file', datasink, 'av_r')

    #
    # ########################################################################################
    # # masking stuff if doing mri analysis
    # ########################################################################################
    #
    #     all_mask_opds = ['dc'] + analysis_parameters[u'avg_subject_RS_label_folders']
    #     all_mask_lds = [''] + analysis_parameters[u'avg_subject_RS_label_folders']
    #
    #     # loop across different folders to mask
    #     # untested as yet.
    #     masking_list = []
    #     dilate_list = []
    #     for opd, label_directory in zip(all_mask_opds,all_mask_lds):
    #         dilate_list.append(
    #             pe.MapNode(interface=fsl.maths.DilateImage(
    #                 operation = 'mean', kernel_shape = 'sphere', kernel_size = analysis_parameters['dilate_kernel_size']),
    #                 name='dilate_'+label_directory, iterfield=['in_file']))
    #
    #         masking_list.append(create_masks_from_surface_workflow(name = 'masks_from_surface_'+label_directory))
    #
    #         masking_list[-1].inputs.inputspec.label_directory = label_directory
    #         masking_list[-1].inputs.inputspec.fill_thresh = 0.005
    #         masking_list[-1].inputs.inputspec.re = '*.label'
    #
    #         preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', masking_list[-1], 'inputspec.EPI_space_file')
    #         preprocessing_workflow.connect(input_node, 'output_directory', masking_list[-1], 'inputspec.output_directory')
    #         preprocessing_workflow.connect(input_node, 'FS_subject_dir', masking_list[-1], 'inputspec.freesurfer_subject_dir')
    #         preprocessing_workflow.connect(input_node, 'FS_ID', masking_list[-1], 'inputspec.freesurfer_subject_ID')
    #         preprocessing_workflow.connect(reg, 'rename_register.out_file', masking_list[-1], 'inputspec.reg_file')
    #
    #         preprocessing_workflow.connect(masking_list[-1], 'outputspec.masks', dilate_list[-1], 'in_file')
    #         preprocessing_workflow.connect(dilate_list[-1], 'out_file', datasink, 'masks.'+opd)
    #
    #     # # surface-based label import in to EPI space, but now for RS labels
    #     # these should have been imported to the subject's FS folder,
    #     # see scripts/annot_conversion.sh
    #     RS_masks_from_surface = create_masks_from_surface_workflow(name = 'RS_masks_from_surface')
    #     RS_masks_from_surface.inputs.inputspec.label_directory = analysis_parameters['avg_subject_label_folder']
    #     RS_masks_from_surface.inputs.inputspec.fill_thresh = 0.005
    #     RS_masks_from_surface.inputs.inputspec.re = '*.label'
    #
    #     preprocessing_workflow.connect(motion_proc, 'outputspec.EPI_space_file', RS_masks_from_surface, 'inputspec.EPI_space_file')
    #     preprocessing_workflow.connect(input_node, 'output_directory', RS_masks_from_surface, 'inputspec.output_directory')
    #     preprocessing_workflow.connect(input_node, 'FS_subject_dir', RS_masks_from_surface, 'inputspec.freesurfer_subject_dir')
    #     preprocessing_workflow.connect(input_node, 'FS_ID', RS_masks_from_surface, 'inputspec.freesurfer_subject_ID')
    #     preprocessing_workflow.connect(reg, 'rename_register.out_file', RS_masks_from_surface, 'inputspec.reg_file')
    #
    #     preprocessing_workflow.connect(RS_masks_from_surface, 'outputspec.masks', RS_dilate_cortex, 'in_file')
    #     preprocessing_workflow.connect(RS_dilate_cortex, 'out_file', datasink, 'masks.'+analysis_parameters['avg_subject_label_folder'])

    ########################################################################################
    # wrapping up, sending data to datasink
    ########################################################################################

    # preprocessing_workflow.connect(bet_epi, 'out_file', datasink, 'bet.epi')
    # preprocessing_workflow.connect(bet_epi, 'mask_file', datasink, 'bet.epimask')
    # preprocessing_workflow.connect(bet_topup, 'out_file', datasink, 'bet.topup')
    # preprocessing_workflow.connect(bet_topup, 'mask_file', datasink, 'bet.topupmask')

    # preprocessing_workflow.connect(nuc, 'out_file', datasink, 'nuc')
    # preprocessing_workflow.connect(sgfilter, 'out_file', datasink, 'tf')
    # preprocessing_workflow.connect(psc, 'out_file', datasink, 'psc')
    # preprocessing_workflow.connect(datasource, 'physio', datasink, 'phys')

    return preprocessing_workflow
Example No. 24
def create_compcor_workflow(name='compcor'):
    """ Creates A/T compcor workflow. """

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'fast_files', 'highres2epi_mat', 'n_comp_tcompcor',
        'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(
        fields=['tcompcor_file', 'acompcor_file', 'epi_mask']),
                          name='outputspec')

    extract_task = pe.MapNode(interface=Extract_task,
                              iterfield=['in_file'],
                              name='extract_task')

    rename_acompcor = pe.MapNode(interface=Rename(
        format_string='task-%(task)s_acompcor.tsv', keepext=True),
                                 iterfield=['task', 'in_file'],
                                 name='rename_acompcor')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    average_func = pe.MapNode(interface=fsl.maths.MeanImage(dimension='T'),
                              name='average_func',
                              iterfield=['in_file'])

    epi_mask = pe.MapNode(interface=fsl.BET(frac=.3,
                                            mask=True,
                                            no_output=True,
                                            robust=True),
                          iterfield=['in_file'],
                          name='epi_mask')

    wm2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                        iterfield=['reference'],
                        name='wm2epi')

    csf2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                         iterfield=['reference'],
                         name='csf2epi')

    erode_csf = pe.MapNode(interface=Erode_mask,
                           name='erode_csf',
                           iterfield=['epi_mask', 'in_file'])
    erode_csf.inputs.erosion_mm = 0
    erode_csf.inputs.epi_mask_erosion_mm = 30
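    # NB: the large (30 mm) epi-mask erosion on the CSF side yields the conservative
    # brain mask that tCompCor is fit on (see the tcompcor node below)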

    erode_wm = pe.MapNode(interface=Erode_mask,
                          name='erode_wm',
                          iterfield=['epi_mask', 'in_file'])

    erode_wm.inputs.erosion_mm = 6
    erode_wm.inputs.epi_mask_erosion_mm = 10

    merge_wm_and_csf_masks = pe.MapNode(Merge(2),
                                        name='merge_wm_and_csf_masks',
                                        iterfield=['in1', 'in2'])

    # This should be fit on the 30mm eroded mask from CSF
    tcompcor = pe.MapNode(TCompCor(components_file='tcompcor_comps.txt'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='tcompcor')

    # WM + CSF mask
    acompcor = pe.MapNode(ACompCor(components_file='acompcor_comps.txt',
                                   merge_method='union'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='acompcor')

    compcor_wf = pe.Workflow(name=name)
    compcor_wf.connect(input_node, 'in_file', extract_task, 'in_file')
    compcor_wf.connect(extract_task, 'task_name', rename_acompcor, 'task')
    compcor_wf.connect(acompcor, 'components_file', rename_acompcor, 'in_file')

    compcor_wf.connect(input_node, 'sub_id', datasink, 'container')
    compcor_wf.connect(input_node, 'output_directory', datasink,
                       'base_directory')

    compcor_wf.connect(input_node, ('fast_files', pick_wm), wm2epi, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', wm2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', wm2epi, 'in_matrix_file')

    compcor_wf.connect(input_node, ('fast_files', pick_csf), csf2epi,
                       'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', csf2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', csf2epi,
                       'in_matrix_file')

    compcor_wf.connect(input_node, 'n_comp_tcompcor', tcompcor,
                       'num_components')
    compcor_wf.connect(input_node, 'n_comp_acompcor', acompcor,
                       'num_components')

    compcor_wf.connect(input_node, 'in_file', average_func, 'in_file')
    compcor_wf.connect(average_func, 'out_file', epi_mask, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', erode_csf, 'epi_mask')
    compcor_wf.connect(epi_mask, 'mask_file', erode_wm, 'epi_mask')

    compcor_wf.connect(wm2epi, 'out_file', erode_wm, 'in_file')
    compcor_wf.connect(csf2epi, 'out_file', erode_csf, 'in_file')

    compcor_wf.connect(erode_wm, 'roi_eroded', merge_wm_and_csf_masks, 'in1')
    compcor_wf.connect(erode_csf, 'roi_eroded', merge_wm_and_csf_masks, 'in2')
    compcor_wf.connect(merge_wm_and_csf_masks, 'out', acompcor, 'mask_files')

    compcor_wf.connect(input_node, 'in_file', acompcor, 'realigned_file')
    compcor_wf.connect(input_node, 'in_file', tcompcor, 'realigned_file')
    compcor_wf.connect(erode_csf, 'epi_mask_eroded', tcompcor, 'mask_files')

    # compcor_wf.connect(tcompcor, 'components_file', output_node, 'tcompcor_file')
    # compcor_wf.connect(acompcor, 'components_file', output_node, 'acompcor_file')
    compcor_wf.connect(epi_mask, 'mask_file', output_node, 'epi_mask')

    compcor_wf.connect(rename_acompcor, 'out_file', datasink, 'acompcor_file')

    #compcor_wf.connect(tcompcor, 'components_file', combine_files, 'tcomp')
    #compcor_wf.connect(acompcor, 'components_file', combine_files, 'acomp')
    #compcor_wf.connect(combine_files, 'out_file', datasink, 'confounds')

    return compcor_wf
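A minimal usage sketch for the CompCor workflow, with hypothetical paths (it assumes the FAST probability maps are ordered so that the pick_csf/pick_wm helpers select the CSF and WM maps):

compcor = create_compcor_workflow(name='compcor')
# hypothetical inputs:
compcor.inputs.inputspec.in_file = ['/data/sub-01/func/sub-01_task-rest_bold.nii.gz']
compcor.inputs.inputspec.fast_files = ['/data/sub-01/anat/fast_prob_0.nii.gz',
                                       '/data/sub-01/anat/fast_prob_1.nii.gz',
                                       '/data/sub-01/anat/fast_prob_2.nii.gz']
compcor.inputs.inputspec.highres2epi_mat = '/data/sub-01/reg/highres2epi.mat'
compcor.inputs.inputspec.n_comp_tcompcor = 6
compcor.inputs.inputspec.n_comp_acompcor = 6
compcor.inputs.inputspec.output_directory = '/data/derivatives'
compcor.inputs.inputspec.sub_id = 'sub-01'
compcor.run()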
Example No. 25
def anat2mni_fsl_workflow(SinkTag="anat_preproc", wf_name="anat2mni_fsl"):
    """
    Modified version of CPAC.registration.registration:

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/registration/registration.html`


    Register skull and brain extracted image to MNI space and return the transformation martices.

    Workflow inputs:
        :param skull: The reoriented anatomical file.
        :param brain: The brain extracted anat.
        :param ref_skull: MNI152 skull file.
        :param ref_brain: MNI152 brain file.
        :param ref_mask: CSF mask of the MNI152 file.
        :param fnirt config: Parameters which specifies FNIRT options.
        :param SinkDir:
        :param SinkTag: The output directiry in which the returned images (see workflow outputs) could be found.

    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Balint Kincses
    [email protected]
    2018


    """

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'brain', 'skull', 'reference_brain', 'reference_skull', 'ref_mask',
        'fnirt_config'
    ]),
                        name='inputspec')

    inputspec.inputs.reference_brain = globals._FSLDIR_ + globals._brainref
    inputspec.inputs.reference_skull = globals._FSLDIR_ + globals._headref
    inputspec.inputs.ref_mask = globals._FSLDIR_ + globals._brainref_mask
    # inputspec.inputs.fnirt_config = "T1_2_MNI152_2mm"

    # Linear registration node
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file'],
                            name='linear_reg_0')
    linear_reg.inputs.cost = 'corratio'

    # Non-linear registration node
    nonlinear_reg = pe.MapNode(interface=fsl.FNIRT(),
                               iterfield=['in_file', 'affine_file'],
                               name='nonlinear_reg_1')
    nonlinear_reg.inputs.fieldcoeff_file = True
    nonlinear_reg.inputs.jacobian_file = True
    nonlinear_reg.inputs.field_file = True  # needed so the 'field_file' output connected below is generated

    # Applying warp field
    brain_warp = pe.MapNode(interface=fsl.ApplyWarp(),
                            iterfield=['in_file', 'field_file'],
                            name='brain_warp')

    # Calculate the inverse of the linear transformation
    inv_flirt_xfm = pe.MapNode(interface=fsl.utils.ConvertXFM(),
                               iterfield=['in_file'],
                               name='inv_linear_reg0_xfm')
    inv_flirt_xfm.inputs.invert_xfm = True

    # Calculate inverse of the nonlinear warping field
    inv_fnirt_xfm = pe.MapNode(interface=fsl.utils.InvWarp(),
                               iterfield=['warp', 'reference'],
                               name="inv_nonlinear_xfm")

    # Create png images for quality check
    myqc = qc.vol2png("anat2mni", "FSL2", overlayiterated=False)
    myqc.inputs.inputspec.overlay_image = globals._FSLDIR_ + globals._brainref
    myqc.inputs.slicer.image_width = 500
    myqc.inputs.slicer.threshold_edges = 0.1

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'output_brain', 'linear_xfm', 'invlinear_xfm', 'nonlinear_xfm',
        'invnonlinear_xfm', 'field_file', 'std_template'
    ]),
                         name='outputspec')

    # Create workflow and connect nodes
    analysisflow = pe.Workflow(name=wf_name)
    analysisflow.connect(inputspec, 'brain', linear_reg, 'in_file')
    analysisflow.connect(inputspec, 'reference_brain', linear_reg, 'reference')
    analysisflow.connect(inputspec, 'skull', nonlinear_reg, 'in_file')
    analysisflow.connect(inputspec, 'reference_skull', nonlinear_reg,
                         'ref_file')
    analysisflow.connect(inputspec, 'ref_mask', nonlinear_reg, 'refmask_file')
    # FNIRT parameters are specified by FSL config file
    # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified)
    analysisflow.connect(inputspec, 'fnirt_config', nonlinear_reg,
                         'config_file')
    analysisflow.connect(linear_reg, 'out_matrix_file', nonlinear_reg,
                         'affine_file')
    analysisflow.connect(nonlinear_reg, 'fieldcoeff_file', outputspec,
                         'nonlinear_xfm')
    analysisflow.connect(nonlinear_reg, 'field_file', outputspec, 'field_file')
    analysisflow.connect(inputspec, 'brain', brain_warp, 'in_file')
    analysisflow.connect(nonlinear_reg, 'fieldcoeff_file', brain_warp,
                         'field_file')
    analysisflow.connect(inputspec, 'reference_brain', brain_warp, 'ref_file')
    analysisflow.connect(brain_warp, 'out_file', outputspec, 'output_brain')
    analysisflow.connect(linear_reg, 'out_matrix_file', inv_flirt_xfm,
                         'in_file')
    analysisflow.connect(inv_flirt_xfm, 'out_file', outputspec,
                         'invlinear_xfm')

    analysisflow.connect(nonlinear_reg, 'fieldcoeff_file', inv_fnirt_xfm,
                         'warp')
    analysisflow.connect(inputspec, 'brain', inv_fnirt_xfm, 'reference')
    analysisflow.connect(inv_fnirt_xfm, 'inverse_warp', outputspec,
                         'invnonlinear_xfm')

    analysisflow.connect(linear_reg, 'out_matrix_file', outputspec,
                         'linear_xfm')
    analysisflow.connect(inputspec, 'reference_brain', outputspec,
                         'std_template')
    analysisflow.connect(brain_warp, 'out_file', ds, 'anat2mni_std')
    analysisflow.connect(nonlinear_reg, 'fieldcoeff_file', ds,
                         'anat2mni_warpfield')
    analysisflow.connect(brain_warp, 'out_file', myqc, 'inputspec.bg_image')

    return analysisflow
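A minimal usage sketch, with hypothetical paths; reference_brain, reference_skull and ref_mask default to the FSL templates set inside the workflow:

anat2mni = anat2mni_fsl_workflow()
anat2mni.inputs.inputspec.brain = ['/data/MS001/highres_brain.nii.gz']  # hypothetical
anat2mni.inputs.inputspec.skull = ['/data/MS001/highres.nii.gz']        # hypothetical
anat2mni.inputs.inputspec.fnirt_config = 'T1_2_MNI152_2mm'              # standard FSL FNIRT config
anat2mni.run()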
Example No. 26
def anat2mni_ants_workflow_nipype(SinkTag="anat_preproc",
                                  wf_name="anat2mni_ants"):
    """
    Register skull and brain extracted image to MNI space and return the transformation martices.
    Using ANTS, doing it in the nipype way.

    Workflow inputs:
        :param skull: The reoriented anatomical file.
        :param brain: The brain extracted anat.
        :param ref_skull: MNI152 skull file.
        :param ref_brain: MNI152 brain file.
        :param SinkDir:
        :param SinkTag: The output directiry in which the returned images (see workflow outputs) could be found.

    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Tamas Spisak
    [email protected]
    2018


    """
    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of workflow
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['brain', 'skull', 'reference_brain', 'reference_skull']),
                        name='inputspec')

    inputspec.inputs.reference_brain = globals._FSLDIR_ + globals._brainref  #TODO_ready: 1 or 2mm???
    inputspec.inputs.reference_skull = globals._FSLDIR_ + globals._headref

    # Multi-stage registration node with ANTS
    reg = pe.MapNode(
        interface=Registration(),
        iterfield=['moving_image'],  # 'moving_image_mask'],
        name="ANTS")
    """
    reg.inputs.transforms = ['Affine', 'SyN']
    reg.inputs.transform_parameters = [(2.0,), (0.1, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[1500, 200], [100, 50, 30]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = False
    reg.inputs.initialize_transforms_per_stage = False
    reg.inputs.metric = ['Mattes', 'Mattes']
    reg.inputs.metric_weight = [1] * 2  # Default (value ignored currently by ANTs)
    reg.inputs.radius_or_number_of_bins = [32] * 2
    reg.inputs.sampling_strategy = ['Random', None]
    reg.inputs.sampling_percentage = [0.05, None]
    reg.inputs.convergence_threshold = [1.e-8, 1.e-9]
    reg.inputs.convergence_window_size = [20] * 2
    reg.inputs.smoothing_sigmas = [[1, 0], [2, 1, 0]]
    reg.inputs.sigma_units = ['vox'] * 2
    reg.inputs.shrink_factors = [[2, 1], [4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True, True]
    reg.inputs.use_histogram_matching = [True, True]  # This is the default
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.winsorize_lower_quantile = 0.01
    reg.inputs.winsorize_upper_quantile = 0.99
    """

    # Parameters suggested by Satrajit Ghosh ("satra", nipype developer):
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = ([[10000, 111110, 11110]] * 2 +
                                       [[100, 50, 30]])
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.args = '--float'

    # Create png images for quality check
    myqc = qc.vol2png("anat2mni", "ANTS3", overlayiterated=False)
    myqc.inputs.inputspec.overlay_image = globals._FSLDIR_ + globals._brainref  #TODO_ready: 1 or 2mm???
    myqc.inputs.slicer.image_width = 500  # 5000 # for the 1mm template
    myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 1mm template

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'output_brain', 'linear_xfm', 'invlinear_xfm', 'nonlinear_xfm',
        'invnonlinear_xfm', 'std_template'
    ]),
                         name='outputspec')

    outputspec.inputs.std_template = inputspec.inputs.reference_brain

    # Create workflow and connect nodes
    analysisflow = pe.Workflow(name=wf_name)

    analysisflow.connect(inputspec, 'reference_skull', reg, 'fixed_image')
    #analysisflow.connect(inputspec, 'reference_brain', reg, 'fixed_image_mask')
    analysisflow.connect(inputspec, 'skull', reg, 'moving_image')
    #analysisflow.connect(inputspec, 'brain', reg, 'moving_image_mask')

    analysisflow.connect(reg, 'composite_transform', outputspec,
                         'nonlinear_xfm')
    analysisflow.connect(reg, 'inverse_composite_transform', outputspec,
                         'invnonlinear_xfm')
    analysisflow.connect(reg, 'warped_image', outputspec, 'output_brain')
    analysisflow.connect(reg, 'warped_image', ds, 'anat2mni_std')
    analysisflow.connect(reg, 'composite_transform', ds, 'anat2mni_warpfield')
    analysisflow.connect(reg, 'warped_image', myqc, 'inputspec.bg_image')

    return analysisflow
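The composite transform returned on 'nonlinear_xfm' can be applied to any other image in anatomical space with ANTs' ApplyTransforms interface; a sketch with hypothetical file names:

from nipype.interfaces.ants import ApplyTransforms

at = ApplyTransforms()
at.inputs.input_image = 'lesion_mask_in_anat_space.nii.gz'  # hypothetical
at.inputs.reference_image = 'MNI152_T1_2mm_brain.nii.gz'    # hypothetical
at.inputs.transforms = ['transformComposite.h5']            # the 'nonlinear_xfm' output
at.inputs.interpolation = 'NearestNeighbor'                 # e.g. for a mask
res = at.run()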
Example No. 27
def bbr_workflow(SinkTag="func_preproc", wf_name="func2anat"):
    """
        Modified version of CPAC.registration.registration:

        `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/registration/registration.html`


        BBR registration of functional image to anat.

        Workflow inputs:
            :param func: One volume of the 4D fMRI (The one which is the closest to the fieldmap recording in time should be chosen- e.g: if fieldmap was recorded after the fMRI the last volume of it should be chosen).
            :param skull: The oriented high res T1w image.
            :param anat_wm_segmentation: WM probability mask in .
            :param anat_csf_segmentation: CSF probability mask in
            :param bbr_schedule: Parameters which specifies BBR options.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

        Workflow outputs:




            :return: bbreg_workflow - workflow
                func="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz",
                 skull="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                 anat_wm_segmentation="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/anat_preproc/fast/fast__prob_2.nii.gz",



        Balint Kincses
        [email protected]
        2018


        """
    import os
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'func', 'skull', 'anat_wm_segmentation', 'anat_gm_segmentation',
        'anat_csf_segmentation', 'anat_ventricle_segmentation'
    ]),
                        name='inputspec')

    myonevol = onevol.onevol_workflow()

    # trilinear interpolation is used by default in linear registration for func to anat
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file', 'reference'],
                            name='linear_func_to_anat')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6
    linear_reg.inputs.out_matrix_file = "lin_mat"

    # WM probability map is thresholded and masked
    wm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='wm_bb_mask')
    wm_bb_mask.inputs.op_string = '-thr 0.5 -bin'
    # CSF probability map is thresholded and masked
    csf_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                             iterfield=['in_file'],
                             name='csf_bb_mask')
    csf_bb_mask.inputs.op_string = '-thr 0.5 -bin'

    # GM probability map is thresholded and masked
    gm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='gm_bb_mask')
    gm_bb_mask.inputs.op_string = '-thr 0.1 -bin'  # liberal mask to capture all gm signal

    # ventricle probability map is thresholded and masked
    vent_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                              iterfield=['in_file'],
                              name='vent_bb_mask')
    vent_bb_mask.inputs.op_string = '-thr 0.8 -bin -ero -dilM'  # stricter threshold and some morphology for compcor

    # add the CSF and WM masks
    #add_masks=pe.MapNode(interface=fsl.ImageMaths(),
    #                     iterfield=['in_file','in_file2'],
    #                     name='add_masks')
    #add_masks.inputs.op_string = ' -add'

    # A helper function builds the '-cost bbr -wmseg <target>' argument string that
    # makes FLIRT perform BBR registration, once per element of the list (MapNode)
    def bbreg_args(bbreg_target):
        return '-cost bbr -wmseg ' + bbreg_target
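    # e.g. bbreg_args('wm_mask.nii.gz') -> '-cost bbr -wmseg wm_mask.nii.gz'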

    bbreg_arg_convert = pe.MapNode(interface=Function(
        input_names=["bbreg_target"],
        output_names=["arg"],
        function=bbreg_args),
                                   iterfield=['bbreg_target'],
                                   name="bbr_arg_converter")

    # BBR registration within the FLIRT node
    bbreg_func_to_anat = pe.MapNode(
        interface=fsl.FLIRT(),
        iterfield=['in_file', 'reference', 'in_matrix_file', 'args'],
        name='bbreg_func_to_anat')
    bbreg_func_to_anat.inputs.dof = 6

    # calculate the inverse of the transformation matrix (of func to anat)
    convertmatrix = pe.MapNode(interface=fsl.ConvertXFM(),
                               iterfield=['in_file'],
                               name="convertmatrix")
    convertmatrix.inputs.invert_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical CSF mask
    reg_anatmask_to_func1 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func1')
    #reg_anatmask_to_func1.inputs.apply_xfm = True
    # use the inverse registration (anat to func) to transform the anatomical WM mask
    reg_anatmask_to_func2 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func2')
    #reg_anatmask_to_func2.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical GM mask
    reg_anatmask_to_func3 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func3')
    # reg_anatmask_to_func3.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical ventricle mask
    reg_anatmask_to_func4 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func4')
    # reg_anatmask_to_func4.inputs.apply_xfm = True

    # Create png images for quality check
    myqc = qc.vol2png("func2anat")

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_sample2anat', 'example_func', 'func_to_anat_linear_xfm',
        'anat_to_func_linear_xfm', 'csf_mask_in_funcspace',
        'wm_mask_in_funcspace', 'gm_mask_in_funcspace',
        'ventricle_mask_in_funcspace'
    ]),
                         name='outputspec')

    analysisflow = pe.Workflow(name=wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
    analysisflow.connect(myonevol, 'outputspec.func1vol', linear_reg,
                         'in_file')
    analysisflow.connect(inputspec, 'skull', linear_reg, 'reference')
    analysisflow.connect(linear_reg, 'out_matrix_file', bbreg_func_to_anat,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol', bbreg_func_to_anat,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', bbreg_arg_convert,
                         'bbreg_target')
    analysisflow.connect(bbreg_arg_convert, 'arg', bbreg_func_to_anat, 'args')
    analysisflow.connect(inputspec, 'skull', bbreg_func_to_anat, 'reference')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', convertmatrix,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func1,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func1, 'reference')
    analysisflow.connect(csf_bb_mask, 'out_file', reg_anatmask_to_func1,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func2,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func2, 'reference')
    analysisflow.connect(wm_bb_mask, 'out_file', reg_anatmask_to_func2,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func3,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func3, 'reference')
    analysisflow.connect(gm_bb_mask, 'out_file', reg_anatmask_to_func3,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func4,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func4, 'reference')
    analysisflow.connect(vent_bb_mask, 'out_file', reg_anatmask_to_func4,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', wm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_csf_segmentation', csf_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_gm_segmentation', gm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_ventricle_segmentation',
                         vent_bb_mask, 'in_file')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', outputspec,
                         'func_sample2anat')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', outputspec,
                         'func_to_anat_linear_xfm')
    analysisflow.connect(reg_anatmask_to_func1, 'out_file', outputspec,
                         'csf_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func2, 'out_file', outputspec,
                         'wm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func3, 'out_file', outputspec,
                         'gm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func4, 'out_file', outputspec,
                         'ventricle_mask_in_funcspace')
    analysisflow.connect(myonevol, 'outputspec.func1vol', outputspec,
                         'example_func')
    analysisflow.connect(convertmatrix, 'out_file', outputspec,
                         'anat_to_func_linear_xfm')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', ds, "func2anat")
    analysisflow.connect(bbreg_func_to_anat, 'out_file', myqc,
                         'inputspec.bg_image')
    analysisflow.connect(wm_bb_mask, 'out_file', myqc,
                         'inputspec.overlay_image')

    return analysisflow
Example No. 28
def anat2mni_ants_workflow_harcoded(SinkTag="anat_preproc",
                                    wf_name="anat2mni_ants"):
    """
    Register skull and brain extracted image to MNI space and return the transformation martices.
    Using ANTS, doing it with a hardcoded function, a'la C-PAC.
    This uses brain masks and full head images, as well.

    Workflow inputs:
        :param skull: The reoriented anatomical file.
        :param brain: The brain extracted anat.
        :param ref_skull: MNI152 skull file.
        :param ref_brain: MNI152 brain file.
        :param SinkDir:
        :param SinkTag: The output directiry in which the returned images (see workflow outputs) could be found.

    Workflow outputs:




        :return: anat2mni_workflow - workflow


        anat="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
                      brain="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres_brain.nii.gz",


    Tamas Spisak
    [email protected]
    2018


    """
    from nipype.interfaces.utility import Function

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of workflow
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['brain', 'skull', 'reference_brain', 'reference_skull']),
                        name='inputspec')

    inputspec.inputs.reference_brain = globals._FSLDIR_ + globals._brainref  #TODO_ready: 1 or 2mm???
    inputspec.inputs.reference_skull = globals._FSLDIR_ + globals._headref

    # Multi-stage registration node with ANTS
    reg = pe.MapNode(interface=Function(input_names=[
        'anatomical_brain', 'reference_brain', 'anatomical_skull',
        'reference_skull'
    ],
                                        output_names=[
                                            'transform_composite',
                                            'transform_inverse_composite',
                                            'warped_image'
                                        ],
                                        function=hardcoded_reg_fast),
                     iterfield=['anatomical_brain', 'anatomical_skull'],
                     name="ANTS_hardcoded",
                     mem_gb=4.1)

    # Calculate the linear transformation with FSL. This matrix has to be used by FAST
    # during segmentation, if priors are set (the default).
    # Linear registration node
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file'],
                            name='linear_reg_0')
    linear_reg.inputs.cost = 'corratio'

    # Calculate the inverse of the linear transformation
    inv_flirt_xfm = pe.MapNode(interface=fsl.utils.ConvertXFM(),
                               iterfield=['in_file'],
                               name='inv_linear_reg0_xfm')
    inv_flirt_xfm.inputs.invert_xfm = True

    # (alternatively, hardcoded_reg_cpac could be used as the registration function above)

    # Create png images for quality check
    myqc = qc.vol2png("anat2mni", "ANTS", overlayiterated=False)
    myqc.inputs.inputspec.overlay_image = globals._FSLDIR_ + globals._brainref  #TODO_ready: 1 or 2mm???
    myqc.inputs.slicer.image_width = 500  # 5000 # for the 1mm template
    myqc.inputs.slicer.threshold_edges = 0.1  # 0.1  # for the 1mm template

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'output_brain', 'linear_xfm', 'invlinear_xfm', 'nonlinear_xfm',
        'invnonlinear_xfm', 'std_template'
    ]),
                         name='outputspec')

    outputspec.inputs.std_template = inputspec.inputs.reference_brain

    # Create workflow and connect nodes
    analysisflow = pe.Workflow(name=wf_name)
    # FSL part for the transformation matrix
    analysisflow.connect(inputspec, 'brain', linear_reg, 'in_file')
    analysisflow.connect(inputspec, 'reference_brain', linear_reg, 'reference')
    analysisflow.connect(linear_reg, 'out_matrix_file', inv_flirt_xfm,
                         'in_file')
    analysisflow.connect(inv_flirt_xfm, 'out_file', outputspec,
                         'invlinear_xfm')

    analysisflow.connect(inputspec, 'reference_skull', reg, 'reference_skull')
    analysisflow.connect(inputspec, 'reference_brain', reg, 'reference_brain')
    analysisflow.connect(inputspec, 'skull', reg, 'anatomical_skull')
    analysisflow.connect(inputspec, 'brain', reg, 'anatomical_brain')

    analysisflow.connect(reg, 'transform_composite', outputspec,
                         'nonlinear_xfm')
    analysisflow.connect(reg, 'transform_inverse_composite', outputspec,
                         'invnonlinear_xfm')
    analysisflow.connect(reg, 'warped_image', outputspec, 'output_brain')
    analysisflow.connect(reg, 'warped_image', ds, 'anat2mni_std')
    analysisflow.connect(reg, 'transform_composite', ds, 'anat2mni_warpfield')
    analysisflow.connect(reg, 'warped_image', myqc, 'inputspec.bg_image')

    return analysisflow
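The actual hardcoded_reg_fast function is defined elsewhere in PUMI; below is only a minimal sketch of what such a hardcoded registration function could look like, assuming it shells out to antsRegistration with a rigid + SyN schedule and composite transform output (all names and parameter values here are illustrative, not PUMI's real settings):

def hardcoded_reg_sketch(anatomical_brain, reference_brain, anatomical_skull, reference_skull):
    # imports must live inside the function: nipype Function nodes execute it in isolation
    import os
    cmd = ('antsRegistration --dimensionality 3 --float 1 '
           '--output [anat2mni_,anat2mni_Warped.nii.gz] '
           '--interpolation Linear '
           '--winsorize-image-intensities [0.005,0.995] '
           '--initial-moving-transform [{ref},{mov},1] '
           '--transform Rigid[0.1] '
           '--metric MI[{ref},{mov},1,32,Regular,0.25] '
           '--convergence [1000x500x250x100,1e-6,10] '
           '--shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox '
           '--transform SyN[0.1,3,0] '
           '--metric CC[{refs},{movs},1,4] '
           '--convergence [100x70x50x20,1e-6,10] '
           '--shrink-factors 8x4x2x1 --smoothing-sigmas 3x2x1x0vox '
           '--write-composite-transform 1').format(
               ref=reference_brain, mov=anatomical_brain,
               refs=reference_skull, movs=anatomical_skull)
    os.system(cmd)
    # --write-composite-transform makes ANTs emit <prefix>Composite.h5 and
    # <prefix>InverseComposite.h5; absolute paths are returned because the
    # node runs in its own working directory
    cwd = os.getcwd()
    return (os.path.join(cwd, 'anat2mni_Composite.h5'),
            os.path.join(cwd, 'anat2mni_InverseComposite.h5'),
            os.path.join(cwd, 'anat2mni_Warped.nii.gz'))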
Example No. 29
def create_all_calcarine_reward_2_h5_workflow(
        analysis_info, name='all_calcarine_reward_nii_2_h5'):
    import os.path as op
    import tempfile
    import nipype.pipeline as pe
    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from spynoza.nodes.utils import get_scaninfo, dyns_min_1, topup_scan_params, apply_scan_params
    from nipype.interfaces.io import SelectFiles, DataSink

    # Importing of custom nodes from spynoza packages; assumes that spynoza is installed:
    # pip install git+https://github.com/spinoza-centre/spynoza.git@develop
    from utils.utils import mask_nii_2_hdf5, combine_eye_hdfs_to_nii_hdf

    input_node = pe.Node(
        IdentityInterface(fields=['sub_id', 'preprocessed_data_dir']),
        name='inputspec')

    # i/o node
    datasource_templates = dict(mcf='{sub_id}/mcf/*.nii.gz',
                                psc='{sub_id}/psc/*.nii.gz',
                                tf='{sub_id}/tf/*.nii.gz',
                                GLM='{sub_id}/GLM/*.nii.gz',
                                eye='{sub_id}/eye/h5/*.h5',
                                rois='{sub_id}/roi/*_vol.nii.gz')
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False),
                         name='datasource')

    hdf5_psc_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_psc_masker')
    hdf5_psc_masker.inputs.folder_alias = 'psc'
    hdf5_psc_masker.inputs.hdf5_file = op.join(tempfile.mkdtemp(), 'roi.h5')

    hdf5_tf_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                             name='hdf5_tf_masker')
    hdf5_tf_masker.inputs.folder_alias = 'tf'
    # hdf5_tf_masker receives its hdf5_file from hdf5_psc_masker via a workflow connection below

    hdf5_mcf_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_mcf_masker')
    hdf5_mcf_masker.inputs.folder_alias = 'mcf'

    hdf5_GLM_masker = pe.Node(Function(
        input_names=['in_files', 'mask_files', 'hdf5_file', 'folder_alias'],
        output_names=['hdf5_file'],
        function=mask_nii_2_hdf5),
                              name='hdf5_GLM_masker')
    hdf5_GLM_masker.inputs.folder_alias = 'GLM'

    eye_hdfs_to_nii_masker = pe.Node(Function(
        input_names=['nii_hdf5_file', 'eye_hdf_filelist', 'new_alias'],
        output_names=['nii_hdf5_file'],
        function=combine_eye_hdfs_to_nii_hdf),
                                     name='eye_hdfs_to_nii_masker')
    eye_hdfs_to_nii_masker.inputs.new_alias = 'eye'

    # node for datasinking
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    all_calcarine_reward_nii_2_h5_workflow = pe.Workflow(name=name)

    all_calcarine_reward_nii_2_h5_workflow.connect(input_node,
                                                   'preprocessed_data_dir',
                                                   datasink, 'base_directory')
    all_calcarine_reward_nii_2_h5_workflow.connect(input_node, 'sub_id',
                                                   datasink, 'container')

    all_calcarine_reward_nii_2_h5_workflow.connect(input_node,
                                                   'preprocessed_data_dir',
                                                   datasource,
                                                   'base_directory')
    all_calcarine_reward_nii_2_h5_workflow.connect(input_node, 'sub_id',
                                                   datasource, 'sub_id')

    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'psc',
                                                   hdf5_psc_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_psc_masker,
                                                   'mask_files')

    # the hdf5_file is created by the psc masker and then passed from masker to masker, and finally into the datasink.
    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_psc_masker,
                                                   'hdf5_file', hdf5_tf_masker,
                                                   'hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'tf',
                                                   hdf5_tf_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_tf_masker,
                                                   'mask_files')

    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_tf_masker, 'hdf5_file',
                                                   hdf5_mcf_masker,
                                                   'hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'mcf',
                                                   hdf5_mcf_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_mcf_masker,
                                                   'mask_files')

    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'GLM',
                                                   hdf5_GLM_masker, 'in_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'rois',
                                                   hdf5_GLM_masker,
                                                   'mask_files')
    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_mcf_masker,
                                                   'hdf5_file',
                                                   hdf5_GLM_masker,
                                                   'hdf5_file')

    all_calcarine_reward_nii_2_h5_workflow.connect(hdf5_GLM_masker,
                                                   'hdf5_file',
                                                   eye_hdfs_to_nii_masker,
                                                   'nii_hdf5_file')
    all_calcarine_reward_nii_2_h5_workflow.connect(datasource, 'eye',
                                                   eye_hdfs_to_nii_masker,
                                                   'eye_hdf_filelist')

    all_calcarine_reward_nii_2_h5_workflow.connect(eye_hdfs_to_nii_masker,
                                                   'nii_hdf5_file', datasink,
                                                   'h5')

    return all_calcarine_reward_nii_2_h5_workflow
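A minimal usage sketch (analysis_info is accepted but not used inside the workflow body; paths are hypothetical):

nii2h5 = create_all_calcarine_reward_2_h5_workflow(analysis_info={})
nii2h5.inputs.inputspec.sub_id = 'sub-01'                        # hypothetical
nii2h5.inputs.inputspec.preprocessed_data_dir = '/data/preproc'  # hypothetical
nii2h5.run()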
Example No. 30
def create_topup_workflow(analysis_info, name='topup'):

    ###########################################################################
    # NODES
    ###########################################################################

    input_node = pe.Node(IdentityInterface(fields=[
        'in_files', 'alt_files', 'conf_file', 'output_directory', 'echo_time',
        'phase_encoding_direction', 'epi_factor'
    ]),
                         name='inputspec')

    output_node = pe.Node(
        IdentityInterface(fields=['out_files', 'field_coefs']),
        name='outputspec')

    get_info = pe.MapNode(interface=Get_scaninfo,
                          name='get_scaninfo',
                          iterfield=['in_file'])

    dyns_min_1_node = pe.MapNode(interface=Dyns_min_1,
                                 name='dyns_min_1_node',
                                 iterfield=['dyns'])

    topup_scan_params_node = pe.Node(interface=Topup_scan_params,
                                     name='topup_scan_params')

    apply_scan_params_node = pe.MapNode(interface=Apply_scan_params,
                                        name='apply_scan_params',
                                        iterfield=['nr_trs'])

    PE_ref = pe.MapNode(fsl.ExtractROI(t_size=1),
                        name='PE_ref',
                        iterfield=['in_file', 't_min'])

    # the timepoint is hard-coded for this node, so alt_t is no longer needed.
    PE_alt = pe.MapNode(fsl.ExtractROI(t_min=0, t_size=1),
                        name='PE_alt',
                        iterfield=['in_file'])

    PE_comb = pe.MapNode(Merge(2), name='PE_list', iterfield=['in1', 'in2'])
    PE_merge = pe.MapNode(fsl.Merge(dimension='t'),
                          name='PE_merged',
                          iterfield=['in_files'])

    # implementing the contents of b02b0.cnf in the args,
    # while supplying an empty text file as a --config option
    # gets topup going on our server.
    topup_args = """--warpres=20,16,14,12,10,6,4,4,4
    --subsamp=1,1,1,1,1,1,1,1,1
    --fwhm=8,6,4,3,3,2,1,0,0
    --miter=5,5,5,5,5,10,10,20,20
    --lambda=0.005,0.001,0.0001,0.000015,0.000005,0.0000005,0.00000005,0.0000000005,0.00000000001
    --ssqlambda=1
    --regmod=bending_energy
    --estmov=1,1,1,1,1,0,0,0,0
    --minmet=0,0,0,0,0,1,1,1,1
    --splineorder=3
    --numprec=double
    --interp=spline
    --scale=1 -v"""

    topup_node = pe.MapNode(fsl.TOPUP(args=topup_args),
                            name='topup',
                            iterfield=['in_file'])
    unwarp = pe.MapNode(fsl.ApplyTOPUP(in_index=[1], method='jac'),
                        name='unwarp',
                        iterfield=[
                            'in_files', 'in_topup_fieldcoef',
                            'in_topup_movpar', 'encoding_file'
                        ])

    ###########################################################################
    # WORKFLOW
    ###########################################################################
    topup_workflow = pe.Workflow(name=name)

    # these are now mapnodes because they split up over files
    topup_workflow.connect(input_node, 'in_files', get_info, 'in_file')
    topup_workflow.connect(input_node, 'in_files', PE_ref, 'in_file')
    topup_workflow.connect(input_node, 'alt_files', PE_alt, 'in_file')

    # this is a simple node, connecting to the input node
    topup_workflow.connect(input_node, 'phase_encoding_direction',
                           topup_scan_params_node, 'pe_direction')
    topup_workflow.connect(input_node, 'echo_time', topup_scan_params_node,
                           'te')
    topup_workflow.connect(input_node, 'epi_factor', topup_scan_params_node,
                           'epi_factor')

    # preparing a node here, which automatically iterates over dyns output of the get_info mapnode
    topup_workflow.connect(input_node, 'echo_time', apply_scan_params_node,
                           'te')
    topup_workflow.connect(input_node, 'phase_encoding_direction',
                           apply_scan_params_node, 'pe_direction')
    topup_workflow.connect(input_node, 'epi_factor', apply_scan_params_node,
                           'epi_factor')
    topup_workflow.connect(get_info, 'dyns', apply_scan_params_node, 'nr_trs')

    # the nr_trs and in_files both propagate into the PE_ref node
    topup_workflow.connect(get_info, 'dyns', dyns_min_1_node, 'dyns')
    topup_workflow.connect(dyns_min_1_node, 'dyns_1', PE_ref, 't_min')

    topup_workflow.connect(PE_ref, 'roi_file', PE_comb, 'in1')
    topup_workflow.connect(PE_alt, 'roi_file', PE_comb, 'in2')
    topup_workflow.connect(PE_comb, 'out', PE_merge, 'in_files')

    topup_workflow.connect(topup_scan_params_node, 'fn', topup_node,
                           'encoding_file')
    topup_workflow.connect(PE_merge, 'merged_file', topup_node, 'in_file')
    topup_workflow.connect(input_node, 'conf_file', topup_node, 'config')

    topup_workflow.connect(input_node, 'in_files', unwarp, 'in_files')
    topup_workflow.connect(apply_scan_params_node, 'fn', unwarp,
                           'encoding_file')
    topup_workflow.connect(topup_node, 'out_fieldcoef', unwarp,
                           'in_topup_fieldcoef')
    topup_workflow.connect(topup_node, 'out_movpar', unwarp, 'in_topup_movpar')

    topup_workflow.connect(unwarp, 'out_corrected', output_node, 'out_files')
    topup_workflow.connect(topup_node, 'out_fieldcoef', output_node,
                           'field_coefs')

    # ToDo: automatic datasink?

    return topup_workflow
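A minimal usage sketch with hypothetical values; conf_file should point to an empty text file, since the real topup parameters are passed through topup_args above:

topup_wf = create_topup_workflow(analysis_info={}, name='topup')
topup_wf.inputs.inputspec.in_files = ['/data/sub-01/func/bold.nii.gz']       # hypothetical
topup_wf.inputs.inputspec.alt_files = ['/data/sub-01/fmap/bold_alt.nii.gz']  # opposite PE direction
topup_wf.inputs.inputspec.conf_file = '/data/empty.cnf'   # empty file, see the comment above
topup_wf.inputs.inputspec.echo_time = 0.025               # hypothetical
topup_wf.inputs.inputspec.phase_encoding_direction = 'y'  # hypothetical
topup_wf.inputs.inputspec.epi_factor = 37                 # hypothetical
topup_wf.run()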