Example No. 1
def init_anat_reports_wf(reportlets_dir,
                         skull_strip_ants,
                         output_spaces,
                         template,
                         freesurfer,
                         name='anat_reports_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'summary_report', 't1_seg_report', 't1_2_mni_report',
        't1_skull_strip_report', 'recon_report'
    ]),
                        name='inputnode')

    ds_summary_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='summary'),
                                name='ds_summary_report',
                                run_without_submitting=True)

    ds_t1_seg_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_seg'),
                               name='ds_t1_seg_report',
                               run_without_submitting=True)

    ds_t1_2_mni_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_2_mni'),
                                 name='ds_t1_2_mni_report',
                                 run_without_submitting=True)

    ds_t1_skull_strip_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_skull_strip'),
                                       name='ds_t1_skull_strip_report',
                                       run_without_submitting=True)

    ds_recon_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='reconall'),
                              name='ds_recon_report',
                              run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_summary_report, [('source_file', 'source_file'),
                                        ('summary_report', 'in_file')]),
        (inputnode, ds_t1_seg_report, [('source_file', 'source_file'),
                                       ('t1_seg_report', 'in_file')]),
    ])

    if skull_strip_ants:
        workflow.connect([(inputnode, ds_t1_skull_strip_report,
                           [('source_file', 'source_file'),
                            ('t1_skull_strip_report', 'in_file')])])
    if freesurfer:
        workflow.connect([(inputnode, ds_recon_report,
                           [('source_file', 'source_file'),
                            ('recon_report', 'in_file')])])
    if 'template' in output_spaces:
        workflow.connect([(inputnode, ds_t1_2_mni_report,
                           [('source_file', 'source_file'),
                            ('t1_2_mni_report', 'in_file')])])

    return workflow
Example No. 2
def main(sourcedata,
         derivatives,
         subject,
         session,
         run,
         wf_dir):

    layout = BIDSLayout(sourcedata)

    bolds = layout.get(subject=subject,
                       session=session,
                       run=run,
                       suffix='bold',
                       return_type='file')

    
    for bold in bolds:
        print('Making reference image of {}'.format(bold))

    inputnode = pe.Node(niu.IdentityInterface(fields=['bold']),
                        name='inputnode')
    inputnode.inputs.bold = bolds

    wf = pe.Workflow(name='make_ref_{}_{}_{}'.format(subject,
                                                     session,
                                                     run))

    wf.base_dir = wf_dir

    mc_wf_bold = create_motion_correction_workflow(name='mc_wf_bold',
                                                   method='FSL',
                                                   lightweight=True)

                                              

    wf.connect(inputnode, 'bold', mc_wf_bold, 'inputspec.in_files')
    wf.connect(inputnode, ('bold', pickfirst), mc_wf_bold, 'inputspec.which_file_is_EPI_space')

    mean_bold = pe.MapNode(fsl.MeanImage(dimension='T'),
                           iterfield=['in_file'],
                           name='mean_bold1')

    n4_correct = pe.MapNode(ants.N4BiasFieldCorrection(), 
                            iterfield=['input_image'],
                            name='n4_correct')
    wf.connect(mean_bold, 'out_file', n4_correct, 'input_image')
    
    ds = pe.MapNode(DerivativesDataSink(out_path_base='simple_bold_ref',
                                        suffix='reference',
                                        base_directory=derivatives),
                    iterfield=['in_file', 'source_file'],
                    name='ds_reg_report')
    
    wf.connect(mc_wf_bold, 'outputspec.motion_corrected_files', mean_bold, 'in_file')
    wf.connect(n4_correct, 'output_image', ds, 'in_file')
    wf.connect(inputnode, 'bold', ds, 'source_file')
    

    wf.run()
Example No. 3
def init_func_reports_wf(reportlets_dir, freesurfer, name='func_reports_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['source_file', 'epi_mask_report', 'epi_reg_report', 'epi_reg_suffix',
                    'acompcor_report', 'tcompcor_report']
            ),
        name='inputnode')

    ds_epi_mask_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='epi_mask'),
        name='ds_epi_mask_report', run_without_submitting=True)

    ds_epi_reg_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='bbr' if freesurfer else 'flt_bbr'),
        name='ds_epi_reg_report', run_without_submitting=True)

    ds_acompcor_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='acompcor'),
        name='ds_acompcor_report', run_without_submitting=True)

    ds_tcompcor_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='tcompcor'),
        name='ds_tcompcor_report', run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_epi_mask_report, [('source_file', 'source_file'),
                                         ('epi_mask_report', 'in_file')]),
        (inputnode, ds_epi_reg_report, [('source_file', 'source_file'),
                                        ('epi_reg_report', 'in_file')]),
        (inputnode, ds_acompcor_report, [('source_file', 'source_file'),
                                         ('acompcor_report', 'in_file')]),
        (inputnode, ds_tcompcor_report, [('source_file', 'source_file'),
                                         ('tcompcor_report', 'in_file')]),
        ])

    return workflow
Example No. 4
def init_nighres_ds_wf(derivatives='/derivatives',
                       out_path_base='nighres',
                       name='nighres_datasinks'):

    wf = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w', 'cortex', 'gwb', 'cgb', 'segmentation', 'thickness',
        'layerdepth', 'layerboundaries', 'discretelayers'
    ]),
                        name='inputnode')

    for key, suffix in zip([
            'cortex', 'gwb', 'cgb', 'layerdepth', 'discretelayers',
            'layerboundaries'
    ], ['dseg', 'levelset', 'levelset', 'depth', 'dseg', 'levelset']):
        ds = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         out_path_base=out_path_base,
                                         desc=key,
                                         suffix=suffix),
                     name='ds_{}'.format(key))
        ds.inputs.extra_values = ['hemi-left', 'hemi-right']
        wf.connect(inputnode, 't1w', ds, 'source_file')
        wf.connect(inputnode, key, ds, 'in_file')

    ds_segmentation = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                                  out_path_base='nighres',
                                                  desc='mgdm',
                                                  suffix='dseg'),
                              name='ds_segmentation')
    wf.connect(inputnode, 'segmentation', ds_segmentation, 'in_file')
    wf.connect(inputnode, 't1w', ds_segmentation, 'source_file')

    ds_thickness = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                               out_path_base='nighres',
                                               suffix='thickness'),
                           name='ds_thickness')
    ds_thickness.inputs.extra_values = ['hemi-left', 'hemi-right']
    wf.connect(inputnode, 'thickness', ds_thickness, 'in_file')
    wf.connect(inputnode, 't1w', ds_thickness, 'source_file')

    return wf
Example No. 5
def init_fmap_unwarp_report_wf(reportlets_dir, name='fmap_unwarp_report_wf'):
    from nipype.interfaces import ants
    from nipype.interfaces import utility as niu
    from niworkflows.interfaces import SimpleBeforeAfter

    def _getwm(in_seg, wm_label=3):
        import os.path as op
        import nibabel as nb
        import numpy as np

        nii = nb.load(in_seg)
        data = np.zeros(nii.shape, dtype=np.uint8)
        data[nii.get_data() == wm_label] = 1
        hdr = nii.header.copy()
        hdr.set_data_dtype(np.uint8)
        nb.Nifti1Image(data, nii.affine, hdr).to_filename('wm.nii.gz')
        return op.abspath('wm.nii.gz')

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm',
                'name_source']), name='inputnode')

    map_seg = pe.Node(ants.ApplyTransforms(
        dimension=3, float=True, interpolation='NearestNeighbor'),
        name='map_seg')

    sel_wm = pe.Node(niu.Function(function=_getwm), name='sel_wm')

    epi_rpt = pe.Node(SimpleBeforeAfter(), name='epi_rpt')
    epi_rpt_ds = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='variant-hmcsdc_preproc'), name='epi_rpt_ds'
    )
    workflow.connect([
        (inputnode, epi_rpt, [('in_post', 'after'),
                              ('in_pre', 'before')]),
        (inputnode, epi_rpt_ds, [('name_source', 'source_file')]),
        (epi_rpt, epi_rpt_ds, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, epi_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
Example No. 6
def epi_hmc(name='EPI_HMC', settings=None):
    """
    Performs :abbr:`HMC (head motion correction)` over the input
    :abbr:`EPI (echo-planar imaging)` image.
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['epi']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['epi_brain', 'xforms', 'epi_mask', 'epi_mean']),
                         name='outputnode')

    bet = pe.Node(fsl.BET(functional=True, frac=0.6), name='EPI_bet')

    # Head motion correction (hmc)
    hmc = pe.Node(fsl.MCFLIRT(save_mats=True, save_plots=True, mean_vol=True),
                  name='EPI_hmc')

    pick_1st = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='EPIPickFirst')
    hcm2itk = pe.MapNode(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                         iterfield=['transform_file'],
                         name='hcm2itk')

    avscale = pe.MapNode(fsl.utils.AvScale(all_param=True),
                         name='AvScale',
                         iterfield=['mat_file'])
    avs_format = pe.Node(FormatHMCParam(), name='AVScale_Format')

    # Calculate EPI mask on the average after HMC
    bet_hmc = pe.Node(fsl.BET(mask=True, frac=0.6), name='EPI_hmc_bet')

    workflow.connect([(inputnode, pick_1st, [('epi', 'in_file')]),
                      (inputnode, bet, [('epi', 'in_file')]),
                      (bet, hmc, [('out_file', 'in_file')]),
                      (hmc, hcm2itk, [('mat_file', 'transform_file')]),
                      (pick_1st, hcm2itk, [('roi_file', 'source_file'),
                                           ('roi_file', 'reference_file')]),
                      (hcm2itk, outputnode, [('itk_transform', 'xforms')]),
                      (hmc, outputnode, [('out_file', 'epi_brain')]),
                      (hmc, avscale, [('mat_file', 'mat_file')]),
                      (avscale, avs_format, [('translations', 'translations'),
                                             ('rot_angles', 'rot_angles')]),
                      (hmc, bet_hmc, [('mean_img', 'in_file')]),
                      (hmc, avscale, [('mean_img', 'ref_file')]),
                      (bet_hmc, outputnode, [('mask_file', 'epi_mask'),
                                             ('out_file', 'epi_mean')])])

    # Write corrected file in the designated output dir
    ds_hmc = pe.Node(DerivativesDataSink(base_directory=settings['output_dir'],
                                         suffix='hmc'),
                     name='DerivativesHMC')
    ds_mats = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='hmc'),
                      name='DerivativesHMCmats')
    ds_mask = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='hmc_bmask'),
                      name='DerivativesEPImask')

    ds_motion = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='hmc'),
                        name='DerivativesParamsHMC')

    workflow.connect([(inputnode, ds_hmc, [('epi', 'source_file')]),
                      (inputnode, ds_mats, [('epi', 'source_file')]),
                      (inputnode, ds_mask, [('epi', 'source_file')]),
                      (inputnode, ds_motion, [('epi', 'source_file')]),
                      (hmc, ds_hmc, [('out_file', 'in_file')]),
                      (hcm2itk, ds_mats, [('itk_transform', 'in_file')]),
                      (bet_hmc, ds_mask, [('mask_file', 'in_file')]),
                      (avs_format, ds_motion, [('out_file', 'in_file')])])

    return workflow
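
The workflow above only reads settings['output_dir'], which the DerivativesDataSink nodes use as their base directory. A minimal usage sketch, assuming the module-level imports of the example (pe, niu, fsl, c3, FormatHMCParam, DerivativesDataSink) are available and FSL is installed; the paths are hypothetical placeholders:

# Hypothetical usage sketch for epi_hmc; all paths are placeholders.
hmc_wf = epi_hmc(settings={'output_dir': '/data/derivatives'})
hmc_wf.base_dir = '/tmp/work'  # working directory for intermediate files
hmc_wf.inputs.inputnode.epi = '/data/bids/sub-01/func/sub-01_task-rest_bold.nii.gz'
hmc_wf.run()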
Example No. 7
def epi_mni_transformation(name='EPIMNITransformation', settings=None):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'mat_epi_to_t1', 't1_2_mni_forward_transform', 'epi', 'epi_ras',
        'epi_mask', 't1', 'hmc_xforms'
    ]),
                        name='inputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    pick_1st = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='EPIPickFirst')

    gen_ref = pe.Node(niu.Function(input_names=['fixed_image', 'moving_image'],
                                   output_names=['out_file'],
                                   function=_gen_reference),
                      name='GenNewMNIReference')
    gen_ref.inputs.fixed_image = op.join(get_mni_template_ras(),
                                         'MNI152_T1_1mm.nii.gz')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitEPI')
    merge_transforms = pe.MapNode(niu.Merge(3),
                                  iterfield=['in3'],
                                  name='MergeTransforms')
    epi_to_mni_transform = pe.MapNode(ants.ApplyTransforms(),
                                      iterfield=['input_image', 'transforms'],
                                      name='EPIToMNITransform')
    epi_to_mni_transform.terminal_output = 'file'
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeEPI')

    mask_merge_tfms = pe.Node(niu.Merge(2), name='MaskMergeTfms')
    mask_mni_tfm = pe.Node(
        ants.ApplyTransforms(interpolation='NearestNeighbor'),
        name='MaskToMNI')

    # Write corrected file in the designated output dir
    ds_mni = pe.Node(DerivativesDataSink(base_directory=settings['output_dir'],
                                         suffix='hmc_mni'),
                     name='DerivativesHMCMNI')
    ds_mni_mask = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='hmc_mni_bmask'),
                          name='DerivativesHMCMNImask')

    workflow.connect([
        (inputnode, pick_1st, [('epi_ras', 'in_file')]),
        (inputnode, ds_mni, [('epi', 'source_file')]),
        (inputnode, ds_mni_mask, [('epi', 'source_file')]),
        (pick_1st, gen_ref, [('roi_file', 'moving_image')]),
        (inputnode, merge_transforms, [('t1_2_mni_forward_transform', 'in1'),
                                       (('mat_epi_to_t1', _aslist), 'in2'),
                                       ('hmc_xforms', 'in3')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('mat_epi_to_t1', _aslist), 'in2')]),
        (inputnode, split, [('epi_ras', 'in_file')]),
        (split, epi_to_mni_transform, [('out_files', 'input_image')]),
        (merge_transforms, epi_to_mni_transform, [('out', 'transforms')]),
        (gen_ref, epi_to_mni_transform, [('out_file', 'reference_image')]),
        (epi_to_mni_transform, merge, [('output_image', 'in_files')]),
        (merge, ds_mni, [('merged_file', 'in_file')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
        (inputnode, mask_mni_tfm, [('epi_mask', 'input_image')]),
        (mask_mni_tfm, ds_mni_mask, [('output_image', 'in_file')])
    ])

    return workflow
Example No. 8
def epi_mean_t1_registration(name='EPIMeanNormalization', settings=None):
    """
    Uses FSL FLIRT with the BBR cost function to find the transform that
    maps the EPI space into the T1-space
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['epi', 'epi_mean', 't1_brain', 't1_seg']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['mat_epi_to_t1',
                                                       'mat_t1_to_epi']),
                         name='outputnode')

    # Extract wm mask from segmentation
    wm_mask = pe.Node(niu.Function(input_names=['in_file'],
                                   output_names=['out_file'],
                                   function=_extract_wm),
                      name='WM_mask')

    flt_bbr_init = pe.Node(fsl.FLIRT(dof=6, out_matrix_file='init.mat'),
                           name='Flirt_BBR_init')
    flt_bbr = pe.Node(fsl.FLIRT(dof=6, cost_func='bbr'), name='Flirt_BBR')
    flt_bbr.inputs.schedule = op.join(os.getenv('FSLDIR'),
                                      'etc/flirtsch/bbr.sch')

    # make equivalent warp fields
    invt_bbr = pe.Node(fsl.ConvertXFM(invert_xfm=True), name='Flirt_BBR_Inv')

    #  EPI to T1 transform matrix is from fsl, using c3 tools to convert to
    #  something ANTs will like.
    fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_fwd')
    fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_inv')

    # Write EPI mean in T1w space
    ds_t1w = pe.Node(DerivativesDataSink(base_directory=settings['output_dir'],
                                         suffix='hmc_t1'),
                     name='DerivHMC_T1w')
    # Write registered file in the designated output dir
    ds_tfm_fwd = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='epi2t1w_affine'),
                         name='DerivEPI_to_T1w_fwd')
    ds_tfm_inv = pe.Node(DerivativesDataSink(
        base_directory=settings['output_dir'], suffix='t1w2epi_affine'),
                         name='DerivEPI_to_T1w_inv')

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_file')]),
        (inputnode, flt_bbr_init, [('t1_brain', 'reference')]),
        (inputnode, fsl2itk_fwd, [('t1_brain', 'reference_file'),
                                  ('epi_mean', 'source_file')]),
        (inputnode, fsl2itk_inv, [('epi_mean', 'reference_file'),
                                  ('t1_brain', 'source_file')]),
        (inputnode, flt_bbr_init, [('epi_mean', 'in_file')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (inputnode, flt_bbr, [('t1_brain', 'reference')]),
        (inputnode, flt_bbr, [('epi_mean', 'in_file')]),
        (wm_mask, flt_bbr, [('out_file', 'wm_seg')]),
        (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]),
        (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]),
        (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]),
        (fsl2itk_fwd, outputnode, [('itk_transform', 'mat_epi_to_t1')]),
        (fsl2itk_inv, outputnode, [('itk_transform', 'mat_t1_to_epi')]),
        (inputnode, ds_tfm_fwd, [('epi', 'source_file')]),
        (inputnode, ds_tfm_inv, [('epi', 'source_file')]),
        (inputnode, ds_t1w, [('epi', 'source_file')]),
        (fsl2itk_fwd, ds_tfm_fwd, [('itk_transform', 'in_file')]),
        (fsl2itk_inv, ds_tfm_inv, [('itk_transform', 'in_file')]),
        (flt_bbr, ds_t1w, [('out_file', 'in_file')])
    ])

    # Plots for report
    png_sbref_t1 = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'],
        output_names=['out_file'],
        function=stripped_brain_overlay),
                           name='PNG_sbref_t1')
    png_sbref_t1.inputs.out_file = 'sbref_to_t1.png'

    # Write corrected file in the designated output dir
    ds_png = pe.Node(DerivativesDataSink(base_directory=settings['output_dir'],
                                         suffix='epi_to_t1'),
                     name='DerivativesPNG')

    workflow.connect([(flt_bbr, png_sbref_t1, [('out_file', 'overlay_file')]),
                      (inputnode, png_sbref_t1, [('t1_seg', 'in_file')]),
                      (inputnode, ds_png, [('epi', 'source_file')]),
                      (png_sbref_t1, ds_png, [('out_file', 'in_file')])])

    return workflow
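
A usage sketch for the registration workflow above. Note that FSLDIR must be set in the environment, because the BBR schedule path is resolved from it while the workflow is being built; all file paths below are hypothetical placeholders:

# Hypothetical usage sketch for epi_mean_t1_registration.
reg_wf = epi_mean_t1_registration(settings={'output_dir': '/data/derivatives'})
reg_wf.base_dir = '/tmp/work'
reg_wf.inputs.inputnode.epi = '/data/bids/sub-01/func/sub-01_task-rest_bold.nii.gz'
reg_wf.inputs.inputnode.epi_mean = '/tmp/work/epi_mean.nii.gz'  # mean EPI after HMC
reg_wf.inputs.inputnode.t1_brain = '/data/derivatives/sub-01_T1w_brain.nii.gz'
reg_wf.inputs.inputnode.t1_seg = '/data/derivatives/sub-01_T1w_dseg.nii.gz'
reg_wf.run()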
Example No. 9
def init_qmri_wf(sourcedata,
                 derivatives,
                 acquisition='memp2rage',
                 name='qmri_mp2rage'):

    wf = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['sourcedata',
                                                      'derivatives',
                                                      'subject',
                                                      'session',
                                                      'acquisition']),
                        name='inputnode')

    inputnode.inputs.sourcedata = sourcedata
    inputnode.inputs.derivatives = derivatives

    
    get_parameters = pe.Node(niu.Function(function=get_mp2rage_pars,
                                          input_names=['sourcedata',
                                                       'subject',
                                                       'session',
                                                       'acquisition'],
                                          output_names=['mp2rage_parameters']),
                             name='get_mp2rage_pars')

    wf.connect([(inputnode, get_parameters,
                 [('sourcedata', 'sourcedata'),
                  ('subject', 'subject'),
                  ('session', 'session'),
                  ('acquisition', 'acquisition')])])

    get_qmri = pe.Node(niu.Function(function=fit_mp2rage,
                                       input_names=['mp2rage_parameters',
                                                    'return_images'],
                                       output_names=['S0map', 't2starw', 't2starmap']),
                          name='get_qmri')

    get_qmri.inputs.return_images = ['S0map', 't2starw', 't2starmap']

    wf.connect([ (get_parameters, get_qmri, [('mp2rage_parameters', 'mp2rage_parameters')]) ])


    get_first_inversion = pe.MapNode(niu.Function(function=get_inv,
                                                  input_names=['mp2rage_parameters', 'inv', 'echo'],
                                                  output_names='inv1'),
                                     iterfield=['mp2rage_parameters'],
                                     name='get_first_inversion')

    get_first_inversion.inputs.inv = 1
    get_first_inversion.inputs.echo = 1
    wf.connect(get_parameters, 'mp2rage_parameters', get_first_inversion, 'mp2rage_parameters')

    rename = pe.Node(niu.Rename(use_fullpath=True), name='rename')
    rename.inputs.format_string = '%(path)s/sub-%(subject_id)s_ses-%(session)s_MPRAGE.nii.gz'
    rename.inputs.parse_string = '(?P<path>.+)/sub-(?P<subject_id>.+)_ses-(?P<session>.+)_acq-.+_MPRAGE.nii(.gz)?'

    ds_S0 = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                        keep_dtype=False,
                                        out_path_base='qmri_memp2rages',
                                        suffix='S0',
                                        space='average'),
                    name='ds_S0')
    reorient_s0 = pe.Node(Reorient(),
                          name='reorient_s0')
    wf.connect(get_first_inversion, ('inv1', _pickone), rename, 'in_file')
    wf.connect(rename, 'out_file', ds_S0, 'source_file')
    wf.connect(get_qmri, 'S0map', reorient_s0, 'in_file')
    wf.connect(reorient_s0, 'out_file', ds_S0, 'in_file')

    ds_t2starmap = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                               keep_dtype=False,
                                               out_path_base='qmri_memp2rages',
                                               suffix='t2starmap',
                                               space='average'),
                           name='ds_t2starmap')
    reorient_t2starmap = pe.Node(Reorient(),
                          name='reorient_t2starmap')
    wf.connect(rename, 'out_file', ds_t2starmap, 'source_file')
    wf.connect(get_qmri, 't2starmap', reorient_t2starmap, 'in_file')
    wf.connect(reorient_t2starmap, 'out_file', ds_t2starmap, 'in_file')

    ds_t2starw = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                             keep_dtype=False,
                                             out_path_base='qmri_memp2rages',
                                             suffix='t2starw',
                                             space='average'),
                         name='ds_t2starw')
    reorient_t2w = pe.Node(Reorient(),
                          name='reorient_t2w')
    wf.connect(rename, 'out_file', ds_t2starw, 'source_file')
    wf.connect(get_qmri, 't2starw', reorient_t2w, 'in_file')
    wf.connect(reorient_t2w, 'out_file', ds_t2starw, 'in_file')

    return wf
Example No. 10
def main(derivatives,
         subject,
         session,
         task,
         acquisition,
         run,
         workflow_folder='/tmp/workflow_folders'):

    os.environ['SUBJECTS_DIR'] = os.path.join(derivatives, 'freesurfer')

    preproc_bold = get_derivative(derivatives,
                                  'spynoza',
                                  'func',
                                  subject=subject,
                                  session=session,
                                  suffix='preproc',
                                  acquisition=acquisition,
                                  run=run,
                                  task=task)

    registration = get_derivative(derivatives,
                                  'manual_registrations',
                                  'func',
                                  subject=subject,
                                  session=session,
                                  description='spynoza2t1w',
                                  suffix='transform',
                                  extension='lta',
                                  check_exists=False)

    wf = pe.Workflow(name='sample_fs', base_dir=workflow_folder)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['preproc_bold', 'registration',
                'subject']),
                        name='inputnode')
    inputnode.inputs.preproc_bold = preproc_bold
    inputnode.inputs.subject = 'sub-{}'.format(subject)
    inputnode.inputs.registration = registration

    sampler = pe.Node(fs.SampleToSurface(sampling_method='average',
                                         sampling_range=(0, 1, 0.2),
                                         sampling_units='frac',
                                         interp_method='trilinear',
                                         cortex_mask=True,
                                         subjects_dir=os.path.join(
                                             derivatives, 'freesurfer'),
                                         override_reg_subj=True,
                                         out_type='gii'),
                      iterables=('hemi', ['lh', 'rh']),
                      name='sampler')

    wf.connect(inputnode, 'preproc_bold', sampler, 'source_file')
    if registration is not None:
        wf.connect(inputnode, 'registration', sampler, 'reg_file')
    else:
        sampler.inputs.reg_header = True
    wf.connect(inputnode, 'subject', sampler, 'subject_id')

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'])
    wf.connect(sampler, 'out_file', merger, 'in1')

    ds = pe.MapNode(DerivativesDataSink(
        base_directory=derivatives,
        out_path_base='sampled_giis',
    ),
                    iterfield=['in_file', 'suffix'],
                    name='datasink')

    ds.inputs.suffix = ['bold.lh', 'bold.rh']

    wf.connect(merger, 'out', ds, 'in_file')
    wf.connect(inputnode, 'preproc_bold', ds, 'source_file')

    wf.run()
Example No. 11
def init_func_derivatives_wf(output_dir, output_spaces, template, freesurfer,
                             name='func_derivatives_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['source_file', 'epi_t1', 'epi_mask_t1', 'epi_mni', 'epi_mask_mni',
                    'confounds', 'surfaces']
            ),
        name='inputnode')

    ds_epi_t1 = pe.Node(DerivativesDataSink(base_directory=output_dir, suffix='space-T1w_preproc'),
                        name='ds_epi_t1', run_without_submitting=True)

    ds_epi_mask_t1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 suffix='space-T1w_brainmask'),
                             name='ds_epi_mask_t1', run_without_submitting=True)

    suffix_fmt = 'space-{}_{}'.format
    ds_epi_mni = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix=suffix_fmt(template, 'preproc')),
                         name='ds_epi_mni', run_without_submitting=True)
    ds_epi_mask_mni = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                  suffix=suffix_fmt(template, 'brainmask')),
                              name='ds_epi_mask_mni', run_without_submitting=True)

    ds_confounds = pe.Node(DerivativesDataSink(base_directory=output_dir, suffix='confounds'),
                           name="ds_confounds", run_without_submitting=True)

    def get_gifti_name(in_file):
        import os
        import re
        in_format = re.compile(r'(?P<LR>[lr])h.(?P<space>\w+).gii')
        info = in_format.match(os.path.basename(in_file)).groupdict()
        info['LR'] = info['LR'].upper()
        return 'space-{space}.{LR}.func'.format(**info)

    name_surfs = pe.MapNode(niu.Function(function=get_gifti_name),
                            iterfield='in_file', name='name_surfs', run_without_submitting=True)

    ds_bold_surfs = pe.MapNode(DerivativesDataSink(base_directory=output_dir),
                               iterfield=['in_file', 'suffix'], name='ds_bold_surfs',
                               run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_confounds, [('source_file', 'source_file'),
                                   ('confounds', 'in_file')]),
        ])

    if 'T1w' in output_spaces:
        workflow.connect([
            (inputnode, ds_epi_t1, [('source_file', 'source_file'),
                                    ('epi_t1', 'in_file')]),
            (inputnode, ds_epi_mask_t1, [('source_file', 'source_file'),
                                         ('epi_mask_t1', 'in_file')]),
            ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_epi_mni, [('source_file', 'source_file'),
                                     ('epi_mni', 'in_file')]),
            (inputnode, ds_epi_mask_mni, [('source_file', 'source_file'),
                                          ('epi_mask_mni', 'in_file')]),
            ])
    if freesurfer and any(space.startswith('fs') for space in output_spaces):
        workflow.connect([
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_bold_surfs, [('source_file', 'source_file'),
                                        ('surfaces', 'in_file')]),
            (name_surfs, ds_bold_surfs, [('out', 'suffix')]),
            ])

    return workflow
Example No. 12
def init_func_derivatives_wf(
    bids_root,
    cifti_output,
    freesurfer,
    metadata,
    output_dir,
    spaces,
    use_aroma,
    name='func_derivatives_wf',
):
    """
    Set up a battery of datasinks to store derivatives in the right location.

    Parameters
    ----------
    bids_root : :obj:`str`
        Original BIDS dataset path.
    cifti_output : :obj:`bool`
        Whether the ``--cifti-output`` flag was set.
    freesurfer : :obj:`bool`
        Whether FreeSurfer anatomical processing was run.
    metadata : :obj:`dict`
        Metadata dictionary associated to the BOLD run.
    output_dir : :obj:`str`
        Where derivatives should be written out to.
    spaces : :py:class:`~niworkflows.utils.spaces.SpatialReferences`
        A container for storing, organizing, and parsing spatial normalizations. Composed of
        :py:class:`~niworkflows.utils.spaces.Reference` objects representing spatial references.
        Each ``Reference`` contains a space, which is a string of either TemplateFlow template IDs
        (e.g., ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNIPediatricAsym``), nonstandard references
        (e.g., ``T1w`` or ``anat``, ``sbref``, ``run``, etc.), or a custom template located in
        the TemplateFlow root directory. Each ``Reference`` may also contain a spec, which is a
        dictionary with template specifications (e.g., a specification of ``{'resolution': 2}``
        would lead to resampling on a 2mm resolution of the space).
    use_aroma : :obj:`bool`
        Whether ``--use-aroma`` flag was set.
    name : :obj:`str`
        This workflow's identifier (default: ``func_derivatives_wf``).

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.utility import KeySelect
    from smriprep.workflows.outputs import _bids_relative

    timing_parameters = prepare_timing_parameters(metadata)

    nonstd_spaces = set(spaces.get_nonstandard())
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'aroma_noise_ics', 'bold_aparc_std', 'bold_aparc_t1', 'bold_aseg_std',
        'bold_aseg_t1', 'bold_cifti', 'bold_mask_std', 'bold_mask_t1',
        'bold_std', 'bold_std_ref', 'bold_t1', 'bold_t1_ref', 'bold_native',
        'bold_native_ref', 'bold_mask_native', 'cifti_variant',
        'cifti_metadata', 'cifti_density', 'confounds', 'confounds_metadata',
        'melodic_mix', 'nonaggr_denoised_file', 'source_file', 'surf_files',
        'surf_refs', 'template', 'spatial_reference', 'bold2anat_xfm',
        'anat2bold_xfm', 'acompcor_masks', 'tcompcor_mask'
    ]),
                        name='inputnode')

    raw_sources = pe.Node(niu.Function(function=_bids_relative),
                          name='raw_sources')
    raw_sources.inputs.bids_root = bids_root

    ds_confounds = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='confounds',
                                               suffix='timeseries',
                                               dismiss_entities=("echo", )),
                           name="ds_confounds",
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    ds_ref_t1w_xfm = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 to='T1w',
                                                 mode='image',
                                                 suffix='xfm',
                                                 extension='.txt',
                                                 dismiss_entities=('echo', ),
                                                 **{'from': 'scanner'}),
                             name='ds_ref_t1w_xfm',
                             run_without_submitting=True)
    ds_ref_t1w_inv_xfm = pe.Node(DerivativesDataSink(
        base_directory=output_dir,
        to='scanner',
        mode='image',
        suffix='xfm',
        extension='.txt',
        dismiss_entities=('echo', ),
        **{'from': 'T1w'}),
                                 name='ds_t1w_tpl_inv_xfm',
                                 run_without_submitting=True)

    workflow.connect([
        (inputnode, raw_sources, [('source_file', 'in_files')]),
        (inputnode, ds_confounds, [('source_file', 'source_file'),
                                   ('confounds', 'in_file'),
                                   ('confounds_metadata', 'meta_dict')]),
        (inputnode, ds_ref_t1w_xfm, [('source_file', 'source_file'),
                                     ('bold2anat_xfm', 'in_file')]),
        (inputnode, ds_ref_t1w_inv_xfm, [('source_file', 'source_file'),
                                         ('anat2bold_xfm', 'in_file')]),
    ])

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        ds_bold_native = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='preproc',
            compress=True,
            SkullStripped=False,
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                                 name='ds_bold_native',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_native_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='boldref',
            compress=True,
            dismiss_entities=("echo", )),
                                     name='ds_bold_native_ref',
                                     run_without_submitting=True,
                                     mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_native = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                      name='ds_bold_mask_native',
                                      run_without_submitting=True,
                                      mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_bold_native, [('source_file', 'source_file'),
                                         ('bold_native', 'in_file')]),
            (inputnode, ds_bold_native_ref, [('source_file', 'source_file'),
                                             ('bold_native_ref', 'in_file')]),
            (inputnode, ds_bold_mask_native, [('source_file', 'source_file'),
                                              ('bold_mask_native', 'in_file')
                                              ]),
            (raw_sources, ds_bold_mask_native, [('out', 'RawSources')]),
        ])

    # Resample to T1w space
    if nonstd_spaces.intersection(('T1w', 'anat')):
        ds_bold_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            desc='preproc',
            compress=True,
            SkullStripped=False,
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                             name='ds_bold_t1',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_t1_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            suffix='boldref',
            compress=True,
            dismiss_entities=("echo", )),
                                 name='ds_bold_t1_ref',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                  name='ds_bold_mask_t1',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, ds_bold_t1, [('source_file', 'source_file'),
                                     ('bold_t1', 'in_file')]),
            (inputnode, ds_bold_t1_ref, [('source_file', 'source_file'),
                                         ('bold_t1_ref', 'in_file')]),
            (inputnode, ds_bold_mask_t1, [('source_file', 'source_file'),
                                          ('bold_mask_t1', 'in_file')]),
            (raw_sources, ds_bold_mask_t1, [('out', 'RawSources')]),
        ])
        if freesurfer:
            ds_bold_aseg_t1 = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                space='T1w',
                desc='aseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                      name='ds_bold_aseg_t1',
                                      run_without_submitting=True,
                                      mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aparc_t1 = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                space='T1w',
                desc='aparcaseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                       name='ds_bold_aparc_t1',
                                       run_without_submitting=True,
                                       mem_gb=DEFAULT_MEMORY_MIN_GB)
            workflow.connect([
                (inputnode, ds_bold_aseg_t1, [('source_file', 'source_file'),
                                              ('bold_aseg_t1', 'in_file')]),
                (inputnode, ds_bold_aparc_t1, [('source_file', 'source_file'),
                                               ('bold_aparc_t1', 'in_file')]),
            ])

    if use_aroma:
        ds_aroma_noise_ics = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='AROMAnoiseICs',
            dismiss_entities=("echo", )),
                                     name="ds_aroma_noise_ics",
                                     run_without_submitting=True,
                                     mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_melodic_mix = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='MELODIC',
            suffix='mixing',
            dismiss_entities=("echo", )),
                                 name="ds_melodic_mix",
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_aroma_std = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='MNI152NLin6Asym',
            desc='smoothAROMAnonaggr',
            compress=True,
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                               name='ds_aroma_std',
                               run_without_submitting=True,
                               mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_aroma_noise_ics, [('source_file', 'source_file'),
                                             ('aroma_noise_ics', 'in_file')]),
            (inputnode, ds_melodic_mix, [('source_file', 'source_file'),
                                         ('melodic_mix', 'in_file')]),
            (inputnode, ds_aroma_std, [('source_file', 'source_file'),
                                       ('nonaggr_denoised_file', 'in_file')]),
        ])

    if getattr(spaces, '_cached') is None:
        return workflow

    # Store resamplings in standard spaces when listed in --output-spaces
    if spaces.cached.references:
        from niworkflows.interfaces.space import SpaceDataSource

        spacesource = pe.Node(SpaceDataSource(),
                              name='spacesource',
                              run_without_submitting=True)
        spacesource.iterables = ('in_tuple', [
            (s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3, ))
        ])

        select_std = pe.Node(KeySelect(
            fields=['template', 'bold_std', 'bold_std_ref', 'bold_mask_std']),
                             name='select_std',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

        ds_bold_std = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='preproc',
            compress=True,
            SkullStripped=False,
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                              name='ds_bold_std',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_std_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='boldref',
            compress=True,
            dismiss_entities=("echo", )),
                                  name='ds_bold_std_ref',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_std = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                   name='ds_bold_mask_std',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_bold_std, [('source_file', 'source_file')]),
            (inputnode, ds_bold_std_ref, [('source_file', 'source_file')]),
            (inputnode, ds_bold_mask_std, [('source_file', 'source_file')]),
            (inputnode, select_std, [('bold_std', 'bold_std'),
                                     ('bold_std_ref', 'bold_std_ref'),
                                     ('bold_mask_std', 'bold_mask_std'),
                                     ('template', 'template'),
                                     ('spatial_reference', 'keys')]),
            (spacesource, select_std, [('uid', 'key')]),
            (select_std, ds_bold_std, [('bold_std', 'in_file')]),
            (spacesource, ds_bold_std, [('space', 'space'),
                                        ('cohort', 'cohort'),
                                        ('resolution', 'resolution'),
                                        ('density', 'density')]),
            (select_std, ds_bold_std_ref, [('bold_std_ref', 'in_file')]),
            (spacesource, ds_bold_std_ref, [('space', 'space'),
                                            ('cohort', 'cohort'),
                                            ('resolution', 'resolution'),
                                            ('density', 'density')]),
            (select_std, ds_bold_mask_std, [('bold_mask_std', 'in_file')]),
            (spacesource, ds_bold_mask_std, [('space', 'space'),
                                             ('cohort', 'cohort'),
                                             ('resolution', 'resolution'),
                                             ('density', 'density')]),
            (raw_sources, ds_bold_mask_std, [('out', 'RawSources')]),
        ])

        if freesurfer:
            select_fs_std = pe.Node(KeySelect(
                fields=['bold_aseg_std', 'bold_aparc_std', 'template']),
                                    name='select_fs_std',
                                    run_without_submitting=True,
                                    mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aseg_std = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                desc='aseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                       name='ds_bold_aseg_std',
                                       run_without_submitting=True,
                                       mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aparc_std = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                desc='aparcaseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                        name='ds_bold_aparc_std',
                                        run_without_submitting=True,
                                        mem_gb=DEFAULT_MEMORY_MIN_GB)
            workflow.connect([
                (spacesource, select_fs_std, [('uid', 'key')]),
                (inputnode, select_fs_std, [('bold_aseg_std', 'bold_aseg_std'),
                                            ('bold_aparc_std',
                                             'bold_aparc_std'),
                                            ('template', 'template'),
                                            ('spatial_reference', 'keys')]),
                (select_fs_std, ds_bold_aseg_std, [('bold_aseg_std', 'in_file')
                                                   ]),
                (spacesource, ds_bold_aseg_std, [('space', 'space'),
                                                 ('cohort', 'cohort'),
                                                 ('resolution', 'resolution'),
                                                 ('density', 'density')]),
                (select_fs_std, ds_bold_aparc_std, [('bold_aparc_std',
                                                     'in_file')]),
                (spacesource, ds_bold_aparc_std, [('space', 'space'),
                                                  ('cohort', 'cohort'),
                                                  ('resolution', 'resolution'),
                                                  ('density', 'density')]),
                (inputnode, ds_bold_aseg_std, [('source_file', 'source_file')
                                               ]),
                (inputnode, ds_bold_aparc_std, [('source_file', 'source_file')
                                                ])
            ])

    fs_outputs = spaces.cached.get_fs_spaces()
    if freesurfer and fs_outputs:
        from niworkflows.interfaces.surf import Path2BIDS

        select_fs_surf = pe.Node(KeySelect(fields=['surfaces', 'surf_kwargs']),
                                 name='select_fs_surf',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        select_fs_surf.iterables = [('key', fs_outputs)]
        select_fs_surf.inputs.surf_kwargs = [{'space': s} for s in fs_outputs]

        name_surfs = pe.MapNode(Path2BIDS(pattern=r'(?P<hemi>[lr])h.\w+'),
                                iterfield='in_file',
                                name='name_surfs',
                                run_without_submitting=True)

        ds_bold_surfs = pe.MapNode(DerivativesDataSink(
            base_directory=output_dir,
            extension=".func.gii",
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                                   iterfield=['in_file', 'hemi'],
                                   name='ds_bold_surfs',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, select_fs_surf, [('surf_files', 'surfaces'),
                                         ('surf_refs', 'keys')]),
            (select_fs_surf, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_bold_surfs, [('source_file', 'source_file')]),
            (select_fs_surf, ds_bold_surfs, [('surfaces', 'in_file'),
                                             ('key', 'space')]),
            (name_surfs, ds_bold_surfs, [('hemi', 'hemi')]),
        ])

    # CIFTI output
    if cifti_output:
        ds_bold_cifti = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='bold',
            compress=False,
            TaskName=metadata.get('TaskName'),
            **timing_parameters),
                                name='ds_bold_cifti',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([(inputnode, ds_bold_cifti,
                           [(('bold_cifti', _unlist), 'in_file'),
                            ('source_file', 'source_file'),
                            (('cifti_metadata', _get_surface), 'space'),
                            ('cifti_density', 'density'),
                            (('cifti_metadata', _read_json), 'meta_dict')])])

    if "compcor" in config.execution.debug:
        ds_acompcor_masks = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc=[f"CompCor{_}" for _ in "CWA"],
            suffix="mask",
            compress=True),
                                    name="ds_acompcor_masks",
                                    run_without_submitting=True)
        ds_tcompcor_mask = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc="CompCorT",
            suffix="mask",
            compress=True),
                                   name="ds_tcompcor_mask",
                                   run_without_submitting=True)
        workflow.connect([
            (inputnode, ds_acompcor_masks, [("acompcor_masks", "in_file"),
                                            ("source_file", "source_file")]),
            (inputnode, ds_tcompcor_mask, [("tcompcor_mask", "in_file"),
                                           ("source_file", "source_file")]),
        ])

    return workflow
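
The spaces argument documented above is a niworkflows SpatialReferences container. A minimal sketch of how it might be constructed and passed in, assuming the module-level names used by the example (pe, niu, config, prepare_timing_parameters, DEFAULT_MEMORY_MIN_GB, DerivativesDataSink) are importable from the surrounding fmriprep package; the paths and metadata values are placeholders:

# Hypothetical sketch of building `spaces` and instantiating the workflow above.
from niworkflows.utils.spaces import Reference, SpatialReferences

spaces = SpatialReferences([
    Reference('MNI152NLin2009cAsym'),  # a standard TemplateFlow space (a spec dict may be added)
    Reference('T1w'),                  # a nonstandard, subject-anatomical space
])
spaces.checkpoint()  # freeze the queue so that spaces.cached is defined

func_derivs_wf = init_func_derivatives_wf(
    bids_root='/data/bids',
    cifti_output=False,
    freesurfer=False,
    metadata={'RepetitionTime': 2.0, 'TaskName': 'rest'},
    output_dir='/data/derivatives/fmriprep',
    spaces=spaces,
    use_aroma=False,
)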
Example No. 13
def init_bold_preproc_report_wf(mem_gb,
                                reportlets_dir,
                                name='bold_preproc_report_wf'):
    """
    Generate a visual report.

    This workflow generates and saves a reportlet showing the effect of resampling
    the BOLD signal using the standard deviation maps.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.resampling import init_bold_preproc_report_wf
            wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='.')

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB
    reportlets_dir : :obj:`str`
        Directory in which to save reportlets
    name : :obj:`str`, optional
        Workflow name (default: bold_preproc_report_wf)

    Inputs
    ------
    in_pre
        BOLD time-series, before resampling
    in_post
        BOLD time-series, after resampling
    name_source
        BOLD series NIfTI file
        Used to recover original information lost during processing

    """
    from nipype.algorithms.confounds import TSNR
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces import SimpleBeforeAfter
    from ...interfaces import DerivativesDataSink

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_pre', 'in_post', 'name_source']),
        name='inputnode')

    pre_tsnr = pe.Node(TSNR(), name='pre_tsnr', mem_gb=mem_gb * 4.5)
    pos_tsnr = pe.Node(TSNR(), name='pos_tsnr', mem_gb=mem_gb * 4.5)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    ds_report_bold = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                                 desc='preproc',
                                                 datatype="figures",
                                                 dismiss_entities=("echo", )),
                             name='ds_report_bold',
                             mem_gb=DEFAULT_MEMORY_MIN_GB,
                             run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_report_bold, [('name_source', 'source_file')]),
        (inputnode, pre_tsnr, [('in_pre', 'in_file')]),
        (inputnode, pos_tsnr, [('in_post', 'in_file')]),
        (pre_tsnr, bold_rpt, [('stddev_file', 'before')]),
        (pos_tsnr, bold_rpt, [('stddev_file', 'after')]),
        (bold_rpt, ds_report_bold, [('out_report', 'in_file')]),
    ])

    return workflow
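# A minimal usage sketch for the reportlet workflow above, assuming nipype and
# niworkflows are installed; all file paths are hypothetical.
report_wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='/tmp/reportlets')
report_wf.base_dir = '/tmp/work'
report_wf.inputs.inputnode.in_pre = '/data/sub-01_task-rest_bold.nii.gz'  # hypothetical path
report_wf.inputs.inputnode.in_post = '/data/sub-01_task-rest_desc-preproc_bold.nii.gz'  # hypothetical path
report_wf.inputs.inputnode.name_source = '/data/sub-01_task-rest_bold.nii.gz'
report_wf.run()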
Exemplo n.º 14
0
def init_combine_mp2rage_wf(sourcedata,
                            derivatives,
                            name='combine_mp2rages',
                            n_mp2rages=2):

    wf = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'sourcedata', 'derivatives', 'subject', 'session', 'acquisition'
    ]),
                        name='inputnode')

    inputnode.inputs.sourcedata = sourcedata
    inputnode.inputs.derivatives = derivatives

    get_parameters = pe.MapNode(niu.Function(
        function=get_mp2rage_pars,
        input_names=['sourcedata', 'subject', 'session', 'acquisition'],
        output_names=['mp2rage_parameters']),
                                iterfield=['acquisition'],
                                name='get_mp2rage_pars')

    wf.connect([(inputnode, get_parameters, [('sourcedata', 'sourcedata'),
                                             ('subject', 'subject'),
                                             ('session', 'session'),
                                             ('acquisition', 'acquisition')])])

    make_t1w = pe.MapNode(niu.Function(function=fit_mp2rage,
                                       input_names=['mp2rage_parameters'],
                                       output_names=['t1w_uni', 't1map']),
                          iterfield=['mp2rage_parameters'],
                          name='make_t1w')

    wf.connect([(get_parameters, make_t1w, [('mp2rage_parameters',
                                             'mp2rage_parameters')])])

    get_first_inversion = pe.MapNode(niu.Function(
        function=get_inv,
        input_names=['mp2rage_parameters', 'inv', 'echo'],
        output_names='inv1'),
                                     iterfield=['mp2rage_parameters'],
                                     name='get_first_inversion')

    get_first_inversion.inputs.inv = 1
    get_first_inversion.inputs.echo = 1
    wf.connect(get_parameters, 'mp2rage_parameters', get_first_inversion,
               'mp2rage_parameters')

    split = pe.Node(niu.Split(splits=[1, n_mp2rages - 1]), name='split')
    wf.connect(get_first_inversion, 'inv1', split, 'inlist')

    flirt = pe.MapNode(fsl.FLIRT(dof=6), iterfield=['in_file'], name='flirt')

    wf.connect(split, ('out1', _pickone), flirt, 'reference')
    wf.connect(split, 'out2', flirt, 'in_file')

    convert2itk = pe.MapNode(C3dAffineTool(),
                             iterfield=['source_file', 'transform_file'],
                             name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True

    wf.connect(flirt, 'out_matrix_file', convert2itk, 'transform_file')
    wf.connect(split, ('out1', _pickone), convert2itk, 'reference_file')
    wf.connect(split, 'out2', convert2itk, 'source_file')

    transform_t1w_wf = init_transform_to_first_image_wf('transforms_t1w',
                                                        n_images=n_mp2rages)

    wf.connect(make_t1w, 't1w_uni', transform_t1w_wf, 'inputnode.in_files')
    wf.connect(convert2itk, 'itk_transform', transform_t1w_wf,
               'inputnode.transforms')

    get_second_inversion = pe.MapNode(niu.Function(
        function=get_inv,
        input_names=['mp2rage_parameters', 'inv', 'echo'],
        output_names='inv2'),
                                      iterfield=['mp2rage_parameters'],
                                      name='get_second_inversion')
    get_second_inversion.inputs.inv = 2

    transform_inv2_wf = init_transform_to_first_image_wf('transforms_inv2',
                                                         n_images=n_mp2rages)
    wf.connect(get_parameters, 'mp2rage_parameters', get_second_inversion,
               'mp2rage_parameters')
    wf.connect(get_second_inversion, 'inv2', transform_inv2_wf,
               'inputnode.in_files')
    wf.connect(convert2itk, 'itk_transform', transform_inv2_wf,
               'inputnode.transforms')

    transform_t1map_wf = init_transform_to_first_image_wf('transform_t1map',
                                                          n_images=n_mp2rages)

    wf.connect(make_t1w, 't1map', transform_t1map_wf, 'inputnode.in_files')
    wf.connect(convert2itk, 'itk_transform', transform_t1map_wf,
               'inputnode.transforms')

    ds_t1w = pe.MapNode(DerivativesDataSink(base_directory=derivatives,
                                            keep_dtype=False,
                                            out_path_base='t1w',
                                            suffix='T1w'),
                        iterfield=['in_file', 'source_file'],
                        name='ds_t1w')

    reorient_t1w = pe.MapNode(Reorient(),
                              iterfield=['in_file'],
                              name='reorient_t1w')

    wf.connect(make_t1w, 't1w_uni', reorient_t1w, 'in_file')
    wf.connect(reorient_t1w, 'out_file', ds_t1w, 'in_file')
    wf.connect(get_first_inversion, 'inv1', ds_t1w, 'source_file')

    ds_t1map = pe.MapNode(DerivativesDataSink(base_directory=derivatives,
                                              keep_dtype=False,
                                              out_path_base='t1map',
                                              suffix='T1w'),
                          iterfield=['in_file', 'source_file'],
                          name='ds_t1map')

    reorient_t1map = pe.MapNode(Reorient(),
                                iterfield=['in_file'],
                                name='reorient_t1map')

    wf.connect(make_t1w, 't1map', reorient_t1map, 'in_file')
    wf.connect(reorient_t1map, 'out_file', ds_t1map, 'in_file')
    wf.connect(get_first_inversion, 'inv1', ds_t1map, 'source_file')

    ds_t1w_average = pe.Node(DerivativesDataSink(
        base_directory=derivatives,
        keep_dtype=False,
        out_path_base='averaged_mp2rages',
        suffix='T1w',
        space='average'),
                             name='ds_t1w_average')

    rename = pe.Node(niu.Rename(use_fullpath=True), name='rename')
    rename.inputs.format_string = '%(path)s/sub-%(subject_id)s_ses-%(session)s_MPRAGE.nii.gz'
    rename.inputs.parse_string = '(?P<path>.+)/sub-(?P<subject_id>.+)_ses-(?P<session>.+)_acq-.+_MPRAGE.nii(.gz)?'

    wf.connect(get_first_inversion, ('inv1', _pickone), rename, 'in_file')
    reorient_average_t1w = pe.Node(Reorient(), name='reorient_average_t1w')
    wf.connect(transform_t1w_wf, 'outputnode.mean_image', reorient_average_t1w,
               'in_file')
    wf.connect(reorient_average_t1w, 'out_file', ds_t1w_average, 'in_file')
    wf.connect(rename, 'out_file', ds_t1w_average, 'source_file')

    ds_t1map_average = pe.Node(DerivativesDataSink(
        base_directory=derivatives,
        keep_dtype=False,
        out_path_base='averaged_mp2rages',
        suffix='T1map',
        space='average'),
                               name='ds_t1map_average')

    reorient_t1map_average = pe.Node(Reorient(), name='reorient_t1map_average')
    wf.connect(rename, 'out_file', ds_t1map_average, 'source_file')
    wf.connect(transform_t1map_wf, 'outputnode.mean_image',
               reorient_t1map_average, 'in_file')
    wf.connect(reorient_t1map_average, 'out_file', ds_t1map_average, 'in_file')

    ds_inv2 = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                          keep_dtype=False,
                                          out_path_base='averaged_mp2rages',
                                          suffix='INV2',
                                          space='average'),
                      name='ds_inv2')

    reorient_inv2 = pe.Node(Reorient(), name='reorient_inv2')

    wf.connect(rename, 'out_file', ds_inv2, 'source_file')
    wf.connect(transform_inv2_wf, 'outputnode.mean_image', reorient_inv2,
               'in_file')
    wf.connect(reorient_inv2, 'out_file', ds_inv2, 'in_file')

    return wf
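# A minimal usage sketch for init_combine_mp2rage_wf, with hypothetical subject,
# session, and acquisition labels; it assumes the helpers it references
# (get_mp2rage_pars, fit_mp2rage, get_inv, _pickone,
# init_transform_to_first_image_wf) are importable.
combine_wf = init_combine_mp2rage_wf('/sourcedata', '/derivatives', n_mp2rages=2)
combine_wf.base_dir = '/workflow_folders'
combine_wf.inputs.inputnode.subject = '01'        # hypothetical label
combine_wf.inputs.inputnode.session = 'anat'      # hypothetical label
combine_wf.inputs.inputnode.acquisition = ['mp2rage1', 'mp2rage2']  # one entry per MP2RAGE
combine_wf.run()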
Exemplo n.º 15
0
wf = init_anat_template_wf(False, 8, 2)
wf.inputs.inputnode.t1w = t1w

wf.base_dir = '/workflow_folders'

transformer = pe.MapNode(ants.ApplyTransforms(interpolation='LanczosWindowedSinc'),
                         iterfield=['input_image', 'transforms'],
                         name='transformer')
transformer.inputs.input_image = inv2

wf.connect(wf.get_node('outputnode'), 'template_transforms', transformer, 'transforms')
wf.connect(wf.get_node('outputnode'), 't1_template', transformer, 'reference_image')

ds_inv2 = pe.MapNode(DerivativesDataSink(base_directory='/derivatives',
                                         keep_dtype=False,
                                         out_path_base='inv2_in_t1w_space',
                                         suffix='MPRAGE',
                                         space='average'),
                     iterfield=['in_file', 'source_file'],
                     name='ds_inv2')
ds_inv2.inputs.source_file = inv2
wf.connect(transformer, 'output_image', ds_inv2, 'in_file')

# ds_template sinks a single file, so use a plain Node (pe.Node does not take
# an iterfield argument).
ds_template = pe.Node(DerivativesDataSink(base_directory='/derivatives',
                                          keep_dtype=False,
                                          out_path_base='inv2_in_t1w_space',
                                          suffix='T1w',
                                          space='average'),
                      name='ds_template')
ds_template.inputs.source_file = t1w[0]
# Assumed completion of this fragment: the averaged template is the datasink's in_file.
wf.connect(wf.get_node('outputnode'), 't1_template', ds_template, 'in_file')
Exemplo n.º 16
0
def epi_unwarp(name='EPIUnwarpWorkflow', settings=None):
    """ A workflow to correct EPI images """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'epi', 'sbref_brain', 'fieldmap', 'fmap_movpar', 'fmap_mask',
        'epi_brain'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['epi_correct', 'epi_mean']),
        name='outputnode')

    # Read metadata
    meta = pe.MapNode(
        ReadSidecarJSON(fields=['TotalReadoutTime', 'PhaseEncodingDirection']),
        iterfield=['in_file'],
        name='metadata')

    encfile = pe.Node(interface=niu.Function(
        input_names=['input_images', 'in_dict'],
        output_names=['parameters_file'],
        function=create_encoding_file),
                      name='TopUp_encfile',
                      updatehash=True)

    fslsplit = pe.Node(fsl.Split(dimension='t'), name='EPI_split')

    # Now, we cannot use the LSR method
    unwarp_epi = pe.MapNode(fsl.ApplyTOPUP(method='jac', in_index=[1]),
                            iterfield=['in_files'],
                            name='TopUpApply')

    # Merge back
    fslmerge = pe.Node(fsl.Merge(dimension='t'), name='EPI_corr_merge')

    # Compute mean
    epi_mean = pe.Node(fsl.MeanImage(dimension='T'), name='EPI_mean')

    workflow.connect([
        (inputnode, meta, [('epi', 'in_file')]),
        (inputnode, encfile, [('epi', 'input_images')]),
        (inputnode, fslsplit, [('epi_brain', 'in_file')]),
        (meta, encfile, [('out_dict', 'in_dict')]),
        (inputnode, unwarp_epi, [('fieldmap', 'in_topup_fieldcoef'),
                                 ('fmap_movpar', 'in_topup_movpar')]),
        (encfile, unwarp_epi, [('parameters_file', 'encoding_file')]),
        (fslsplit, unwarp_epi, [('out_files', 'in_files')]),
        (unwarp_epi, fslmerge, [('out_corrected', 'in_files')]),
        (fslmerge, epi_mean, [('merged_file', 'in_file')]),
        (fslmerge, outputnode, [('merged_file', 'epi_correct')]),
        (epi_mean, outputnode, [('out_file', 'epi_mean')])
    ])

    # Plot result
    png_epi_corr = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'],
        output_names=['out_file'],
        function=stripped_brain_overlay),
                           name='PNG_epi_corr')
    png_epi_corr.inputs.out_file = 'corrected_EPI.png'

    ds_png = pe.Node(DerivativesDataSink(base_directory=settings['output_dir'],
                                         suffix='sdc'),
                     name='DerivativesPNG')

    workflow.connect([(epi_mean, png_epi_corr, [('out_file', 'overlay_file')]),
                      (inputnode, png_epi_corr, [('fmap_mask', 'in_file')]),
                      (inputnode, ds_png, [('epi', 'source_file')]),
                      (png_epi_corr, ds_png, [('out_file', 'in_file')])])

    return workflow
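# A minimal usage sketch for epi_unwarp, with hypothetical paths; the fieldmap
# coefficients and movement parameters are assumed to come from a prior TOPUP run.
unwarp_wf = epi_unwarp(settings={'output_dir': '/out'})
unwarp_wf.base_dir = '/tmp/work'
unwarp_wf.inputs.inputnode.epi = ['/data/sub-01_task-rest_bold.nii.gz']  # list: 'metadata' is a MapNode
unwarp_wf.inputs.inputnode.epi_brain = '/data/sub-01_task-rest_desc-brain_bold.nii.gz'
unwarp_wf.inputs.inputnode.fieldmap = '/data/sub-01_fieldcoef.nii.gz'
unwarp_wf.inputs.inputnode.fmap_movpar = '/data/sub-01_movpar.txt'
unwarp_wf.inputs.inputnode.fmap_mask = '/data/sub-01_desc-brain_mask.nii.gz'
unwarp_wf.run()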
Exemplo n.º 17
0
def t1w_preprocessing(name='t1w_preprocessing', settings=None):
    """T1w images preprocessing pipeline"""

    if settings is None:
        raise RuntimeError('Workflow settings are missing')

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['t1w']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['t1_seg', 'bias_corrected_t1', 't1_brain', 't1_2_mni',
                't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                't1_segmentation']), name='outputnode')

    # 1. Reorient T1
    arw = pe.Node(fs.MRIConvert(out_type='niigz', out_orientation='RAS'), name='Reorient')

    # 2. T1 Bias Field Correction
    inu_n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias_Field_Correction')

    # 3. Skull-stripping
    asw = skullstrip_wf()
    if settings.get('skull_strip_ants', False):
        asw = skullstrip_ants(settings=settings)

    # 4. Segmentation
    t1_seg = pe.Node(fsl.FAST(no_bias=True, probability_maps=True), name='T1_Segmentation')

    # 5. T1w to MNI registration
    t1_2_mni = pe.Node(ants.Registration(), name='T1_2_MNI_Registration')
    t1_2_mni.inputs.collapse_output_transforms = False
    t1_2_mni.inputs.write_composite_transform = False
    t1_2_mni.inputs.output_transform_prefix = 'T1_to_MNI_'
    t1_2_mni.inputs.output_warped_image = 't1_to_mni.nii.gz'
    t1_2_mni.inputs.num_threads = 4
    t1_2_mni.inputs.fixed_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')
    t1_2_mni.inputs.fixed_image_mask = op.join(
        get_mni_template_ras(), 'MNI152_T1_1mm_brain_mask.nii.gz')

    # Hack to avoid re-running ANTs all the time
    grabber_interface = nio.JSONFileGrabber()
    setattr(grabber_interface, '_always_run', False)
    t1_2_mni_params = pe.Node(grabber_interface, name='t1_2_mni_params')
    t1_2_mni_params.inputs.in_file = (
        pkgr.resource_filename('fmriprep', 'data/{}.json'.format(
            settings.get('ants_t1-mni_settings', 't1-mni_registration2')))
    )

    # Resample the brain mask and the tissue probability maps into MNI space
    bmask_mni = pe.Node(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='NearestNeighbor'), name='brain_mni_warp')
    bmask_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')
    tpms_mni = pe.MapNode(ants.ApplyTransforms(dimension=3, default_value=0, interpolation='Linear'),
                          iterfield=['input_image'], name='tpms_mni_warp')
    tpms_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')


    workflow.connect([
        (inputnode, arw, [('t1w', 'in_file')]),
        (arw, inu_n4, [('out_file', 'input_image')]),
        (inu_n4, asw, [('output_image', 'inputnode.in_file')]),
        (asw, t1_seg, [('outputnode.out_file', 'in_files')]),
        (inu_n4, t1_2_mni, [('output_image', 'moving_image')]),
        (asw, t1_2_mni, [('outputnode.out_mask', 'moving_image_mask')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg')]),
        (inu_n4, outputnode, [('output_image', 'bias_corrected_t1')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_segmentation')]),
        (t1_2_mni, outputnode, [
            ('warped_image', 't1_2_mni'),
            ('forward_transforms', 't1_2_mni_forward_transform'),
            ('reverse_transforms', 't1_2_mni_reverse_transform')
        ]),
        (asw, bmask_mni, [('outputnode.out_mask', 'input_image')]),
        (t1_2_mni, bmask_mni, [('forward_transforms', 'transforms'),
                               ('forward_invert_flags', 'invert_transform_flags')]),
        (t1_seg, tpms_mni, [('probability_maps', 'input_image')]),
        (t1_2_mni, tpms_mni, [('forward_transforms', 'transforms'),
                               ('forward_invert_flags', 'invert_transform_flags')]),
        (asw, outputnode, [
            ('outputnode.out_file', 't1_brain')]),
    ])

    # Connect reporting nodes
    t1_stripped_overlay = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'], output_names=['out_file'],
        function=stripped_brain_overlay), name='PNG_T1_SkullStrip')
    t1_stripped_overlay.inputs.out_file = 't1_stripped_overlay.png'

    # The T1-to-MNI will be plotted using the segmentation. That's why we transform it first
    seg_2_mni = pe.Node(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='NearestNeighbor'), name='T1_2_MNI_warp')
    seg_2_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')

    t1_2_mni_overlay = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'], output_names=['out_file'],
        function=stripped_brain_overlay), name='PNG_T1_to_MNI')
    t1_2_mni_overlay.inputs.out_file = 't1_to_mni_overlay.png'
    t1_2_mni_overlay.inputs.overlay_file = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')

    datasink = pe.Node(
        interface=nio.DataSink(
            base_directory=op.join(settings['output_dir'], 'images')),
        name='datasink',
        parameterization=False
    )

    workflow.connect([
        (inu_n4, t1_stripped_overlay, [('output_image', 'overlay_file')]),
        (asw, t1_stripped_overlay, [('outputnode.out_mask', 'in_file')]),
        (t1_stripped_overlay, datasink, [('out_file', '@t1_stripped_overlay')]),
        (t1_seg, seg_2_mni, [('tissue_class_map', 'input_image')]),
        (t1_2_mni, seg_2_mni, [('forward_transforms', 'transforms'),
                               ('forward_invert_flags', 'invert_transform_flags')]),
        (seg_2_mni, t1_2_mni_overlay, [('output_image', 'in_file')]),
        (t1_2_mni_overlay, datasink, [('out_file', '@t1_2_mni_overlay')]),
    ])

    # Write corrected file in the designated output dir
    ds_t1_bias = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='inu'), name='DerivT1_inu')
    ds_t1_seg = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='inu_seg'), name='DerivT1_seg')
    ds_mask = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='bmask'), name='DerivT1_mask')

    ds_t1_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='mni'), name='DerivT1w_MNI')
    ds_t1_mni_aff = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='mni_affine'), name='DerivT1w_MNI_affine')

    ds_bmask_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='bmask_mni'), name='DerivT1_Mask_MNI')
    ds_tpms_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='tpm_mni'), name='DerivT1_TPMs_MNI')

    if settings.get('debug', False):
        workflow.connect([
            (t1_2_mni, ds_t1_mni_aff, [('forward_transforms', 'in_file')])
        ])
    else:
        ds_t1_mni_warp = pe.Node(
            DerivativesDataSink(base_directory=settings['output_dir'],
                suffix='mni_warp'), name='DerivT1w_MNI_warp')

        def _get_aff(inlist):
            return inlist[:-1]

        def _get_warp(inlist):
            return inlist[-1]

        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('t1w', 'source_file')]),
            (t1_2_mni, ds_t1_mni_aff, [
                (('forward_transforms', _get_aff), 'in_file')]),
            (t1_2_mni, ds_t1_mni_warp, [
                (('forward_transforms', _get_warp), 'in_file')])
        ])

    workflow.connect([
        (inputnode, ds_t1_bias, [('t1w', 'source_file')]),
        (inputnode, ds_t1_seg, [('t1w', 'source_file')]),
        (inputnode, ds_mask, [('t1w', 'source_file')]),
        (inputnode, ds_t1_mni, [('t1w', 'source_file')]),
        (inputnode, ds_t1_mni_aff, [('t1w', 'source_file')]),
        (inputnode, ds_bmask_mni, [('t1w', 'source_file')]),
        (inputnode, ds_tpms_mni, [('t1w', 'source_file')]),
        (asw, ds_t1_bias, [('outputnode.out_file', 'in_file')]),
        (t1_seg, ds_t1_seg, [('tissue_class_map', 'in_file')]),
        (asw, ds_mask, [('outputnode.out_mask', 'in_file')]),
        (t1_2_mni, ds_t1_mni, [('warped_image', 'in_file')]),
        (bmask_mni, ds_bmask_mni, [('output_image', 'in_file')]),
        (tpms_mni, ds_tpms_mni, [('output_image', 'in_file')])

    ])

    # ANTs inputs connected here for clarity
    workflow.connect([
        (t1_2_mni_params, t1_2_mni, [
            ('metric', 'metric'),
            ('metric_weight', 'metric_weight'),
            ('dimension', 'dimension'),
            ('write_composite_transform', 'write_composite_transform'),
            ('radius_or_number_of_bins', 'radius_or_number_of_bins'),
            ('shrink_factors', 'shrink_factors'),
            ('smoothing_sigmas', 'smoothing_sigmas'),
            ('sigma_units', 'sigma_units'),
            ('output_transform_prefix', 'output_transform_prefix'),
            ('transforms', 'transforms'),
            ('transform_parameters', 'transform_parameters'),
            ('initial_moving_transform_com', 'initial_moving_transform_com'),
            ('number_of_iterations', 'number_of_iterations'),
            ('convergence_threshold', 'convergence_threshold'),
            ('convergence_window_size', 'convergence_window_size'),
            ('sampling_strategy', 'sampling_strategy'),
            ('sampling_percentage', 'sampling_percentage'),
            ('output_warped_image', 'output_warped_image'),
            ('use_histogram_matching', 'use_histogram_matching'),
            ('use_estimate_learning_rate_once',
             'use_estimate_learning_rate_once'),
            ('collapse_output_transforms', 'collapse_output_transforms'),
            ('winsorize_lower_quantile', 'winsorize_lower_quantile'),
            ('winsorize_upper_quantile', 'winsorize_upper_quantile'),
        ])
    ])

    return workflow
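# A minimal usage sketch for t1w_preprocessing, assuming a settings dict shaped
# like the one the function reads from; all values below are hypothetical.
settings = {
    'output_dir': '/out',
    'skull_strip_ants': True,
    'ants_t1-mni_settings': 't1-mni_registration2',
    'debug': False,
}
t1w_wf = t1w_preprocessing(settings=settings)
t1w_wf.base_dir = '/tmp/work'
t1w_wf.inputs.inputnode.t1w = '/data/sub-01_T1w.nii.gz'  # hypothetical path
t1w_wf.run()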
Exemplo n.º 18
0
def init_masking_wf(name='mask_wf',
                    derivatives='/derivatives',
                    num_threads=8):

    wf = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['inv2',
                                                      't1w',
                                                      't1map',
                                                      'manual_inside',
                                                      'manual_outside'],
                                              ),
                        name='inputnode')

    n4 = pe.Node(ants.N4BiasFieldCorrection(copy_header=True,
                                            num_threads=num_threads),
                 name='n4')

    wf.connect(inputnode, 'inv2', n4, 'input_image')


    bet = pe.Node(fsl.BET(mask=True, skull=True), name='bet')
    wf.connect(n4, 'output_image', bet, 'in_file')


    nighres_brain_extract = pe.Node(niu.Function(function=nighres_skullstrip,
                                          input_names=['inv2', 't1w', 't1map'],
                                          output_names=['brainmask']),
                             name='nighres_brain_extract')

    wf.connect(n4, 'output_image', nighres_brain_extract, 'inv2')
    wf.connect(inputnode, 't1w', nighres_brain_extract, 't1w')
    wf.connect(inputnode, 't1map', nighres_brain_extract, 't1map')

    dura_masker = pe.Node(niu.Function(function=nighres_dura_masker,
                                          input_names=['inv2', 'inv2_mask'],
                                          output_names=['duramask']),
                             name='dura_masker')

    wf.connect(n4, 'output_image', dura_masker, 'inv2')
    wf.connect(nighres_brain_extract, 'brainmask', dura_masker, 'inv2_mask')



    afni_mask = pe.Node(afni.Automask(outputtype='NIFTI_GZ',
                                      clfrac=0.5),
                        name='afni_mask')
    wf.connect(bet, 'out_file', afni_mask, 'in_file')

    threshold_dura = pe.Node(fsl.Threshold(thresh=.8, args='-bin'),
                             name='threshold_dura')
    wf.connect(dura_masker, 'duramask', threshold_dura, 'in_file')

    mask_t1map = pe.Node(fsl.ApplyMask(), name='mask_t1map')
    wf.connect(inputnode, 't1map', mask_t1map, 'in_file')
    wf.connect(afni_mask, 'out_file', mask_t1map, 'mask_file')

    t1w_masker = pe.Node(niu.Function(function=mask_t1w,
                                    input_names=['t1w', 'inv2', 't1w_mask',
                                                 'dura_mask', 'manual_inside', 
                                                 'manual_outside'],
                                    output_names=['out_file',
                                                  'brain_mask']),
                       name='t1w_masker')


    wf.connect(inputnode, 't1w', t1w_masker, 't1w')
    wf.connect(inputnode, 'manual_inside', t1w_masker, 'manual_inside')
    wf.connect(inputnode, 'manual_outside', t1w_masker, 'manual_outside')
    wf.connect(n4, 'output_image', t1w_masker, 'inv2')
    wf.connect(afni_mask, 'out_file', t1w_masker, 't1w_mask')
    wf.connect(threshold_dura, 'out_file', t1w_masker, 'dura_mask')


    ds_t1map = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         keep_dtype=False,
                                         out_path_base='masked_mp2rages',
                                         suffix='T1map',
                                         desc='masked',
                                         space='average'),
                                         name='ds_t1map')

    wf.connect(inputnode, 't1map', ds_t1map, 'source_file')
    wf.connect(mask_t1map, 'out_file', ds_t1map, 'in_file')

    ds_t1w = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         keep_dtype=False,
                                         out_path_base='masked_mp2rages',
                                         desc='masked',
                                         suffix='T1w'),
                                         name='ds_t1w')


    ds_dura = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         keep_dtype=False,
                                         out_path_base='masked_mp2rages',
                                         desc='dura',
                                         suffix='mask'),
                                         name='ds_dura')

    ds_brainmask = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         keep_dtype=False,
                                         out_path_base='masked_mp2rages',
                                         desc='brainmask',
                                         suffix='mask'),
                                         name='ds_brainmask')

    wf.connect(inputnode, 't1w', ds_t1w, 'source_file')
    wf.connect(t1w_masker, 'out_file', ds_t1w, 'in_file')

    wf.connect(inputnode, 't1w', ds_dura, 'source_file')
    wf.connect(dura_masker, 'duramask', ds_dura, 'in_file')

    wf.connect(inputnode, 't1w', ds_brainmask, 'source_file')
    wf.connect(t1w_masker, 'brain_mask', ds_brainmask, 'in_file')

    return wf
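# A minimal usage sketch for init_masking_wf, with hypothetical paths; the
# manual inside/outside masks are whatever the mask_t1w helper expects.
mask_wf = init_masking_wf(derivatives='/derivatives', num_threads=8)
mask_wf.base_dir = '/tmp/work'
mask_wf.inputs.inputnode.inv2 = '/derivatives/sub-01_inv-2_MP2RAGE.nii.gz'
mask_wf.inputs.inputnode.t1w = '/derivatives/sub-01_T1w.nii.gz'
mask_wf.inputs.inputnode.t1map = '/derivatives/sub-01_T1map.nii.gz'
mask_wf.inputs.inputnode.manual_inside = '/derivatives/sub-01_desc-inside_mask.nii.gz'
mask_wf.inputs.inputnode.manual_outside = '/derivatives/sub-01_desc-outside_mask.nii.gz'
mask_wf.run()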
Exemplo n.º 19
0

merge_hmc_pars = pe.Node(niu.Merge(len(bold)),
                         name='merge_hmc_pars')

for i, b in enumerate(bold):
    mcw = create_motion_correction_workflow(name='moco_{}'.format(i+1),
                                            method='AFNI', lightweight=True)
    mcw.inputs.inputspec.which_file_is_EPI_space = 'first'

    source_file = '/sourcedata/sub-tk/ses-odc2/func/sub-tk_ses-odc2_task-checkerboard_acq-07_run-02_bold.nii'

    wf.connect(inputnode, ('bold', select_range, i), mcw, 'inputspec.in_files')

    wf.connect(mcw, 'outputspec.hmc_confounds', merge_hmc_pars, 'in{}'.format(i+1))

ds_params = pe.MapNode(DerivativesDataSink(out_path_base='analysis_experiments',
                                        suffix='hmc_confounds_afni',
                                        base_directory='/derivatives'),
                       iterfield=['in_file', 'source_file'],
                        name='ds_pars')

wf.connect(inputnode, 'bold', ds_params, 'source_file')
wf.connect(merge_hmc_pars, 'out', ds_params, 'in_file')

wf.run(plugin='MultiProc', 
       plugin_args={'n_procs' : 10})
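# The loop above passes each run through a select_range helper inside a nipype
# tuple connection, where the extra argument (i) is forwarded to the function.
# A plausible sketch, assuming it simply picks the i-th run while keeping the
# list interface that inputspec.in_files expects; the real helper may differ.
def select_range(in_files, index):
    """Illustrative sketch: return the index-th BOLD run as a one-element list."""
    if not isinstance(in_files, (list, tuple)):
        in_files = [in_files]
    return [in_files[index]]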



Exemplo n.º 20
0
def main(sourcedata,
         derivatives,
         tmp_dir,
         subject=None,
         session=None,
         run=None):

    print(subject, session, run)

    layout = BIDSLayout(sourcedata)
    derivatives_layout = BIDSLayout('/derivatives/spynoza', validate=False)

    cortex_l = get_derivative(derivatives,
                              'nighres',
                              'anat',
                              subject,
                              'dseg',
                              session='anat',
                              space='average',
                              description='cortex',
                              hemi='left')

    cortex_r = get_derivative(derivatives,
                              'nighres',
                              'anat',
                              subject,
                              'dseg',
                              session='anat',
                              space='average',
                              description='cortex',
                              hemi='right')

    mask = derivatives_layout.get(subject=subject,
                                  session=session,
                                  suffix='mask',
                                  return_type='file')
    mask = sorted(mask)
    assert (len(mask) == 1)
    mask = mask[0]

    bold = derivatives_layout.get(subject=subject,
                                  session=session,
                                  suffix='preproc',
                                  return_type='file')
    bold = sorted(bold)
    print('BOLD: {}'.format(bold))
    print('MASK: {}'.format(mask))

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['cortex_l', 'cortex_r', 'bold', 'mask']),
        name='inputnode')

    inputnode.inputs.cortex_l = cortex_l
    inputnode.inputs.cortex_r = cortex_r
    inputnode.inputs.bold = bold
    inputnode.inputs.mask = mask

    get_masks = pe.MapNode(niu.Function(
        function=get_brain_regions_cruise,
        input_names=['cortex_l', 'cortex_r', 'type'],
        output_names=['out']),
                           iterfield=['type'],
                           name='get_masks')
    get_masks.inputs.type = ['csf', 'wm']

    wf = pe.Workflow(name='get_confounds_{}_{}'.format(subject, session),
                     base_dir='/workflow_folders')
    wf.connect(inputnode, 'cortex_l', get_masks, 'cortex_l')
    wf.connect(inputnode, 'cortex_r', get_masks, 'cortex_r')

    resampler = pe.MapNode(niu.Function(
        function=resample_img,
        input_names=['input_image', 'ref_image', 'interpolation'],
        output_names=['resampled_image'],
    ),
                           iterfield=['input_image'],
                           name='resampler')

    wf.connect(inputnode, ('bold', pickfirst), resampler, 'ref_image')
    wf.connect(get_masks, 'out', resampler, 'input_image')

    compcorr = pe.MapNode(ACompCor(merge_method='union'),
                          iterfield=['realigned_file'],
                          name='acompcorr')

    wf.connect(resampler, 'resampled_image', compcorr, 'mask_files')
    wf.connect(inputnode, 'bold', compcorr, 'realigned_file')

    dvars = pe.MapNode(ComputeDVARS(), iterfield=['in_file'], name='dvars')
    wf.connect(inputnode, 'mask', dvars, 'in_mask')
    wf.connect(inputnode, 'bold', dvars, 'in_file')

    add_header = pe.MapNode(AddTSVHeader(columns=["dvars"]),
                            iterfield=['in_file'],
                            name="add_header_dvars")
    wf.connect(dvars, 'out_std', add_header, 'in_file')

    concat = pe.MapNode(GatherConfounds(),
                        iterfield=['acompcor', 'dvars'],
                        name="concat")
    wf.connect(add_header, 'out_file', concat, 'dvars')
    wf.connect(compcorr, 'components_file', concat, 'acompcor')

    ds_confounds = pe.MapNode(DerivativesDataSink(out_path_base='spynoza',
                                                  suffix='confounds_compcor',
                                                  base_directory=derivatives),
                              iterfield=['in_file', 'source_file'],
                              name='ds_confounds')

    wf.connect(inputnode, 'bold', ds_confounds, 'source_file')
    wf.connect(concat, 'confounds_file', ds_confounds, 'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': 10})
Exemplo n.º 21
0
def init_anat_derivatives_wf(output_dir,
                             output_spaces,
                             template,
                             freesurfer,
                             name='anat_derivatives_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 't1_mask', 't1_seg', 't1_tpms',
        't1_2_mni_forward_transform', 't1_2_mni', 'mni_mask', 'mni_seg',
        'mni_tpms', 'surfaces'
    ]),
                        name='inputnode')

    ds_t1_preproc = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                suffix='preproc'),
                            name='ds_t1_preproc',
                            run_without_submitting=True)

    ds_t1_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix='brainmask'),
                         name='ds_t1_mask',
                         run_without_submitting=True)

    ds_t1_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix='dtissue'),
                        name='ds_t1_seg',
                        run_without_submitting=True)

    ds_t1_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir, suffix='class-{extra_value}_probtissue'),
                         name='ds_t1_tpms',
                         run_without_submitting=True)
    ds_t1_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    suffix_fmt = 'space-{}_{}'.format
    ds_t1_mni = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix=suffix_fmt(
                                                template, 'preproc')),
                        name='ds_t1_mni',
                        run_without_submitting=True)

    ds_mni_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              suffix=suffix_fmt(
                                                  template, 'brainmask')),
                          name='ds_mni_mask',
                          run_without_submitting=True)

    ds_mni_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix=suffix_fmt(
                                                 template, 'dtissue')),
                         name='ds_mni_seg',
                         run_without_submitting=True)

    ds_mni_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir,
        suffix=suffix_fmt(template, 'class-{extra_value}_probtissue')),
                          name='ds_mni_tpms',
                          run_without_submitting=True)
    ds_mni_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    ds_t1_mni_warp = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 suffix=suffix_fmt(
                                                     template, 'warp')),
                             name='ds_t1_mni_warp',
                             run_without_submitting=True)

    def get_gifti_name(in_file):
        import os
        import re
        in_format = re.compile(r'(?P<LR>[lr])h.(?P<surf>.+)_converted.gii')
        name = os.path.basename(in_file)
        info = in_format.match(name).groupdict()
        info['LR'] = info['LR'].upper()
        return '{surf}.{LR}.surf'.format(**info)

    name_surfs = pe.MapNode(niu.Function(function=get_gifti_name),
                            iterfield='in_file',
                            name='name_surfs')

    ds_surfs = pe.MapNode(DerivativesDataSink(base_directory=output_dir),
                          iterfield=['in_file', 'suffix'],
                          name='ds_surfs',
                          run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_t1_preproc, [('source_file', 'source_file'),
                                    ('t1_preproc', 'in_file')]),
        (inputnode, ds_t1_mask, [('source_file', 'source_file'),
                                 ('t1_mask', 'in_file')]),
        (inputnode, ds_t1_seg, [('source_file', 'source_file'),
                                ('t1_seg', 'in_file')]),
        (inputnode, ds_t1_tpms, [('source_file', 'source_file'),
                                 ('t1_tpms', 'in_file')]),
    ])

    if freesurfer:
        workflow.connect([
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_surfs, [('source_file', 'source_file'),
                                   ('surfaces', 'in_file')]),
            (name_surfs, ds_surfs, [('out', 'suffix')]),
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('source_file', 'source_file'),
                                         ('t1_2_mni_forward_transform',
                                          'in_file')]),
            (inputnode, ds_t1_mni, [('source_file', 'source_file'),
                                    ('t1_2_mni', 'in_file')]),
            (inputnode, ds_mni_mask, [('source_file', 'source_file'),
                                      ('mni_mask', 'in_file')]),
            (inputnode, ds_mni_seg, [('source_file', 'source_file'),
                                     ('mni_seg', 'in_file')]),
            (inputnode, ds_mni_tpms, [('source_file', 'source_file'),
                                      ('mni_tpms', 'in_file')]),
        ])

    return workflow
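# A minimal usage sketch for init_anat_derivatives_wf with hypothetical arguments;
# including 'template' in output_spaces enables the MNI-space datasinks wired above.
anat_deriv_wf = init_anat_derivatives_wf(
    output_dir='/out',
    output_spaces=['template', 'T1w'],
    template='MNI152NLin2009cAsym',
    freesurfer=True,
)
anat_deriv_wf.inputs.inputnode.source_file = '/data/sub-01_T1w.nii.gz'  # hypothetical path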
Exemplo n.º 22
0
def init_single_subject_wf(subject_id):
    """
    Organize the preprocessing pipeline for a single subject.

    It collects and reports information about the subject, and prepares
    sub-workflows to perform anatomical and functional preprocessing.
    Anatomical preprocessing is performed in a single workflow, regardless of
    the number of sessions.
    Functional preprocessing is performed using a separate workflow for each
    individual BOLD series.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from nibabies.workflows.tests import mock_config
            from nibabies.workflows.base import init_single_subject_wf
            with mock_config():
                wf = init_single_subject_wf('01')

    Parameters
    ----------
    subject_id : :obj:`str`
        Subject label for this single-subject workflow.

    Inputs
    ------
    subjects_dir : :obj:`str`
        FreeSurfer's ``$SUBJECTS_DIR``.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.bids import BIDSInfo, BIDSDataGrabber
    from niworkflows.interfaces.nilearn import NILEARN_VERSION
    from niworkflows.utils.bids import collect_data
    from niworkflows.utils.spaces import Reference

    from .anatomical import init_infant_anat_wf
    from ..utils.misc import fix_multi_source_name

    name = "single_subject_%s_wf" % subject_id
    subject_data = collect_data(
        config.execution.layout,
        subject_id,
        config.execution.task_id,
        config.execution.echo_idx,
        bids_filters=config.execution.bids_filters,
    )[0]

    if "flair" in config.workflow.ignore:
        subject_data["flair"] = []
    if "t2w" in config.workflow.ignore:
        subject_data["t2w"] = []

    anat_only = config.workflow.anat_only
    anat_derivatives = config.execution.anat_derivatives
    anat_modality = config.workflow.anat_modality
    spaces = config.workflow.spaces
    # Make sure we always go through these two checks
    if not anat_only and not subject_data["bold"]:
        task_id = config.execution.task_id
        raise RuntimeError(
            "No BOLD images found for participant {} and task {}. "
            "All workflows require BOLD images.".format(
                subject_id, task_id if task_id else "<all>"))

    if anat_derivatives:
        from smriprep.utils.bids import collect_derivatives

        std_spaces = spaces.get_spaces(nonstandard=False, dim=(3, ))
        anat_derivatives = collect_derivatives(
            anat_derivatives.absolute(),
            subject_id,
            std_spaces,
            config.workflow.run_reconall,
        )
        if anat_derivatives is None:
            config.loggers.workflow.warning(f"""\
Attempted to access pre-existing anatomical derivatives at \
<{config.execution.anat_derivatives}>, however not all expectations of fMRIPrep \
were met (for participant <{subject_id}>, spaces <{', '.join(std_spaces)}>, \
reconall <{config.workflow.run_reconall}>).""")

    if not anat_derivatives and not subject_data[anat_modality]:
        raise Exception(
            f"No {anat_modality} images found for participant {subject_id}. "
            "All workflows require T1w images.")

    workflow = Workflow(name=name)
    workflow.__desc__ = """
Results included in this manuscript come from preprocessing
performed using *fMRIPrep* {fmriprep_ver}
(@fmriprep1; @fmriprep2; RRID:SCR_016216),
which is based on *Nipype* {nipype_ver}
(@nipype1; @nipype2; RRID:SCR_002502).

""".format(
        fmriprep_ver=config.environment.version,
        nipype_ver=config.environment.nipype_version,
    )
    workflow.__postdesc__ = """

Many internal operations of *fMRIPrep* use
*Nilearn* {nilearn_ver} [@nilearn, RRID:SCR_001362],
mostly within the functional processing workflow.
For more details of the pipeline, see [the section corresponding
to workflows in *fMRIPrep*'s documentation]\
(https://nibabies.readthedocs.io/en/latest/workflows.html \
"FMRIPrep's documentation").


### Copyright Waiver

The above boilerplate text was automatically generated by fMRIPrep
with the express intention that users should copy and paste this
text into their manuscripts *unchanged*.
It is released under the [CC0]\
(https://creativecommons.org/publicdomain/zero/1.0/) license.

### References

""".format(nilearn_ver=NILEARN_VERSION)

    fmriprep_dir = str(config.execution.fmriprep_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=["subjects_dir"]),
                        name="inputnode")

    bidssrc = pe.Node(
        BIDSDataGrabber(
            subject_data=subject_data,
            anat_only=anat_only,
            anat_derivatives=anat_derivatives,
            subject_id=subject_id,
        ),
        name="bidssrc",
    )

    bids_info = pe.Node(
        BIDSInfo(bids_dir=config.execution.bids_dir, bids_validate=False),
        name="bids_info",
    )

    summary = pe.Node(
        SubjectSummary(
            std_spaces=spaces.get_spaces(nonstandard=False),
            nstd_spaces=spaces.get_spaces(standard=False),
        ),
        name="summary",
        run_without_submitting=True,
    )

    about = pe.Node(
        AboutSummary(version=config.environment.version,
                     command=" ".join(sys.argv)),
        name="about",
        run_without_submitting=True,
    )

    ds_report_summary = pe.Node(
        DerivativesDataSink(
            base_directory=fmriprep_dir,
            desc="summary",
            datatype="figures",
            dismiss_entities=("echo", ),
        ),
        name="ds_report_summary",
        run_without_submitting=True,
    )

    ds_report_about = pe.Node(
        DerivativesDataSink(
            base_directory=fmriprep_dir,
            desc="about",
            datatype="figures",
            dismiss_entities=("echo", ),
        ),
        name="ds_report_about",
        run_without_submitting=True,
    )

    # Preprocessing of anatomical (includes registration to UNCInfant)
    anat_preproc_wf = init_infant_anat_wf(
        ants_affine_init=config.workflow.ants_affine_init or True,
        age_months=config.workflow.age_months,
        anat_modality=anat_modality,
        t1w=subject_data['t1w'],
        t2w=subject_data['t2w'],
        bids_root=config.execution.bids_dir,
        existing_derivatives=anat_derivatives,
        freesurfer=config.workflow.run_reconall,
        longitudinal=config.workflow.longitudinal,
        omp_nthreads=config.nipype.omp_nthreads,
        output_dir=fmriprep_dir,
        segmentation_atlases=config.execution.segmentation_atlases_dir,
        skull_strip_mode=config.workflow.skull_strip_t1w,
        skull_strip_template=Reference.from_string(
            config.workflow.skull_strip_template)[0],
        sloppy=config.execution.sloppy,
        spaces=spaces,
    )

    # fmt: off
    workflow.connect([
        (inputnode, anat_preproc_wf, [
            ('subjects_dir', 'inputnode.subjects_dir'),
        ]),
        (inputnode, summary, [
            ('subjects_dir', 'subjects_dir'),
        ]),
        (bidssrc, summary, [
            ('bold', 'bold'),
        ]),
        (bids_info, summary, [
            ('subject', 'subject_id'),
        ]),
        (bids_info, anat_preproc_wf, [
            (('subject', _prefix), 'inputnode.subject_id'),
        ]),
        (
            bidssrc,
            anat_preproc_wf,
            [
                ('t1w', 'inputnode.t1w'),
                ('t2w', 'inputnode.t2w'),
                # ('roi', 'inputnode.roi'),
                # ('flair', 'inputnode.flair'),
            ]),
        (summary, ds_report_summary, [
            ('out_report', 'in_file'),
        ]),
        (about, ds_report_about, [
            ('out_report', 'in_file'),
        ]),
    ])

    if not anat_derivatives:
        workflow.connect([
            (bidssrc, bids_info, [
                (('t1w', fix_multi_source_name), 'in_file'),
            ]),
            (bidssrc, summary, [
                ('t1w', 't1w'),
                ('t2w', 't2w'),
            ]),
            (bidssrc, ds_report_summary, [
                (('t1w', fix_multi_source_name), 'source_file'),
            ]),
            (bidssrc, ds_report_about, [
                (('t1w', fix_multi_source_name), 'source_file'),
            ]),
        ])
    else:
        workflow.connect([
            (bidssrc, bids_info, [
                (('bold', fix_multi_source_name), 'in_file'),
            ]),
            (anat_preproc_wf, summary, [
                ('outputnode.t1w_preproc', 't1w'),
            ]),
            (anat_preproc_wf, ds_report_summary, [
                ('outputnode.t1w_preproc', 'source_file'),
            ]),
            (anat_preproc_wf, ds_report_about, [
                ('outputnode.t1w_preproc', 'source_file'),
            ]),
        ])
    # fmt: on

    # Overwrite ``out_path_base`` of smriprep's DataSinks
    for node in workflow.list_node_names():
        if node.split(".")[-1].startswith("ds_"):
            workflow.get_node(node).interface.out_path_base = ""

    if anat_only:
        return workflow

    raise NotImplementedError("BOLD processing is not yet implemented.")

    # Append the functional section to the existing anatomical excerpt
    # That way we do not need to stream down the number of bold datasets
    anat_preproc_wf.__postdesc__ = ((anat_preproc_wf.__postdesc__ or "") + f"""

Functional data preprocessing

: For each of the {len(subject_data['bold'])} BOLD runs found per subject (across all
tasks and sessions), the following preprocessing was performed.
""")

    for bold_file in subject_data["bold"]:
        func_preproc_wf = init_func_preproc_wf(bold_file)

        # fmt: off
        workflow.connect([
            (
                anat_preproc_wf,
                func_preproc_wf,
                [
                    ('outputnode.anat_preproc', 'inputnode.anat_preproc'),
                    ('outputnode.anat_mask', 'inputnode.anat_mask'),
                    ('outputnode.anat_dseg', 'inputnode.anat_dseg'),
                    ('outputnode.anat_aseg', 'inputnode.anat_aseg'),
                    ('outputnode.anat_aparc', 'inputnode.anat_aparc'),
                    ('outputnode.anat_tpms', 'inputnode.anat_tpms'),
                    ('outputnode.template', 'inputnode.template'),
                    ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
                    ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
                    # Undefined if --fs-no-reconall, but this is safe
                    ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
                    ('outputnode.subject_id', 'inputnode.subject_id'),
                    ('outputnode.anat2fsnative_xfm',
                     'inputnode.t1w2fsnative_xfm'),
                    ('outputnode.fsnative2anat_xfm',
                     'inputnode.fsnative2t1w_xfm'),
                ]),
        ])
        # fmt: on

    return workflow