Example #1
def test_phases(bids_layouts, tmpdir, output_path, workdir):
    """Test creation of the workflow."""
    tmpdir.chdir()

    data = bids_layouts['ds001600']
    wf = Workflow(name='phases_ds001600')
    phdiff_wf = init_phdiff_wf(omp_nthreads=2)
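    # Query the BIDS layout for both magnitude echoes and feed them to the workflow's inputnode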
    phdiff_wf.inputs.inputnode.magnitude = data.get(
        suffix=['magnitude1', 'magnitude2'],
        acquisition='v2',
        return_type='file',
        extension=['.nii', '.nii.gz'])

    phdiff_files = data.get(suffix=['phase1', 'phase2'],
                            acquisition='v2',
                            extension=['.nii', '.nii.gz'])

    phdiff_wf.inputs.inputnode.phasediff = [(ph.path, ph.get_metadata())
                                            for ph in phdiff_files]

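    # When an output path is given, also build a reportlet and datasinks to store the fieldmap and its report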
    if output_path:
        from ...interfaces.reportlets import FieldmapReportlet
        rep = pe.Node(FieldmapReportlet(reference_label='Magnitude'),
                      'simple_report')
        rep.interface._always_run = True

        ds_report = pe.Node(DerivativesDataSink(
            base_directory=str(output_path),
            out_path_base='sdcflows',
            datatype='figures',
            suffix='fieldmap',
            desc='twophases',
            dismiss_entities='fmap'),
                            name='ds_report')
        ds_report.inputs.source_file = phdiff_files[0].path

        dsink_fmap = pe.Node(DerivativesDataSink(
            base_directory=str(output_path),
            suffix='fieldmap',
            desc='twophases',
            dismiss_entities='fmap'),
                             name='dsink_fmap')
        dsink_fmap.interface.out_path_base = 'sdcflows'
        dsink_fmap.inputs.source_file = phdiff_files[0].path

        wf.connect([
            (phdiff_wf, rep, [('outputnode.fmap', 'fieldmap'),
                              ('outputnode.fmap_ref', 'reference'),
                              ('outputnode.fmap_mask', 'mask')]),
            (rep, ds_report, [('out_report', 'in_file')]),
            (phdiff_wf, dsink_fmap, [('outputnode.fmap', 'in_file')]),
        ])
    else:
        wf.add_nodes([phdiff_wf])

    if workdir:
        wf.base_dir = str(workdir)

    wf.run()
Example #2
def init_t1w_derivatives_wf(bids_root, output_dir, name='t1w_derivatives_wf'):
    """Set up a battery of datasinks to store derivatives in the right location."""
    base_directory = str(output_dir.parent)
    out_path_base = str(output_dir.name)

    wf = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject', 'session', 't1w_template_space', 't1w_brain_template_space'
    ]),
                        name='inputnode')

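    # DerivativesDataSink derives output filenames from 'source_file'; build a path to the subject's raw T1w for that purpose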
    def generic_bids_file_fct(bids_root, subject, session):
        from pathlib import Path
        return Path(
            bids_root
        ) / f"sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_T1w.nii.gz"

    generic_bids_file = pe.Node(niu.Function(
        input_names=["bids_root", "subject", "session"],
        output_names=["out_file"],
        function=generic_bids_file_fct),
                                name='generic_bids_file')
    generic_bids_file.inputs.bids_root = bids_root
    wf.connect(inputnode, "subject", generic_bids_file, "subject")
    wf.connect(inputnode, "session", generic_bids_file, "session")

    ds_t1w_preproc = pe.Node(DerivativesDataSink(base_directory=base_directory,
                                                 out_path_base=out_path_base,
                                                 keep_dtype=True,
                                                 compress=True,
                                                 space="tpl"),
                             name='ds_t1w_preproc',
                             run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_t1w_preproc, "source_file")
    wf.connect(inputnode, "t1w_template_space", ds_t1w_preproc, "in_file")

    ds_t1w_brain = pe.Node(DerivativesDataSink(base_directory=base_directory,
                                               out_path_base=out_path_base,
                                               keep_dtype=True,
                                               compress=True,
                                               space="tpl",
                                               desc="brain"),
                           name='ds_t1w_brain',
                           run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_t1w_brain, "source_file")
    wf.connect(inputnode, "t1w_brain_template_space", ds_t1w_brain, "in_file")

    return wf
Example #3
def main(derivatives, subject, session, workflow_folder, n_procs=8):
    fn = op.join(
        derivatives, 'modelfitting', 'glm7', 'sub-{subject}', 'ses-{session}',
        'func',
        'sub-{subject}_ses-{session}_left_over_right_zmap.nii.gz').format(
            subject=subject, session=session)

    os.environ['SUBJECTS_DIR'] = op.join(derivatives, 'freesurfer')

    wf = pe.Workflow(name='sample_fs_{}_{}'.format(subject, session),
                     base_dir=workflow_folder)

    input_node = pe.Node(niu.IdentityInterface(fields=['source_file']),
                         name='input_node')
    input_node.inputs.source_file = fn

    config_node = pe.Node(
        niu.IdentityInterface(fields=['depth', 'hemisphere']),
        name='config_node')
    config_node.iterables = [('depth', np.arange(1, 7)),
                             ('hemisphere', ['lh', 'rh'])]

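    # Map an integer depth (1-6) onto an equivolumetric surface name, e.g. depth=1 -> 'equi0.14285714285714285.pial'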
    def get_surf_name(depth, n_surfs=8):
        return 'equi{}.pial'.format(str(float(depth) / (n_surfs - 1)))

    sampler = pe.Node(fs.SampleToSurface(subjects_dir=os.path.join(
        derivatives, 'freesurfer'),
                                         override_reg_subj=True,
                                         reg_header=True,
                                         subject_id='sub-{}'.format(subject),
                                         interp_method='trilinear',
                                         projection_stem='',
                                         out_type='gii'),
                      name='sampler')

    wf.connect(input_node, 'source_file', sampler, 'source_file')
    wf.connect(config_node, ('depth', get_surf_name), sampler, 'surface')
    wf.connect(config_node, 'hemisphere', sampler, 'hemi')

    def get_desc(depth, n_surfs=8):
        return 'zmap-depth-{:.03f}'.format(float(depth) / (n_surfs - 1))

    def get_extra_values(hemi):
        return ['hemi-{}'.format(hemi)]

    ds = pe.MapNode(DerivativesDataSink(
        base_directory=derivatives,
        out_path_base='sampled_giis',
    ),
                    iterfield=['in_file', 'source_file'],
                    name='datasink')

    wf.connect(input_node, 'source_file', ds, 'source_file')
    wf.connect(sampler, 'out_file', ds, 'in_file')
    wf.connect(config_node, ('depth', get_desc), ds, 'desc')
    wf.connect(config_node, ('hemisphere', get_extra_values), ds,
               'extra_values')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_procs})
Example #4
def main(subject, sourcedata):
    derivatives = op.join(sourcedata, 'derivatives')

    derivatives_fmriprep = op.join(derivatives, 'fmriprep')
    derivatives_freesurfer = op.join(derivatives, 'freesurfer')
    # os.environ['SUBJECTS_DIR'] = derivatives_freesurfer

    fn_template = op.join(
        derivatives_fmriprep, f'sub-{subject}', 'func',
        f'sub-{subject}_task-numrisk_acq-*_run-*_space-fsaverage6_hemi-*.func.gii'
    )

    fns = glob.glob(fn_template)

    workflow = pe.Workflow(name=f'smooth_sub-{subject}', base_dir='/scratch')

    input_node = pe.Node(niu.IdentityInterface(fields=['surface_files']),
                         name='input_node')
    input_node.inputs.surface_files = fns

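    # Translate the BIDS hemi-L/R entity of each filename into FreeSurfer's lh/rh convention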
    def get_hemis(in_files):
        import re

        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_task.*_hemi-(?P<hemi>L|R)\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]
        hemis = ['lh' if hemi == 'L' else 'rh' for hemi in hemis]

        return hemis

    smoother = pe.MapNode(freesurfer.SurfaceSmooth(fwhm=5,
                                                   subject_id='fsaverage6'),
                          iterfield=['in_file', 'hemi'],
                          name='smoother')

    workflow.connect(input_node, 'surface_files', smoother, 'in_file')
    workflow.connect(input_node, ('surface_files', get_hemis), smoother,
                     'hemi')

    def get_suffix(in_files):
        import re
        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_task.*_hemi-(?P<hemi>L|R)\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]

        return ['_hemi-{}'.format(hemi) for hemi in hemis]

    ds = pe.MapNode(DerivativesDataSink(out_path_base='smoothed',
                                        keep_dtype=True),
                    iterfield=['source_file', 'in_file', 'suffix'],
                    name='datasink')
    ds.inputs.base_directory = derivatives
    ds.inputs.desc = 'smoothed'

    workflow.connect(input_node, 'surface_files', ds, 'source_file')
    workflow.connect(smoother, 'out_file', ds, 'in_file')
    workflow.connect(input_node, ('surface_files', get_suffix), ds, 'suffix')

    workflow.run(plugin='MultiProc', plugin_args={'n_procs': 15})
Example #5
def test_workflow(bids_layouts, tmpdir, output_path, dataset):
    """Test creation of the workflow."""
    tmpdir.chdir()

    data = bids_layouts[dataset]
    wf = Workflow(name='tstworkflow')
    phdiff_wf = init_phdiff_wf(omp_nthreads=1)
    phdiff_wf.inputs.inputnode.magnitude = data.get(
        suffix=['magnitude1', 'magnitude2'],
        acq='v4',
        return_type='file',
        extension=['.nii', '.nii.gz'])

    phdiff_file = data.get(suffix='phasediff',
                           acq='v4',
                           extension=['.nii', '.nii.gz'])[0]

    phdiff_wf.inputs.inputnode.phasediff = phdiff_file.path
    phdiff_wf.inputs.inputnode.metadata = phdiff_file.get_metadata()

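    # When an output path is given, render a fieldmap reportlet and sink it under the output directory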
    if output_path:
        from ...interfaces.reportlets import FieldmapReportlet
        rep = pe.Node(FieldmapReportlet(), 'simple_report')

        dsink = pe.Node(DerivativesDataSink(base_directory=str(output_path),
                                            keep_dtype=True),
                        name='dsink')
        dsink.interface.out_path_base = 'sdcflows'
        dsink.inputs.source_file = phdiff_file.path

        wf.connect([
            (phdiff_wf, rep, [('outputnode.fmap', 'fieldmap'),
                              ('outputnode.fmap_ref', 'reference'),
                              ('outputnode.fmap_mask', 'mask')]),
            (rep, dsink, [('out_report', 'in_file')]),
        ])
    else:
        wf.add_nodes([phdiff_wf])

    wf.run()
Example #6
def init_bold_confs_wf(
    out_dir,
    out_path_base,
    source_file,
    mem_gb,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    dt=None,
    work_dir=None,
    name="bold_confs_wf",
):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering with a 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)


    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            out_dir='.',
            out_path_base='derivatives',
            source_file='sub-01_task-rest_bold.nii.gz',
            mem_gb=1,
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            dt=2.0,
        )

    **Parameters**

        out_dir : str
            Directory in which derivatives are saved
        out_path_base : str
            Subdirectory of ``out_dir`` used as the datasinks' path base
        source_file : str
            BIDS file from which the names of the outputs are derived
        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        regressors_all_comps : bool
            Indicates whether CompCor decompositions should return all
            components instead of the minimal number of components necessary
            to explain 50 percent of the variance in the decomposition mask.
        regressors_dvars_th : float
            Criterion for flagging DVARS outliers
        regressors_fd_th : float
            Criterion for flagging framewise displacement outliers
        dt : float
            Repetition time (TR), in seconds
        work_dir : str
            Base directory for the workflow's working files
        name : str
            Name of workflow (default: ``bold_confs_wf``)


    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        skip_vols
            number of non steady state volumes
        csf_mask
            CSF mask in MNI 2mm space
        wm_mask
            WM mask in MNI 2mm space
        cortical_gm_mask
            cortical GM mask in MNI 2mm space

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_metadata
            Confounds metadata dictionary.

    """

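    # Note: this sets a class attribute, so it changes the default out_path_base
    # of every DerivativesDataSink instantiated afterwards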
    DerivativesDataSink.out_path_base = out_path_base

    workflow = Workflow(name=name, base_dir=work_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'skip_vols', 'csf_mask', 'wm_mask',
        'cortical_gm_mask'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']),
        name='outputnode')

    # create tcc mask: fslmaths cortical_gm_mask -dilD -mul -1 -add bold_mask -bin
    tcc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-dilD -mul -1 -add',
                                           args='-bin'),
                      name='tcc_roi')

    # create acc mask fslmaths wm_mask -add csf_mask
    acc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-add'), name='acc_roi')

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True,
                                     save_std=True,
                                     remove_zerovariance=True),
                    name="dvars",
                    mem_gb=mem_gb)

    # Framewise displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp",
                    mem_gb=mem_gb)

    # a/t-Compcor
    mrg_lbl_cc = pe.Node(niu.Merge(3),
                         name='merge_rois_cc',
                         run_without_submitting=True)

    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.05,
                                failure_mode='NaN'),
                       name="tcompcor",
                       mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['combined', 'CSF', 'WM'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor",
                       mem_gb=mem_gb)

    # Set number of components
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present
    if dt:
        tcompcor.inputs.repetition_time = dt
        acompcor.inputs.repetition_time = dt

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3),
                      name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        class_labels=["csf", "white_matter", "global_signal"]),
                      name="signals",
                      mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header",
                               mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header",
                                   mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
                                 name="add_motion_headers",
                                 mem_gb=0.01,
                                 run_without_submitting=True)
    concat = pe.Node(GatherConfounds(),
                     name="concat",
                     mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata
    tcc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        drop_columns=['mask'],
        output=None,
        additional_metadata={'Method': 'tCompCor'},
        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        output=None,
        additional_metadata={'Method': 'aCompCor'},
        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(2),
                                name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata2 = pe.Node(DictMerge(),
                                 name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2),
                          name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot',
                        mem_gb=mem_gb)

    ds_report_bold_rois = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                      desc='rois',
                                                      source_file=source_file,
                                                      suffix='reportlet',
                                                      keep_dtype=True),
                                  name='ds_report_bold_rois',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2),
                              name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(
        CompCorVariancePlot(metadata_sources=['tCompCor', 'aCompCor']),
        name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                    desc='compcorvar',
                                                    source_file=source_file,
                                                    keep_dtype=True),
                                name='ds_report_compcor',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=70),
                             name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                      desc='confoundcorr',
                                                      source_file=source_file,
                                                      keep_dtype=True),
                                  name='ds_report_conf_corr',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        # generate tcc and acc rois
        (inputnode, tcc_roi, [('cortical_gm_mask', 'in_file'),
                              ('bold_mask', 'in_file2')]),
        (inputnode, acc_roi, [('wm_mask', 'in_file'),
                              ('csf_mask', 'in_file2')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_roi, tcc_msk, [('out_file', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_roi, acc_msk, [('out_file', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (inputnode, mrg_lbl_cc, [('csf_mask', 'in2')]),
        (inputnode, mrg_lbl_cc, [('wm_mask', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (inputnode, csf_msk, [('csf_mask', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (inputnode, wm_msk, [('wm_mask', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),

        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),

        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),

        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
Example #7
def init_template_derivatives_wf(bids_root,
                                 output_dir,
                                 name='template_derivatives_wf'):
    """Set up a battery of datasinks to store derivatives in the right location."""
    base_directory = str(output_dir.parent)
    out_path_base = str(output_dir.name)

    wf = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject', 't1w_preproc', 't1w_mask', 't1w_2_MNI_xfm',
        't1w_2_MNI_warp', 't1w_MNIspace', 'bianca_wm_mask_file',
        'bianca_vent_mask_file', "distance_map", "perivent_mask", "deepWM_mask"
    ]),
                        name='inputnode')

    def generic_bids_file_fct(bids_root, subject):
        from pathlib import Path
        return Path(bids_root) / f"sub-{subject}/anat/sub-{subject}_T1w.nii.gz"

    generic_bids_file = pe.Node(niu.Function(
        input_names=["bids_root", "subject"],
        output_names=["out_file"],
        function=generic_bids_file_fct),
                                name='generic_bids_file')
    generic_bids_file.inputs.bids_root = bids_root
    wf.connect(inputnode, "subject", generic_bids_file, "subject")

    ds_t1w_preproc = pe.Node(DerivativesDataSink(base_directory=base_directory,
                                                 out_path_base=out_path_base,
                                                 desc='preproc',
                                                 keep_dtype=True,
                                                 compress=True),
                             name='ds_t1w_preproc',
                             run_without_submitting=True)
    ds_t1w_preproc.inputs.SkullStripped = False
    wf.connect(generic_bids_file, "out_file", ds_t1w_preproc, "source_file")
    wf.connect(inputnode, "t1w_preproc", ds_t1w_preproc, "in_file")

    ds_t1w_mask = pe.Node(DerivativesDataSink(base_directory=base_directory,
                                              out_path_base=out_path_base,
                                              desc='brain',
                                              suffix='mask',
                                              compress=True),
                          name='ds_t1w_mask',
                          run_without_submitting=True)
    ds_t1w_mask.inputs.Type = 'Brain'
    wf.connect(generic_bids_file, "out_file", ds_t1w_mask, "source_file")
    wf.connect(inputnode, "t1w_mask", ds_t1w_mask, "in_file")

    # Bianca masks
    ds_bianca_wm_mask = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        desc='bianca',
        suffix='wmmask',
        compress=True),
                                name='ds_bianca_wm_mask',
                                run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_bianca_wm_mask, "source_file")
    wf.connect(inputnode, "bianca_wm_mask_file", ds_bianca_wm_mask, "in_file")

    ds_bianca_vent_mask = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        desc='bianca',
        suffix='ventmask',
        compress=True),
                                  name='ds_bianca_vent_mask',
                                  run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_bianca_vent_mask,
               "source_file")
    wf.connect(inputnode, "bianca_vent_mask_file", ds_bianca_vent_mask,
               "in_file")

    ds_distance_map = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        desc='bianca',
        suffix='ventdistmap',
        compress=True),
                              name='ds_distance_map',
                              run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_distance_map, "source_file")
    wf.connect(inputnode, "distance_map", ds_distance_map, "in_file")

    ds_perivent_mask = pe.Node(DerivativesDataSink(
        base_directory=base_directory, out_path_base=out_path_base),
                               name="ds_perivent_mask")
    ds_perivent_mask.inputs.desc = "periventmask"
    wf.connect(generic_bids_file, "out_file", ds_perivent_mask, "source_file")
    wf.connect(inputnode, "perivent_mask", ds_perivent_mask, "in_file")

    ds_deepWM_mask = pe.Node(DerivativesDataSink(base_directory=base_directory,
                                                 out_path_base=out_path_base),
                             name="ds_deepWM_mask")
    ds_deepWM_mask.inputs.desc = "deepWMmask"
    wf.connect(generic_bids_file, "out_file", ds_deepWM_mask, "source_file")
    wf.connect(inputnode, "deepWM_mask", ds_deepWM_mask, "in_file")

    # MNI
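    # 'from' is a Python keyword, so the from/to entities are passed via dict unpacking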
    ds_t1w_to_MNI_warp = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        allowed_entities=['from', 'to'],
        suffix='warpfield',
        **{
            'from': 'tpl',
            'to': 'MNI'
        }),
                                 name='ds_t1w_to_MNI_warp',
                                 run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_t1w_to_MNI_warp,
               "source_file")
    wf.connect(inputnode, "t1w_2_MNI_warp", ds_t1w_to_MNI_warp, "in_file")

    df_t1w_MNIspace = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        space="MNI",
        desc="warped2mm"),
                              name="df_t1w_MNIspace")
    wf.connect(generic_bids_file, "out_file", df_t1w_MNIspace, "source_file")
    wf.connect(inputnode, "t1w_MNIspace", df_t1w_MNIspace, "in_file")

    ds_t1w_to_MNI_xfm = pe.Node(DerivativesDataSink(
        base_directory=base_directory,
        out_path_base=out_path_base,
        allowed_entities=['from', 'to'],
        suffix='xfm',
        **{
            'from': 'tpl',
            'to': 'MNI'
        }),
                                name='ds_t1w_to_MNI_xfm',
                                run_without_submitting=True)
    wf.connect(generic_bids_file, "out_file", ds_t1w_to_MNI_xfm, "source_file")
    wf.connect(inputnode, "t1w_2_MNI_xfm", ds_t1w_to_MNI_xfm, "in_file")

    return wf
Example #8
def post_locate_masking(locate_dir, wd_dir, crash_dir, out_dir, subjects_sessions, n_cpu=1):
    out_dir.mkdir(exist_ok=True, parents=True)

    wf = Workflow(name="post_locate_masking")
    wf.base_dir = wd_dir
    wf.config.remove_unnecessary_outputs = False
    wf.config["execution"]["crashdump_dir"] = crash_dir
    wf.config["monitoring"]["enabled"] = "true"

    base_directory = str(out_dir.parent)
    out_path_base = str(out_dir.name)

    subjects, sessions = list(zip(*subjects_sessions))
    infosource = Node(niu.IdentityInterface(fields=["subject", "session"]), name="infosource")
    infosource.iterables = [("subject", subjects),
                            ("session", sessions),
                            ]
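    # Pair subjects and sessions one-to-one instead of iterating over their Cartesian product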
    infosource.synchronize = True

    def subject_info_fnc(locate_dir, subject, session):
        from pathlib import Path
        subses = f"sub-{subject}ses-{session}"

        # bianca mask
        search_pattern = f"*/{subses}_biancamask.nii.gz"
        bianca_mask = list(Path(locate_dir).glob(search_pattern))
        if len(bianca_mask) != 1:
            raise Exception(f"Expected one file, but {len(bianca_mask)} found. {search_pattern}")
        bianca_mask = bianca_mask[0]

        # locate output
        search_pattern = f"*/*_results_directory/{subses}_BIANCA_LOCATE_binarylesionmap.nii.gz"
        locate_mask = list(Path(locate_dir).glob(search_pattern))
        if len(locate_mask) != 1:
            raise Exception(f"Expected one file, but {len(locate_mask)} found. {search_pattern}")
        locate_mask = locate_mask[0]

        generic_bids_file = f"sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_FLAIR.nii.gz"
        out_list = [bianca_mask, locate_mask, generic_bids_file]
        return [str(o) for o in out_list]  # as Path is not taken everywhere

    grabber = Node(niu.Function(input_names=["locate_dir", "subject", "session"],
                                output_names=["bianca_mask", "locate_mask", "generic_bids_file"],
                                function=subject_info_fnc),
                   name="grabber"
                   )
    grabber.inputs.locate_dir = locate_dir

    wf.connect([(infosource, grabber, [("subject", "subject"),
                                       ("session", "session")])])

    locate_output_masked = Node(fsl.ApplyMask(), name="locate_output_masked")
    wf.connect(grabber, "locate_mask", locate_output_masked, "in_file")
    wf.connect(grabber, "bianca_mask", locate_output_masked, "mask_file")

    ds = Node(DerivativesDataSink(base_directory=base_directory, out_path_base=out_path_base), name="ds")
    ds.inputs.suffix = "locateBinaryLesionMap"
    ds.inputs.desc = "biancaMasked"
    wf.connect(locate_output_masked, "out_file", ds, "in_file")
    wf.connect(grabber, "generic_bids_file", ds, "source_file")

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_cpu})
Example #9
def main(derivatives, ds):

    if ds == 'ds-01':
        subjects = ['{:02d}'.format(s) for s in range(1, 20)]
    elif ds == 'ds-02':
        subjects = ['{:02d}'.format(s) for s in range(1, 16)]
        subjects.pop(3) # Remove 4

    wf_folder = '/tmp/workflow_folders'

    templates = {'preproc':op.join(derivatives, ds, 'fmriprep', 'sub-{subject}', 'func',
                                   'sub-{subject}_task-randomdotmotion_run-*_space-T1w_desc-preproc_bold.nii.gz')}

    if ds == 'ds-01':
        templates['individual_mask'] = op.join(derivatives, ds, 'conjunct_masks', 'sub-{subject}', 'anat',
                                               'sub-{subject}_space-FLASH_desc-{mask}_space-T1w.nii.gz')

    elif ds =='ds-02':
        templates['individual_mask'] = op.join(derivatives, ds, 'conjunct_masks', 'sub-{subject}', 'anat',
                                               'sub-{subject}_desc-{mask}_mask.nii.gz')

    wf = pe.Workflow(name='extract_signal_masks_{}'.format(ds),
                     base_dir=wf_folder)

    mask_identity = pe.Node(niu.IdentityInterface(fields=['mask']),
                            name='mask_identity')
    mask_identity.iterables = [('mask', ['stnl', 'stnr'])]

    selector = pe.Node(nio.SelectFiles(templates),
                       name='selector')

    selector.iterables = [('subject', subjects)]
    wf.connect(mask_identity, 'mask', selector, 'mask')

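    # Extract voxelwise percent-signal-change time series within the mask and write them to a CSV file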
    def extract_signal(preproc, mask):
        from nilearn import image
        from nilearn import input_data
        from nipype.utils.filemanip import split_filename
        import os.path as op
        import pandas as pd

        _, fn, ext = split_filename(preproc)
        masker = input_data.NiftiMasker(mask, standardize='psc')

        data = pd.DataFrame(masker.fit_transform(preproc))

        new_fn = op.abspath('{}_signal.csv'.format(fn))
        data.to_csv(new_fn)

        return new_fn

    extract_signal_node = pe.MapNode(niu.Function(function=extract_signal,
                                                  input_names=['preproc', 'mask'],
                                                  output_names=['signal']),
                                     iterfield=['preproc'],
                                     name='extract_signal_node')

    wf.connect(selector, 'preproc', extract_signal_node, 'preproc')
    wf.connect(selector, 'individual_mask', extract_signal_node, 'mask')

    datasink_signal = pe.MapNode(DerivativesDataSink(base_directory=op.join(derivatives, ds),
                                                     out_path_base='extracted_signal'),
                                 iterfield=['source_file', 'in_file'],
                                 name='datasink_signal')

    wf.connect(selector, 'preproc', datasink_signal, 'source_file')
    wf.connect(extract_signal_node, 'signal', datasink_signal, 'in_file')
    wf.connect(mask_identity, 'mask', datasink_signal, 'desc')


    wf.run(plugin='MultiProc',
           plugin_args={'n_procs':8})
Example #10
def main(subject, session, bids_folder, space='fsnative', n_procs=12):

    base_dir = '/scratch/gdehol/workflow_folders'

    if not op.exists(base_dir):
        base_dir = '/tmp'

    wf = pe.Workflow(name=f'smooth_{subject}_{session}_{space}',
                     base_dir=base_dir)

    runs = get_runs(subject, session)
    fns_l = [
        get_surf_file(subject, session, run, bids_folder, 'lh') for run in runs
    ]
    fns_r = [
        get_surf_file(subject, session, run, bids_folder, 'rh') for run in runs
    ]
    fns = fns_l + fns_r

    hemis = ['lh'] * len(runs) + ['rh'] * len(runs)

    input_node = pe.Node(niu.IdentityInterface(
        fields=['freesurfer_subject', 'surface_files', 'hemis']),
                         name='input_node')
    input_node.inputs.freesurfer_subject = f'sub-{subject}'
    input_node.inputs.surface_files = fns
    input_node.inputs.hemis = hemis

    freesurfer_dir = op.join(bids_folder, 'derivatives', 'freesurfer')
    smoother = pe.MapNode(freesurfer.SurfaceSmooth(
        fwhm=5, subjects_dir=freesurfer_dir),
                          iterfield=['in_file', 'hemi'],
                          name='smoother')

    wf.connect(input_node, 'freesurfer_subject', smoother, 'subject_id')
    wf.connect(input_node, 'surface_files', smoother, 'in_file')
    wf.connect(input_node, 'hemis', smoother, 'hemi')

    def get_suffix(in_files):
        import re
        reg = re.compile(
            r'.*/(?P<subject>sub-[0-9]+)_.*_hemi-(?P<hemi>L|R)_bold\.func\.gii')
        hemis = [reg.match(fn).group(2) for fn in in_files]

        return ['_hemi-{}'.format(hemi) for hemi in hemis]

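    # Drop the original suffix/extension entities and write outputs as *_desc-smoothed_bold.func.gii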
    ds = pe.MapNode(DerivativesDataSink(
        out_path_base='smoothed',
        dismiss_entities=['suffix', 'extension'],
        extension=".func.gii",
        suffix='bold'),
                    iterfield=['source_file', 'in_file'],
                    name='datasink')
    ds.inputs.base_directory = op.join(bids_folder, 'derivatives')
    ds.inputs.desc = 'smoothed'

    wf.connect(input_node, 'surface_files', ds, 'source_file')
    wf.connect(smoother, 'out_file', ds, 'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_procs})
Example #11
def runpipeline(parser):
    # Parse inputs
    args = parser.parse_args()
    print(args)

    # 1) parse required inputs
    bids_dir = args.bids_dir
    participant = args.participant_label

    # 2) parse optional inputs
    nthreads = int(args.nthreads)
    bet_thr = float(args.bet_thr)
    small_fov = bool(args.small_fov)
    read_task_SNR = bool(args.taskSNR)

    # 2a) Need BIDS directory if no subjects chosen, use BIDSDataGrabber for this
    layout = BIDSLayout(bids_dir)

    if args.subjects:
        subject_id = [s for s in args.subjects.split(",")]
        print(subject_id)
    else:
        subject_id = layout.get_subjects()

    # 2b) set directories for results
    deriv_dir = op.join(op.realpath(bids_dir), "derivatives")

    # Set work & crash directories
    if args.work_dir:
        work_dir = op.realpath(args.work_dir)
        crash_dir = op.join(op.realpath(args.work_dir), "crash")
    else:
        work_dir = op.join(bids_dir, "derivatives/work")
        crash_dir = op.join(op.join(op.realpath(bids_dir), "derivatives/work"),
                            "crash")
    if len(subject_id) == 1:
        work_dir = op.join(work_dir, subject_id[0])
        crash_dir = op.join(work_dir, "crash")

    if not op.exists(work_dir):
        os.makedirs(work_dir)
    if not op.exists(crash_dir):
        os.makedirs(crash_dir)

    # 2c) set output directories
    if args.out_dir:
        out_dir = op.realpath(args.out_dir)
    else:
        out_dir = op.join(deriv_dir, "phaseprep")

    config.update_config({
        "logging": {
            "log_directory": work_dir,
            "log_to_file": True,
        },
        "execution": {
            "crashdump_dir": crash_dir,
            "crashfile_format": "txt",
            "hash_method": "content",
            "remove_unnecessary_outputs": False,
        },
    })

    logging.update_logging(config)

    phaseprep = pe.Workflow(name="phaseprep")
    phaseprep.base_dir = work_dir
    sink_dict = {}

    infosource = pe.Node(interface=ul.IdentityInterface(fields=["subject_id"]),
                         name="infosource")
    infosource.iterables = [("subject_id", subject_id)]

    filegrabber = pe.Node(
        ul.Function(
            function=get_magandphase,
            input_names=["bids_dir", "subject_id"],
            output_names=["maglist", "phaselist"],
        ),
        name="filegrabber",
    )
    filegrabber.inputs.bids_dir = bids_dir

    phaseprep.connect([(infosource, filegrabber, [("subject_id", "subject_id")])])

    # Step two will be magnitude preprocessing
    preproc_mag_wf = create_preprocess_mag_wf()
    preproc_mag_wf.inputs.inputspec.frac = bet_thr
    preproc_mag_wf.inputs.extractor.robust = small_fov

    sink_dict["procmag"] = pe.MapNode(
        DerivativesDataSink(
            desc="procmag",
            out_path_base="phaseprep",
            base_directory="out_dir",
            compress=True,
        ),
        name="dsink_procmag",
        iterfield=["in_file", "source_file"],
    )

    phaseprep.connect([
        (
            filegrabber,
            preproc_mag_wf,
            [
                ("maglist", "inputspec.input_mag"),
                (("maglist", get_tasklength), "inputspec.task"),
                (("maglist", get_restlength), "inputspec.rest"),
            ],
        ),
        (
            preproc_mag_wf,
            sink_dict["procmag"],
            [("outputspec.proc_mag", "in_file")],
        ),
        (filegrabber, sink_dict["procmag"], [("maglist", "source_file")]),
    ])

    # Step three will be phase preprocessing
    preproc_phase_wf = create_preprocess_phase_wf()

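    # Reuse the magnitude sink's configuration for the phase outputs, changing only the description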
    sink_dict["procphase"] = sink_dict["procmag"].clone("procphase")
    sink_dict["procphase"].inputs.desc = "procphase"

    phaseprep.connect([
        (
            filegrabber,
            preproc_phase_wf,
            [
                ("phaselist", "inputspec.input_phase"),
                ("maglist", "inputspec.input_mag"),
                (("phaselist", get_tasklength), "inputspec.task"),
                (("phaselist", get_restlength), "inputspec.rest"),
            ],
        ),
        (
            preproc_mag_wf,
            preproc_phase_wf,
            [
                ("outputspec.motion_par", "inputspec.motion_par"),
                ("outputspec.mask_file", "inputspec.mask_file"),
            ],
        ),
        (
            preproc_phase_wf,
            sink_dict["procphase"],
            [("outputspec.proc_phase", "in_file")],
        ),
        (filegrabber, sink_dict["procphase"], [("phaselist", "source_file")]),
    ])

    # Regress ge magnitude and phase
    phaseregress = pe.MapNode(
        interface=PhaseFitOdr.PhaseFitOdr(),
        name="phaseregressodr",
        iterfield=["phase", "mag", "TR"],
    )
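    # Fan out over four noise floor settings; each value yields an independent fit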
    phaseregress.iterables = ("noise_lb", [0.1, 0.15, 0.25, 0.4])
    phaseregress.inputs.n_threads = 1

    sink_dict["micro"] = sink_dict["procmag"].clone("micro")
    sink_dict["micro"].inputs.desc = "micro"
    sink_dict["macro"] = sink_dict["procmag"].clone("macro")
    sink_dict["macro"].inputs.desc = "macro"
    sink_dict["r2"] = sink_dict["procmag"].clone("r2")
    sink_dict["r2"].inputs.desc = "r2"
    sink_dict["beta"] = sink_dict["procmag"].clone("beta")
    sink_dict["beta"].inputs.desc = "beta"

    phaseprep.connect([
        (
            preproc_mag_wf,
            phaseregress,
            [
                ("outputspec.proc_mag", "mag"),
                (("outputspec.proc_mag", get_TR), "TR"),
            ],
        ),
        (preproc_phase_wf, phaseregress, [("outputspec.proc_phase", "phase")]),
        (phaseregress, sink_dict["macro"], [("filt", "in_file")]),
        (filegrabber, sink_dict["macro"], [("maglist", "source_file")]),
        (phaseregress, sink_dict["micro"], [("sim", "in_file")]),
        (filegrabber, sink_dict["micro"], [("maglist", "source_file")]),
        (phaseregress, sink_dict["r2"], [("corr", "in_file")]),
        (filegrabber, sink_dict["r2"], [("maglist", "source_file")]),
        (phaseregress, sink_dict["beta"], [("beta", "in_file")]),
        (filegrabber, sink_dict["beta"], [("maglist", "source_file")]),
    ])

    # if tcompcor is true run with tcompcor applied as well
    if args.tcompcor:
        tcompcor = pe.MapNode(interface=TCompCor(),
                              name="tcompcor",
                              iterfield=["realigned_file"])

        stripheader_tcompcor = pe.MapNode(
            interface=ul.Function(
                function=stripheader,
                input_names=["filename"],
                output_names=["new_filename"],
            ),
            name="stripheader_tcompcor",
            iterfield=["filename"],
        )

        phaseregress_multi = pe.MapNode(
            interface=PhaseFitOdr.PhaseFitOdr(),
            name="phaseregress_multi",
            iterfield=["phase", "mag", "global_regressors", "TR"],
        )
        phaseregress_multi.inputs.n_threads = 1
        phaseregress_multi.iterables = ("noise_lb", [0.1, 0.15, 0.25, 0.4])

        sink_dict["micro_tcomp"] = sink_dict["procmag"].clone("micro_tcomp")
        sink_dict["micro_tcomp"].inputs.desc = "micro_tcomp"
        sink_dict["macro_tcomp"] = sink_dict["procmag"].clone("macro_tcomp")
        sink_dict["macro_tcomp"].inputs.desc = "macro_tcomp"
        sink_dict["r2_tcomp"] = sink_dict["procmag"].clone("r2_tcomp")
        sink_dict["r2_tcomp"].inputs.desc = "r2_tcomp"
        sink_dict["beta_tcomp"] = sink_dict["procmag"].clone("beta_tcomp")
        sink_dict["beta_tcomp"].inputs.desc = "beta_tcomp"

        phaseprep.connect([
            (
                preproc_mag_wf,
                tcompcor,
                [
                    ("outputspec.proc_mag", "realigned_file"),
                    ("outputspec.mask_file", "mask_files"),
                ],
            ),
            (tcompcor, stripheader_tcompcor, [("components_file", "filename")]),
            (
                stripheader_tcompcor,
                phaseregress_multi,
                [("new_filename", "global_regressors")],
            ),
            (
                preproc_mag_wf,
                phaseregress_multi,
                [
                    ("outputspec.proc_mag", "mag"),
                    (("outputspec.proc_mag", get_TR), "TR"),
                ],
            ),
            (
                preproc_phase_wf,
                phaseregress_multi,
                [("outputspec.proc_phase", "phase")],
            ),
            (phaseregress_multi, sink_dict["macro_tcomp"], [("filt", "in_file")
                                                            ]),
            (filegrabber, sink_dict["macro_tcomp"], [("maglist", "source_file")
                                                     ]),
            (phaseregress_multi, sink_dict["micro_tcomp"], [("sim", "in_file")
                                                            ]),
            (filegrabber, sink_dict["micro_tcomp"], [("maglist", "source_file")
                                                     ]),
            (phaseregress_multi, sink_dict["r2_tcomp"], [("corr", "in_file")]),
            (filegrabber, sink_dict["r2_tcomp"], [("maglist", "source_file")]),
            (phaseregress, sink_dict["beta_tcomp"], [("beta", "in_file")]),
            (filegrabber, sink_dict["beta_tcomp"], [("maglist", "source_file")
                                                    ]),
        ])
    # Step five will be ongoing during the previous steps ensuring correct sinking
    # Step six will be running this into a report

    print("setup pipline succesfully")
    if not args.test:
        print("running pipeline")
        starttime = time.time()
        phaseprep.write_graph(format="png")
        phaseprep.run(plugin="MultiProc", plugin_args={"n_procs": nthreads})
        print("completed pipeline in ", time.time() - starttime, " seconds.")
Example #12
feat_select = pe.Node(nio.SelectFiles({
    'cope': 'stats/cope*.nii.gz',
    'pe': 'stats/pe[0-9][0-9].nii.gz',
    'tstat': 'stats/tstat*.nii.gz',
    'varcope': 'stats/varcope*.nii.gz',
    'zstat': 'stats/zstat*.nii.gz',
}),
                      name='feat_select')

ds_cope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                      keep_dtype=False,
                                      suffix='cope',
                                      desc='intask'),
                  name='ds_cope',
                  run_without_submitting=True)

ds_varcope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                         keep_dtype=False,
                                         suffix='varcope',
                                         desc='intask'),
                     name='ds_varcope',
                     run_without_submitting=True)

ds_zstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                       keep_dtype=False,
                                       suffix='zstat',
                                       desc='intask'),
                   name='ds_zstat',
                   run_without_submitting=True)
Example #13
def init_sdc_unwarp_wf(omp_nthreads, fmap_demean, debug, name='sdc_unwarp_wf'):
    """
    Apply the warping given by a displacements fieldmap.

    This workflow takes in a displacements fieldmap and calculates the corresponding
    displacements field (in other words, an ANTs-compatible warp file).

    It also calculates a new mask for the input dataset, taking the distortions into account.
    The mask is restricted to the fieldmap's field of view, since corrections cannot be
    performed outside of it.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.unwarp import init_sdc_unwarp_wf
        wf = init_sdc_unwarp_wf(omp_nthreads=8,
                                fmap_demean=True,
                                debug=False)


    Inputs

        in_reference
            the reference image
        in_reference_brain
            the reference image (skull-stripped)
        in_mask
            a brain mask corresponding to ``in_reference``
        metadata
            metadata associated to the ``in_reference`` EPI input
        fmap
            the fieldmap in Hz
        fmap_ref
            the reference (anatomical) image corresponding to ``fmap``
        fmap_mask
            a brain mask corresponding to ``fmap``


    Outputs

        out_reference
            the ``in_reference`` after unwarping
        out_reference_brain
            the ``in_reference`` after unwarping and skullstripping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_jacobian
            the Jacobian of the field (for dropout alleviation)
        out_mask
            mask of the unwarped input file

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_reference', 'in_reference_brain', 'in_mask', 'metadata',
        'fmap_ref', 'fmap_mask', 'fmap'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_reference', 'out_reference_brain', 'out_warp', 'out_mask',
        'out_jacobian'
    ]),
                         name='outputnode')

    # Register the reference of the fieldmap to the reference
    # of the target image (the one that shall be corrected)
    ants_settings = pkgr.resource_filename('sdcflows',
                                           'data/fmap-any_registration.json')
    if debug:
        ants_settings = pkgr.resource_filename(
            'sdcflows', 'data/fmap-any_registration_testing.json')
    fmap2ref_reg = pe.Node(ANTSRegistrationRPT(
        generate_report=True,
        from_file=ants_settings,
        output_inverse_warped_image=True,
        output_warped_image=True),
                           name='fmap2ref_reg',
                           n_procs=omp_nthreads)

    ds_report_reg = pe.Node(DerivativesDataSink(desc='magnitude',
                                                suffix='bold'),
                            name='ds_report_reg',
                            mem_gb=0.01,
                            run_without_submitting=True)

    # Map the VSM into the EPI space
    fmap2ref_apply = pe.Node(ANTSApplyTransformsRPT(generate_report=True,
                                                    dimension=3,
                                                    interpolation='BSpline',
                                                    float=True),
                             name='fmap2ref_apply')

    fmap_mask2ref_apply = pe.Node(ANTSApplyTransformsRPT(
        generate_report=False,
        dimension=3,
        interpolation='MultiLabel',
        float=True),
                                  name='fmap_mask2ref_apply')

    ds_report_vsm = pe.Node(DerivativesDataSink(desc='fieldmap',
                                                suffix='bold'),
                            name='ds_report_vsm',
                            mem_gb=0.01,
                            run_without_submitting=True)

    # Fieldmap to rads and then to voxels (VSM - voxel shift map)
    torads = pe.Node(FieldToRadS(fmap_range=0.5), name='torads')

    get_ees = pe.Node(niu.Function(function=_get_ees, output_names=['ees']),
                      name='get_ees')

    gen_vsm = pe.Node(fsl.FUGUE(save_unmasked_shift=True), name='gen_vsm')
    # Convert the VSM into a DFM (displacements field map)
    # or: FUGUE shift to ANTS warping.
    vsm2dfm = pe.Node(itk.FUGUEvsm2ANTSwarp(), name='vsm2dfm')
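    # For reference: FUGUE expects the fieldmap in rad/s plus a dwell time in
    # seconds, so the shift along the phase-encoding axis is
    #   shift_vox = fmap[rad/s] * dwell_time / (2 * pi) = fmap[Hz] * EES,
    # and vsm2dfm rescales that voxel shift by the voxel size into an
    # ANTs-compatible displacements field.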
    jac_dfm = pe.Node(ants.CreateJacobianDeterminantImage(
        imageDimension=3, outputImage='jacobian.nii.gz'),
                      name='jac_dfm')

    unwarp_reference = pe.Node(ANTSApplyTransformsRPT(
        dimension=3,
        generate_report=False,
        float=True,
        interpolation='LanczosWindowedSinc'),
                               name='unwarp_reference')

    fieldmap_fov_mask = pe.Node(FilledImageLike(dtype='uint8'),
                                name='fieldmap_fov_mask')

    fmap_fov2ref_apply = pe.Node(ANTSApplyTransformsRPT(
        generate_report=False,
        dimension=3,
        interpolation='NearestNeighbor',
        float=True),
                                 name='fmap_fov2ref_apply')

    apply_fov_mask = pe.Node(fsl.ApplyMask(), name="apply_fov_mask")

    enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
        omp_nthreads=omp_nthreads, pre_mask=True)

    workflow.connect([
        (inputnode, fmap2ref_reg, [('fmap_ref', 'moving_image')]),
        (inputnode, fmap2ref_apply, [('in_reference', 'reference_image')]),
        (fmap2ref_reg, fmap2ref_apply, [('composite_transform', 'transforms')
                                        ]),
        (inputnode, fmap_mask2ref_apply, [('in_reference', 'reference_image')
                                          ]),
        (fmap2ref_reg, fmap_mask2ref_apply, [('composite_transform',
                                              'transforms')]),
        (fmap2ref_apply, ds_report_vsm, [('out_report', 'in_file')]),
        (inputnode, fmap2ref_reg, [('in_reference_brain', 'fixed_image')]),
        (fmap2ref_reg, ds_report_reg, [('out_report', 'in_file')]),
        (inputnode, fmap2ref_apply, [('fmap', 'input_image')]),
        (inputnode, fmap_mask2ref_apply, [('fmap_mask', 'input_image')]),
        (fmap2ref_apply, torads, [('output_image', 'in_file')]),
        (inputnode, get_ees, [('in_reference', 'in_file'),
                              ('metadata', 'in_meta')]),
        (fmap_mask2ref_apply, gen_vsm, [('output_image', 'mask_file')]),
        (get_ees, gen_vsm, [('ees', 'dwell_time')]),
        (inputnode, gen_vsm, [(('metadata', _get_pedir_fugue),
                               'unwarp_direction')]),
        (inputnode, vsm2dfm, [(('metadata', _get_pedir_bids), 'pe_dir')]),
        (torads, gen_vsm, [('out_file', 'fmap_in_file')]),
        (vsm2dfm, unwarp_reference, [('out_file', 'transforms')]),
        (inputnode, unwarp_reference, [('in_reference', 'reference_image')]),
        (inputnode, unwarp_reference, [('in_reference', 'input_image')]),
        (vsm2dfm, outputnode, [('out_file', 'out_warp')]),
        (vsm2dfm, jac_dfm, [('out_file', 'deformationField')]),
        (inputnode, fieldmap_fov_mask, [('fmap_ref', 'in_file')]),
        (fieldmap_fov_mask, fmap_fov2ref_apply, [('out_file', 'input_image')]),
        (inputnode, fmap_fov2ref_apply, [('in_reference', 'reference_image')]),
        (fmap2ref_reg, fmap_fov2ref_apply, [('composite_transform',
                                             'transforms')]),
        (fmap_fov2ref_apply, apply_fov_mask, [('output_image', 'mask_file')]),
        (unwarp_reference, apply_fov_mask, [('output_image', 'in_file')]),
        (apply_fov_mask, enhance_and_skullstrip_bold_wf,
         [('out_file', 'inputnode.in_file')]),
        (fmap_mask2ref_apply, enhance_and_skullstrip_bold_wf,
         [('output_image', 'inputnode.pre_mask')]),
        (apply_fov_mask, outputnode, [('out_file', 'out_reference')]),
        (enhance_and_skullstrip_bold_wf, outputnode,
         [('outputnode.mask_file', 'out_mask'),
          ('outputnode.skull_stripped_file', 'out_reference_brain')]),
        (jac_dfm, outputnode, [('jacobian_image', 'out_jacobian')]),
    ])

    if fmap_demean:
        # Demean within mask
        demean = pe.Node(DemeanImage(), name='demean')

        workflow.connect([
            (gen_vsm, demean, [('shift_out_file', 'in_file')]),
            (fmap_mask2ref_apply, demean, [('output_image', 'in_mask')]),
            (demean, vsm2dfm, [('out_file', 'in_file')]),
        ])

    else:
        workflow.connect([
            (gen_vsm, vsm2dfm, [('shift_out_file', 'in_file')]),
        ])

    return workflow
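
# A minimal usage sketch. Assumptions: this factory is exposed as
# ``init_sdc_unwarp_wf`` (the import path below is an assumption), and all
# file paths and metadata values are placeholders.
from sdcflows.workflows.unwarp import init_sdc_unwarp_wf

unwarp_wf = init_sdc_unwarp_wf(omp_nthreads=4, debug=False, fmap_demean=True)
unwarp_wf.inputs.inputnode.in_reference = 'sub-01_boldref.nii.gz'
unwarp_wf.inputs.inputnode.in_reference_brain = 'sub-01_desc-brain_boldref.nii.gz'
unwarp_wf.inputs.inputnode.in_mask = 'sub-01_desc-brain_mask.nii.gz'
unwarp_wf.inputs.inputnode.metadata = {'PhaseEncodingDirection': 'j-',
                                       'EffectiveEchoSpacing': 0.00059}
unwarp_wf.inputs.inputnode.fmap = 'sub-01_fieldmap.nii.gz'
unwarp_wf.inputs.inputnode.fmap_ref = 'sub-01_magnitude.nii.gz'
unwarp_wf.inputs.inputnode.fmap_mask = 'sub-01_desc-brain_fmapmask.nii.gz'
unwarp_wf.run()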
Example #14
def invert(in_file):
    # The top of this snippet was truncated in the source; the imports and the
    # computation of ``inverse`` (1 / tSNR via nilearn) are reconstructed here
    # as assumptions.
    import os.path as op
    from nilearn import image
    from nipype.utils.filemanip import split_filename

    _, fn, ext = split_filename(in_file)
    inverse = image.math_img('1. / img', img=in_file)
    new_fn = op.abspath('{}_inverse{}'.format(fn, ext))
    inverse.to_filename(new_fn)

    return new_fn


invert_tsnr = pe.MapNode(niu.Function(function=invert,
                                      input_names=['in_file'],
                                      output_names=['out_file']),
                         iterfield=['in_file'],
                         name='invert_tsnr')
wf.connect(masker, 'out_file', invert_tsnr, 'in_file')

ds_tsnr = pe.MapNode(DerivativesDataSink(base_directory=derivatives,
                                         suffix='tsnr',
                                         out_path_base='tsnr'),
                     iterfield=['in_file', 'source_file'],
                     name='datasink_tsnr')

wf.connect(inputnode, 'preproc', ds_tsnr, 'source_file')
wf.connect(masker, 'out_file', ds_tsnr, 'in_file')

ds_std = pe.MapNode(DerivativesDataSink(base_directory=derivatives,
                                        suffix='stddev',
                                        out_path_base='tsnr'),
                    iterfield=['in_file', 'source_file'],
                    name='datasink_std')

wf.connect(inputnode, 'preproc', ds_std, 'source_file')
wf.connect(tsnr, 'stddev_file', ds_std, 'in_file')
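
# Note (hedged): with out_path_base='tsnr', DerivativesDataSink writes below
# <derivatives>/tsnr/, mirroring each ``source_file``'s sub-*/[ses-*/]func/
# layout and attaching the 'tsnr'/'stddev' suffixes to the derived filenames;
# exact naming varies slightly across niworkflows versions.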
Example #15
grab_func.inputs.base_dir = os.path.join(project_dir, data_dir)
grab_func.inputs.output_query = {'bold': dict(extension=['nii.gz'], suffix='bold')}
grab_func.iterables = [('subject', subject_list), ('run', runs)]
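# Two iterables without ``synchronize=True`` expand to their Cartesian
# product: the grabber (and everything downstream) runs once per
# (subject, run) combination.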

sel2 = Node(Select(), name='select2')
sel2.inputs.index = 0

wf.connect(grab_func, 'bold', sel2, 'inlist')

"""
============================================
Output of Anatomical and Functional Images
============================================
"""
dsink = MapNode(DerivativesDataSink(), name='dsink', iterfield=["in_file", "source_file"])
dsink.inputs.base_directory = os.getcwd()
dsink.inputs.desc = 'preprocessed'

wf.connect(sel1, 'out', dsink, 'in_file')
wf.connect(sel1, 'out', dsink, 'source_file')

dsink2 = MapNode(DerivativesDataSink(), name='dsink2', iterfield=["in_file", "source_file"])
dsink2.inputs.base_directory = os.getcwd()
dsink2.inputs.desc = 'preprocessed'

wf.connect(sel2, 'out', dsink2, 'in_file')
wf.connect(sel2, 'out', dsink2, 'source_file')

wf.run()
Example #16
    def make_registration_wf(input_file,
                             name,
                             subject=subject,
                             target=target,
                             target_mask=target_mask,
                             init_reg=init_reg,
                             t1w_to_mni_transform=t1w_to_mni_transform,
                             t1w_in_mni=t1w_in_mni,
                             mni_brain_mask=mni_brain_mask,
                             ants_numthreads=8):

        workflow = pe.Workflow(base_dir='/tmp/workflow_folders', name=name)

        # 'init_reg' must be declared here; otherwise the assignment and
        # connection below would raise a TraitError.
        input_node = pe.Node(niu.IdentityInterface(fields=[
            'input_file', 'target', 'target_mask', 'init_reg',
            't1w_to_mni_transform', 't1w_in_mni', 'mni_brain_mask'
        ]),
                             name='inputspec')
        input_node.inputs.input_file = input_file
        input_node.inputs.target = target
        input_node.inputs.target_mask = target_mask
        input_node.inputs.init_reg = init_reg
        input_node.inputs.t1w_to_mni_transform = t1w_to_mni_transform
        input_node.inputs.t1w_in_mni = t1w_in_mni
        input_node.inputs.mni_brain_mask = mni_brain_mask

        convert_dtype = pe.Node(fsl.maths.MathsCommand(), name='convert_dtype')
        convert_dtype.inputs.output_datatype = 'double'

        workflow.connect(input_node, 'input_file', convert_dtype, 'in_file')

        inu_n4 = pe.Node(
            N4BiasFieldCorrection(
                dimension=3,
                save_bias=True,
                num_threads=ants_numthreads,
                rescale_intensities=True,
                copy_header=True,
            ),
            n_procs=ants_numthreads,
            name="inu_n4",
        )

        workflow.connect(convert_dtype, 'out_file', inu_n4, 'input_image')

        register = pe.Node(Registration(from_file=registration_scheme,
                                        num_threads=ants_numthreads,
                                        verbose=True),
                           name='registration')

        workflow.connect(inu_n4, 'output_image', register, 'moving_image')

        if init_reg:
            workflow.connect(input_node, 'init_reg', register,
                             'initial_moving_transform')

        workflow.connect(input_node, 'target', register, 'fixed_image')
        workflow.connect(input_node, 'target_mask', register,
                         'fixed_image_masks')

        def get_mask(input_image):
            from nilearn import image
            from nipype.utils.filemanip import split_filename
            import os.path as op

            _, fn, _ = split_filename(input_image)
            mask = image.math_img('im != 0', im=input_image)
            new_fn = op.abspath(fn + '_mask.nii.gz')
            mask.to_filename(new_fn)

            return new_fn

        mask_node = pe.Node(niu.Function(function=get_mask,
                                         input_names=['input_image'],
                                         output_names=['mask']),
                            name='mask_node')

        workflow.connect(register, 'warped_image', mask_node, 'input_image')

        gen_grid_node = pe.Node(GenerateSamplingReference(),
                                name='gen_grid_node')

        workflow.connect(mask_node, 'mask', gen_grid_node, 'fov_mask')
        workflow.connect(inu_n4, 'output_image', gen_grid_node, 'moving_image')
        workflow.connect(input_node, 'target', gen_grid_node, 'fixed_image')

        datasink_image_t1w = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            compress=True,
            base_directory=op.join(bids_folder, 'derivatives')),
                                     name='datasink_image_t1w')
        workflow.connect(input_node, 'input_file', datasink_image_t1w,
                         'source_file')
        datasink_image_t1w.inputs.space = 'T1w'
        datasink_image_t1w.inputs.desc = 'registered'

        datasink_report_t1w = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            space='T1w',
            base_directory=op.join(bids_folder, 'derivatives'),
            datatype='figures'),
                                      name='datasink_report_t1w')

        workflow.connect(input_node, 'input_file', datasink_report_t1w,
                         'source_file')
        datasink_report_t1w.inputs.space = 'T1w'

        transformer = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=True,
            num_threads=ants_numthreads),
                              n_procs=ants_numthreads,
                              name='transformer')
        workflow.connect(transformer, 'output_image', datasink_image_t1w,
                         'in_file')
        workflow.connect(transformer, 'out_report', datasink_report_t1w,
                         'in_file')
        workflow.connect(inu_n4, 'output_image', transformer, 'input_image')
        workflow.connect(gen_grid_node, 'out_file', transformer,
                         'reference_image')
        workflow.connect(register, 'composite_transform', transformer,
                         'transforms')

        concat_transforms = pe.Node(niu.Merge(2), name='concat_transforms')

        workflow.connect(register, 'composite_transform', concat_transforms,
                         'in2')
        workflow.connect(input_node, 't1w_to_mni_transform', concat_transforms,
                         'in1')
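        # ANTs applies a transform list in reverse order (the last transform
        # listed acts first), so [t1w->MNI, EPI->T1w] moves the EPI image into
        # T1w space first and then on to MNI.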

        transformer_to_mni1 = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=False,
            num_threads=ants_numthreads),
                                      n_procs=ants_numthreads,
                                      name='transformer_to_mni1')
        workflow.connect(inu_n4, 'output_image', transformer_to_mni1,
                         'input_image')
        workflow.connect(input_node, 't1w_in_mni', transformer_to_mni1,
                         'reference_image')
        workflow.connect(concat_transforms, 'out', transformer_to_mni1,
                         'transforms')

        mask_node_mni = pe.Node(niu.Function(function=get_mask,
                                             input_names=['input_image'],
                                             output_names=['mask']),
                                name='mask_node_mni')
        workflow.connect(transformer_to_mni1, 'output_image', mask_node_mni,
                         'input_image')

        def join_masks(mask1, mask2):
            from nilearn import image
            from nipype.utils.filemanip import split_filename
            import os.path as op

            _, fn, _ = split_filename(mask1)

            new_mask = image.math_img('(im1 > 0) & (im2 > 0)',
                                      im1=mask1,
                                      im2=mask2)

            new_fn = op.abspath(fn + '_jointmask' + '.nii.gz')

            new_mask.to_filename(new_fn)

            return new_fn

        combine_masks_node = pe.Node(niu.Function(
            function=join_masks,
            input_names=['mask1', 'mask2'],
            output_names=['combined_mask']),
                                     name='combine_mask_node')

        workflow.connect(mask_node_mni, 'mask', combine_masks_node, 'mask1')
        workflow.connect(input_node, 'mni_brain_mask', combine_masks_node,
                         'mask2')

        gen_grid_node_mni = pe.Node(GenerateSamplingReference(),
                                    name='gen_grid_node_mni')
        workflow.connect(combine_masks_node, 'combined_mask',
                         gen_grid_node_mni, 'fov_mask')
        workflow.connect(inu_n4, 'output_image', gen_grid_node_mni,
                         'moving_image')
        workflow.connect(input_node, 't1w_in_mni', gen_grid_node_mni,
                         'fixed_image')

        transformer_to_mni2 = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=False,
            num_threads=ants_numthreads),
                                      n_procs=ants_numthreads,
                                      name='transformer_to_mni2')
        workflow.connect(inu_n4, 'output_image', transformer_to_mni2,
                         'input_image')
        workflow.connect(gen_grid_node_mni, 'out_file', transformer_to_mni2,
                         'reference_image')
        workflow.connect(concat_transforms, 'out', transformer_to_mni2,
                         'transforms')

        datasink_image_mni = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            compress=True,
            base_directory=op.join(bids_folder, 'derivatives')),
                                     name='datasink_mni')
        datasink_image_mni.inputs.source_file = input_file
        datasink_image_mni.inputs.space = 'MNI152NLin2009cAsym'
        datasink_image_mni.inputs.desc = 'registered'

        workflow.connect(input_node, 'input_file', datasink_image_mni,
                         'source_file')
        workflow.connect(transformer_to_mni2, 'output_image',
                         datasink_image_mni, 'in_file')

        return workflow
Example #17
def run_bianca_wf(masterfile,
                  out_dir,
                  wd_dir,
                  crash_dir,
                  df,
                  training_subject_idx,
                  query_subject_idx,
                  name="bianca",
                  n_cpu=4,
                  save_classifier=False,
                  trained_classifier_file=None):
    """

    :param masterfile: str
    :param out_dir:
    :param wd_dir:
    :param crash_dir:
    :param df: df
    :param training_subject_idx: training_subject_idx: list of ints, python-style 0-based; training subjects in df
    :param query_subject_idx: list of ints, python-style 0-based; querysubjects in df
    :param name:
    :param n_cpu:
    :param save_classifier: bool
    :param trained_classifier_file: file previously saved with save_classifier; if given, training subjects
    are ignored and classifier file is used in prediction
    :return: None
    """

    if save_classifier and trained_classifier_file:
        raise RuntimeError(
            "save_classifier and trained_classifier_file cannot be set at the same time"
        )
    if trained_classifier_file:
        trained_classifier_file = str(trained_classifier_file)
    #####
    # masterfile information
    expected_header = [
        'flair', 't1w', 'manual_mask', 'mat', 'subject', 'session'
    ]
    assert df.columns.tolist() == expected_header, (
        f"masterfile columns are off. columns should be "
        f"{expected_header} but are {df.columns}")

    featuresubset = "1,2"
    brainmaskfeaturenum = "2"
    labelfeaturenum = "3"
    matfeaturenum = "4"

    ######
    # workflow
    wf = Workflow(name=name)

    ######
    # subject info
    inputnode = Node(niu.IdentityInterface(fields=['query_subject_idx']),
                     name='inputnode')
    inputnode.iterables = [("query_subject_idx", query_subject_idx)]
    inputnode.synchronize = True

    def get_query_info_fnc(df, query_subject_idx):
        def get_subjects_info(df, idx):
            return df.iloc[idx].subject.tolist()[0], df.iloc[
                idx].session.tolist()[0], df.iloc[idx].flair.tolist()[0]

        query_subject, query_session, query_flair = get_subjects_info(
            df, [query_subject_idx])
        query_subject_num = query_subject_idx + 1
        return query_subject, query_session, query_flair, query_subject_num

    query_info = Node(niu.Function(input_names=["df", "query_subject_idx"],
                                   output_names=[
                                       'query_subject', 'query_session',
                                       'query_flair', 'query_subject_num'
                                   ],
                                   function=get_query_info_fnc),
                      name="query_info")
    query_info.inputs.df = df
    wf.connect(inputnode, "query_subject_idx", query_info, "query_subject_idx")

    def get_training_info_fnc(df, query_subject_idx, training_subject_idx):
        import numpy as np
        training_subject_idx_clean = training_subject_idx.tolist()
        if query_subject_idx in training_subject_idx_clean:
            training_subject_idx_clean.remove(query_subject_idx)
        training_subjects = df.iloc[training_subject_idx_clean].subject.tolist(
        )
        training_sessions = df.iloc[training_subject_idx_clean].session.tolist(
        )
        training_subject_nums_str = ",".join(
            (np.array(training_subject_idx_clean) + 1).astype(str).tolist())
        return training_subject_idx_clean, training_subject_nums_str, training_subjects, training_sessions
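    # get_training_info_fnc implements leave-one-subject-out: the query
    # subject is removed from the training set, and 0-based indices are
    # shifted to the 1-based row numbers BIANCA expects.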

    training_info = Node(niu.Function(
        input_names=["df", "query_subject_idx", "training_subject_idx"],
        output_names=[
            "training_subject_idx", "training_subject_nums_str",
            "training_subjects", "training_sessions"
        ],
        function=get_training_info_fnc),
                         name="training_info")
    training_info.inputs.df = df
    training_info.inputs.training_subject_idx = training_subject_idx
    wf.connect(inputnode, "query_subject_idx", training_info,
               "query_subject_idx")

    bianca = Node(BIANCA(), name="bianca")
    bianca.inputs.masterfile = str(masterfile)
    bianca.inputs.featuresubset = featuresubset
    bianca.inputs.brainmaskfeaturenum = brainmaskfeaturenum
    bianca.inputs.matfeaturenum = matfeaturenum
    bianca.inputs.save_classifier = save_classifier
    wf.connect(query_info, "query_subject_num", bianca, "querysubjectnum")

    if trained_classifier_file:
        bianca.inputs.trained_classifier_file = trained_classifier_file
    else:
        bianca.inputs.labelfeaturenum = labelfeaturenum
        wf.connect(training_info, "training_subject_nums_str", bianca,
                   "trainingnums")

    def classifier_info_fct(masterfile,
                            query_subject,
                            query_session,
                            query_flair,
                            training_subjects=None,
                            training_sessions=None,
                            classifier_file=None):
        d = {
            "masterfile": str(masterfile),
            "query_subject_session": [query_subject, query_session],
            "query_flair": query_flair,
        }
        if training_subjects:
            d["training_subjects_sessions"] = list(
                zip(training_subjects, training_sessions))
        else:
            d["classifier_file"] = classifier_file
        return d

    classifier_info = Node(niu.Function(input_names=[
        "masterfile", "query_subject", "query_session", "query_flair",
        "training_subjects", "training_sessions", "classifier_file"
    ],
                                        output_names=["meta_dict"],
                                        function=classifier_info_fct),
                           name="classifier_info")
    classifier_info.inputs.masterfile = masterfile
    wf.connect(query_info, "query_subject", classifier_info, "query_subject")
    wf.connect(query_info, "query_session", classifier_info, "query_session")
    wf.connect(query_info, "query_flair", classifier_info, "query_flair")
    if trained_classifier_file:
        classifier_info.inputs.classifier_file = trained_classifier_file
    else:
        wf.connect(training_info, "training_subjects", classifier_info,
                   "training_subjects")
        wf.connect(training_info, "training_sessions", classifier_info,
                   "training_sessions")

    ds = Node(DerivativesDataSink(base_directory=str(out_dir.parent),
                                  out_path_base=str(out_dir.name)),
              name="ds")
    ds.inputs.suffix = "LPM"
    wf.connect(bianca, "out_file", ds, "in_file")
    wf.connect(query_info, "query_flair", ds, "source_file")
    wf.connect(classifier_info, "meta_dict", ds, "meta_dict")

    if save_classifier:
        ds_clf = Node(DerivativesDataSink(base_directory=str(out_dir.parent),
                                          out_path_base=str(out_dir.name)),
                      name="ds_clf")
        ds_clf.inputs.suffix = "classifier"
        wf.connect(bianca, "classifier_file", ds_clf, "in_file")
        wf.connect(query_info, "query_flair", ds_clf, "source_file")

        ds_clf_labels = Node(DerivativesDataSink(
            base_directory=str(out_dir.parent),
            out_path_base=str(out_dir.name)),
                             name="ds_clf_labels")
        ds_clf_labels.inputs.suffix = "classifier_labels"
        wf.connect(bianca, "classifier_labels_file", ds_clf_labels, "in_file")
        wf.connect(query_info, "query_flair", ds_clf_labels, "source_file")

    wf.base_dir = wd_dir
    wf.config.remove_unnecessary_outputs = False
    wf.config["execution"]["crashdump_dir"] = crash_dir
    wf.config["monitoring"]["enabled"] = "true"
    # wf.write_graph("workflow_graph.png", graph2use="exec")
    # wf.write_graph("workflow_graph_c.png", graph2use="colored")
    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_cpu})
Example #18
def init_fmap_unwarp_report_wf(name='fmap_unwarp_report_wf', forcedsyn=False):
    """
    Save a reportlet showing how SDC unwarping performed.

    This workflow generates and saves a reportlet showing the effect of fieldmap
    unwarping a BOLD image.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.unwarp import init_fmap_unwarp_report_wf
        wf = init_fmap_unwarp_report_wf()

    **Parameters**

        name : str, optional
            Workflow name (default: fmap_unwarp_report_wf)
        forcedsyn : bool, optional
            Whether SyN-SDC was forced.

    **Inputs**

        in_pre
            Reference image, before unwarping
        in_post
            Reference image, after unwarping
        in_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        in_xfm
            Affine transform from T1 space to BOLD space (ITK format)

    """
    from niworkflows.interfaces import SimpleBeforeAfter
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import extract_wm

    DEFAULT_MEMORY_MIN_GB = 0.01

    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm']),
                        name='inputnode')

    map_seg = pe.Node(ApplyTransforms(dimension=3,
                                      float=True,
                                      interpolation='MultiLabel'),
                      name='map_seg',
                      mem_gb=0.3)

    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    ds_report_sdc = pe.Node(DerivativesDataSink(
        desc='sdc' if not forcedsyn else 'forcedsyn', suffix='bold'),
                            name='ds_report_sdc',
                            mem_gb=DEFAULT_MEMORY_MIN_GB,
                            run_without_submitting=True)

    workflow.connect([
        (inputnode, bold_rpt, [('in_post', 'after'), ('in_pre', 'before')]),
        (bold_rpt, ds_report_sdc, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, bold_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
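
# A minimal usage sketch; file paths are placeholders. The report datasink's
# base_directory and source_file are normally injected by the caller (e.g.,
# fMRIPrep), so they are set here explicitly.
report_wf = init_fmap_unwarp_report_wf(forcedsyn=False)
report_wf.get_node('ds_report_sdc').inputs.base_directory = '/tmp/reports'
report_wf.get_node('ds_report_sdc').inputs.source_file = 'sub-01_task-rest_bold.nii.gz'
report_wf.inputs.inputnode.in_pre = 'boldref_before.nii.gz'
report_wf.inputs.inputnode.in_post = 'boldref_after.nii.gz'
report_wf.inputs.inputnode.in_seg = 'sub-01_dseg.nii.gz'
report_wf.inputs.inputnode.in_xfm = 'sub-01_from-T1w_to-scanner_xfm.txt'
report_wf.run()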
Example #19
def init_timeseries_wf(
    out_dir,
    out_path_base,
    source_file,
    dt,
    work_dir=None,
    name='timeseries_wf',
):
    """
    Calculate timeseries of interest for a bold image in standard space.

    Parameters
    ----------

    out_dir: str
        the output directory
    out_path_base: str
        the new directory for the output, to be created within out_dir
    source_file: str
        a filename for output naming purposes
    dt: float
        repetition time
    work_dir: str
        the working directory for the workflow
    name: str
        the workflow name

    Returns
    -------

    workflow: nipype workflow

    Inputs
    ------

    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non steady state volumes
    csf_mask
        CSF mask in MNI 2mm space
    wm_mask
        WM mask in MNI 2mm space
    cortical_gm_mask
        cortical GM mask in MNI 2mm space

    Outputs
    -------

    None
    """

    DerivativesDataSink.out_path_base = out_path_base
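    # Note: the line above mutates a *class* attribute, so every
    # DerivativesDataSink instantiated later in this process inherits this
    # out_path_base.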

    workflow = Workflow(name=name, base_dir=work_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_std', 'bold_mask_std', 'movpar_file', 'skip_vols', 'csf_mask',
        'wm_mask', 'cortical_gm_mask'
    ]),
                        name='inputnode')

    bold_confs_wf = init_bold_confs_wf(out_dir,
                                       out_path_base,
                                       source_file,
                                       mem_gb=1,
                                       regressors_all_comps=False,
                                       regressors_dvars_th=1.5,
                                       regressors_fd_th=0.5)

    ica_aroma_wf = init_ica_aroma_wf(dt, err_on_aroma_warn=True)

    join = pe.Node(niu.Function(output_names=["out_file"], function=_to_join),
                   name='aroma_confounds')

    merge_metadata = pe.Node(niu.Merge(2),
                             name='merge_metadata',
                             run_without_submitting=True)

    merge_metadata2 = pe.Node(DictMerge(),
                              name='merge_metadata2',
                              run_without_submitting=True)

    ds_timeseries = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                desc='confounds',
                                                source_file=source_file,
                                                suffix='timeseries'),
                            name='ds_confounds')

    ds_aroma_noise_ics = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                     source_file=source_file,
                                                     suffix='AROMAnoiseICs'),
                                 name='ds_aroma_noise_ics')

    ds_melodic_mix = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                 desc='MELODIC',
                                                 source_file=source_file,
                                                 suffix='mixing'),
                             name='ds_melodic_mix')

    ds_aroma_report = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                  desc='mixing',
                                                  source_file=source_file,
                                                  suffix='reportlet'),
                              name='ds_aroma_report')

    workflow.connect([
        (inputnode, bold_confs_wf, [('bold_std', 'inputnode.bold'),
                                    ('bold_mask_std', 'inputnode.bold_mask'),
                                    ('movpar_file', 'inputnode.movpar_file'),
                                    ('skip_vols', 'inputnode.skip_vols'),
                                    ('csf_mask', 'inputnode.csf_mask'),
                                    ('wm_mask', 'inputnode.wm_mask'),
                                    ('cortical_gm_mask',
                                     'inputnode.cortical_gm_mask')]),
        (inputnode, ica_aroma_wf, [('bold_std', 'inputnode.bold_std'),
                                   ('bold_mask_std',
                                    'inputnode.bold_mask_std'),
                                   ('movpar_file', 'inputnode.movpar_file'),
                                   ('skip_vols', 'inputnode.skip_vols')]),

        # merge tsvs
        (bold_confs_wf, join, [('outputnode.confounds_file', 'in_file')]),
        (ica_aroma_wf, join, [('outputnode.aroma_confounds', 'join_file')]),

        # merge metadata
        (bold_confs_wf, merge_metadata, [('outputnode.confounds_metadata',
                                          'in1')]),
        (ica_aroma_wf, merge_metadata, [('outputnode.aroma_metadata', 'in2')]),
        (merge_metadata, merge_metadata2, [('out', 'in_dicts')]),

        # derivatives
        (join, ds_timeseries, [('out_file', 'in_file')]),
        (merge_metadata2, ds_timeseries, [('out_dict', 'meta_dict')]),
        (ica_aroma_wf, ds_aroma_noise_ics, [('outputnode.aroma_noise_ics',
                                             'in_file')]),
        (ica_aroma_wf, ds_melodic_mix, [('outputnode.melodic_mix', 'in_file')
                                        ]),
        (ica_aroma_wf, ds_aroma_report, [('outputnode.out_report', 'in_file')
                                         ]),
    ])

    return workflow
Example #20
def init_phdiff_wf(omp_nthreads, name='phdiff_wf'):
    """
    Estimates the fieldmap using a phase-difference image and one or more
    magnitude images corresponding to two or more :abbr:`GRE (Gradient Echo sequence)`
    acquisitions. The `original code was taken from nipype
    <https://github.com/nipy/nipype/blob/master/nipype/workflows/dmri/fsl/artifacts.py#L514>`_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.phdiff import init_phdiff_wf
        wf = init_phdiff_wf(omp_nthreads=1)


    Outputs::

      outputnode.fmap_ref - The average magnitude image, skull-stripped
      outputnode.fmap_mask - The brain mask applied to the fieldmap
      outputnode.fmap - The estimated fieldmap in Hz


    """

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on a field map that was co-registered to the BOLD reference,
using a custom workflow of *fMRIPrep* derived from D. Greve's `epidewarp.fsl`
[script](http://www.nmr.mgh.harvard.edu/~greve/fbirn/b0/epidewarp.fsl) and
further improvements of HCP Pipelines [@hcppipelines].
"""

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'phasediff']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    def _pick1st(inlist):
        return inlist[0]

    # Read phasediff echo times
    meta = pe.Node(ReadSidecarJSON(bids_validate=False),
                   name='meta',
                   mem_gb=0.01)

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                 name='n4',
                 n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_report_fmap_mask = pe.Node(DerivativesDataSink(desc='brain',
                                                      suffix='mask'),
                                  name='ds_report_fmap_mask',
                                  mem_gb=0.01,
                                  run_without_submitting=True)
    # uses mask from bet; outputs a mask
    # dilate = pe.Node(fsl.maths.MathsCommand(
    #     nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')

    # phase diff -> radians
    pha2rads = pe.Node(niu.Function(function=siemens2rads), name='pha2rads')

    # FSL PRELUDE will perform phase-unwrapping
    prelude = pe.Node(fsl.PRELUDE(), name='prelude')

    denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                        kernel_shape='sphere',
                                        kernel_size=3),
                      name='denoise')

    demean = pe.Node(niu.Function(function=demean_image), name='demean')

    cleanup_wf = cleanup_edge_pipeline(name="cleanup_wf")

    compfmap = pe.Node(Phasediff2Fieldmap(), name='compfmap')

    # The phdiff2fmap interface is equivalent to:
    # rad2rsec (using rads2radsec from nipype.workflows.dmri.fsl.utils)
    # pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='ComputeFieldmapFUGUE')
    # rsec2hz (divide by 2pi)
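    # Net effect: fmap[Hz] = delta_phi[rad] / (2 * pi * delta_TE[s]), where
    # delta_TE comes from the EchoTime1/EchoTime2 sidecar metadata read by
    # ``meta``.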

    workflow.connect([
        (inputnode, meta, [('phasediff', 'in_file')]),
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (magmrg, n4, [('out_avg', 'input_image')]),
        (n4, prelude, [('output_image', 'magnitude_file')]),
        (n4, bet, [('output_image', 'in_file')]),
        (bet, prelude, [('mask_file', 'mask_file')]),
        (inputnode, pha2rads, [('phasediff', 'in_file')]),
        (pha2rads, prelude, [('out', 'phase_file')]),
        (meta, compfmap, [('out_dict', 'metadata')]),
        (prelude, denoise, [('unwrapped_phase_file', 'in_file')]),
        (denoise, demean, [('out_file', 'in_file')]),
        (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
        (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
        (cleanup_wf, compfmap, [('outputnode.out_file', 'in_file')]),
        (compfmap, outputnode, [('out_file', 'fmap')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_report_fmap_mask, [('phasediff', 'source_file')]),
        (bet, ds_report_fmap_mask, [('out_report', 'in_file')]),
    ])

    return workflow
Example #21
def init_fmap_wf(omp_nthreads, fmap_bspline, name='fmap_wf'):
    """
    Fieldmap workflow - when we have a sequence that directly measures the fieldmap
    we just need to mask it (using the corresponding magnitude image) to remove the
    noise in the surrounding air region, and ensure that units are Hz.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.fmap import init_fmap_wf
        wf = init_fmap_wf(omp_nthreads=6, fmap_bspline=False)

    """

    workflow = Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'fieldmap']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')
    # Merge input fieldmap images
    fmapmrg = pe.Node(IntraModalMerge(zero_based_avg=False, hmc=False),
                      name='fmapmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct',
                         n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_report_fmap_mask = pe.Node(DerivativesDataSink(desc='brain',
                                                      suffix='mask'),
                                  name='ds_report_fmap_mask',
                                  run_without_submitting=True)

    workflow.connect([
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (inputnode, fmapmrg, [('fieldmap', 'in_files')]),
        (magmrg, n4_correct, [('out_file', 'input_image')]),
        (n4_correct, bet, [('output_image', 'in_file')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_report_fmap_mask, [('fieldmap', 'source_file')]),
        (bet, ds_report_fmap_mask, [('out_report', 'in_file')]),
    ])

    if fmap_bspline:
        # despike_threshold=1.0, mask_erode=1),
        fmapenh = pe.Node(FieldEnhance(unwrap=False, despike=False),
                          name='fmapenh',
                          mem_gb=4,
                          n_procs=omp_nthreads)

        workflow.connect([
            (bet, fmapenh, [('mask_file', 'in_mask'),
                            ('out_file', 'in_magnitude')]),
            (fmapmrg, fmapenh, [('out_file', 'in_file')]),
            (fmapenh, outputnode, [('out_file', 'fmap')]),
        ])

    else:
        torads = pe.Node(FieldToRadS(), name='torads')
        prelude = pe.Node(fsl.PRELUDE(), name='prelude')
        tohz = pe.Node(FieldToHz(), name='tohz')
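        # PRELUDE unwraps *phase* images, so the fieldmap is first rescaled
        # into radians (torads), unwrapped, then scaled back to Hz (tohz)
        # using the range recorded by FieldToRadS.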

        denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                            kernel_shape='sphere',
                                            kernel_size=3),
                          name='denoise')
        demean = pe.Node(niu.Function(function=demean_image), name='demean')
        cleanup_wf = cleanup_edge_pipeline(name='cleanup_wf')

        applymsk = pe.Node(fsl.ApplyMask(), name='applymsk')

        workflow.connect([
            (bet, prelude, [('mask_file', 'mask_file'),
                            ('out_file', 'magnitude_file')]),
            (fmapmrg, torads, [('out_file', 'in_file')]),
            (torads, tohz, [('fmap_range', 'range_hz')]),
            (torads, prelude, [('out_file', 'phase_file')]),
            (prelude, tohz, [('unwrapped_phase_file', 'in_file')]),
            (tohz, denoise, [('out_file', 'in_file')]),
            (denoise, demean, [('out_file', 'in_file')]),
            (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
            (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
            (cleanup_wf, applymsk, [('outputnode.out_file', 'in_file')]),
            (bet, applymsk, [('mask_file', 'mask_file')]),
            (applymsk, outputnode, [('out_file', 'fmap')]),
        ])

    return workflow
Example #22
def test_pepolar_wf1(bids_layouts, output_path, dataset, workdir):
    """Test preparation workflow."""
    layout = bids_layouts[dataset]

    if dataset == 'testdata':
        bold = layout.get(suffix='bold',
                          dir='LR',
                          direction='LR',
                          extension=['.nii.gz', '.nii'])[0]
        boldref = layout.get(suffix='boldref',
                             dir='LR',
                             direction='LR',
                             desc='brain',
                             extension=['.nii.gz', '.nii'])[0]
    elif dataset == 'ds001600':
        bold = layout.get(suffix='bold',
                          acquisition='AP',
                          extension=['.nii.gz', '.nii'])[0]

    epidata = layout.get(suffix='epi',
                         desc=None,
                         extension=['.nii.gz', '.nii'])

    matched_pe = check_pes(
        [(im.path, im.get_metadata()['PhaseEncodingDirection'])
         for im in epidata],
        bold.get_metadata()['PhaseEncodingDirection'])

    wf = init_pepolar_unwarp_wf(omp_nthreads=cpu_count(),
                                matched_pe=matched_pe)
    wf.inputs.inputnode.fmaps_epi = [
        (im.path, im.get_metadata()['PhaseEncodingDirection'])
        for im in epidata
    ]
    wf.inputs.inputnode.bold_pe_dir = bold.get_metadata(
    )['PhaseEncodingDirection']

    if output_path:
        from nipype.interfaces import utility as niu
        from ..pepolar import Workflow
        from ...interfaces.reportlets import FieldmapReportlet

        boiler = Workflow(name='boiler_%s' % dataset)

        split_field = pe.Node(niu.Function(function=_split_field),
                              name='split_field')

        if dataset == 'ds001600':
            from niworkflows.func.util import init_bold_reference_wf
            gen_ref = init_bold_reference_wf(omp_nthreads=cpu_count(),
                                             bold_file=bold.path)
            boiler.connect([(gen_ref, wf, [
                ('outputnode.ref_image', 'inputnode.in_reference'),
                ('outputnode.ref_image_brain', 'inputnode.in_reference_brain')
            ])])
        else:
            wf.inputs.inputnode.in_reference_brain = boldref.path
            wf.inputs.inputnode.in_reference = boldref.path

        rep = pe.Node(FieldmapReportlet(), 'simple_report')
        dsink = pe.Node(DerivativesDataSink(base_directory=str(output_path),
                                            keep_dtype=True,
                                            desc='pepolar'),
                        name='dsink')
        dsink.interface.out_path_base = 'sdcflows'
        dsink.inputs.source_file = epidata[0].path

        boiler.connect([
            (wf, split_field, [('inputnode.bold_pe_dir', 'pe_dir'),
                               ('outputnode.out_warp', 'in_field')]),
            (split_field, rep, [('out', 'fieldmap')]),
            (
                wf,
                rep,
                [
                    # ('outputnode.out_warp', 'fieldmap'),
                    ('outputnode.out_reference_brain', 'reference'),
                    ('outputnode.out_mask', 'mask')
                ]),
            (rep, dsink, [('out_report', 'in_file')]),
        ])

        if workdir:
            boiler.base_dir = str(workdir)
        boiler.run(plugin='MultiProc', plugin_args={'n_procs': cpu_count()})
Example #23
def get_ds_wf(out_dir, name="get_ds_wf"):
    out_dir = Path(out_dir)
    wf = Workflow(name=name)

    base_directory = str(out_dir.parent)
    out_path_base = str(out_dir.name)

    inputnode = Node(niu.IdentityInterface(fields=[
        'flair_biascorr', 't1w_brain', 'brainmask', 'wm_mask', 'vent_mask',
        'distancemap', 'perivent_mask', 'deepWM_mask', 'bids_flair_file',
        "generic_bids_file", "space", "t1w_to_flair", "flair_mniSp",
        "flair_to_mni"
    ]),
                     name='inputnode')

    ds_flair_biascorr = Node(DerivativesDataSink(base_directory=base_directory,
                                                 out_path_base=out_path_base),
                             name="ds_flair_biascorr")
    ds_flair_biascorr.inputs.suffix = "FLAIR_biascorr"
    wf.connect(inputnode, "flair_biascorr", ds_flair_biascorr, "in_file")
    wf.connect(inputnode, "bids_flair_file", ds_flair_biascorr, "source_file")

    ds_wmmask = Node(DerivativesDataSink(base_directory=base_directory,
                                         out_path_base=out_path_base),
                     name="ds_wmmask")
    ds_wmmask.inputs.desc = "wmmask"
    wf.connect(inputnode, "wm_mask", ds_wmmask, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_wmmask, "source_file")
    wf.connect(inputnode, "space", ds_wmmask, "space")

    ds_ventmask = Node(DerivativesDataSink(base_directory=base_directory,
                                           out_path_base=out_path_base),
                       name="ds_ventmask")
    ds_ventmask.inputs.desc = "ventmask"
    wf.connect(inputnode, "vent_mask", ds_ventmask, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_ventmask, "source_file")
    wf.connect(inputnode, "space", ds_ventmask, "space")

    ds_distancemap = Node(DerivativesDataSink(base_directory=base_directory,
                                              out_path_base=out_path_base),
                          name="ds_distancemap")
    ds_distancemap.inputs.desc = "distanceVent"
    wf.connect(inputnode, "distancemap", ds_distancemap, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_distancemap, "source_file")
    wf.connect(inputnode, "space", ds_distancemap, "space")

    ds_perivent_mask = Node(DerivativesDataSink(base_directory=base_directory,
                                                out_path_base=out_path_base),
                            name="ds_perivent_mask")
    ds_perivent_mask.inputs.desc = "periventmask"
    wf.connect(inputnode, "perivent_mask", ds_perivent_mask, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_perivent_mask, "source_file")
    wf.connect(inputnode, "space", ds_perivent_mask, "space")

    ds_deepWM_mask = Node(DerivativesDataSink(base_directory=base_directory,
                                              out_path_base=out_path_base),
                          name="ds_deepWM_mask")
    ds_deepWM_mask.inputs.desc = "deepWMmask"
    wf.connect(inputnode, "deepWM_mask", ds_deepWM_mask, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_deepWM_mask, "source_file")
    wf.connect(inputnode, "space", ds_deepWM_mask, "space")

    ds_t1w_brain = Node(DerivativesDataSink(base_directory=base_directory,
                                            out_path_base=out_path_base),
                        name="ds_t1w_brain")
    ds_t1w_brain.inputs.desc = "t1w_brain"
    wf.connect(inputnode, "t1w_brain", ds_t1w_brain, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_t1w_brain, "source_file")
    wf.connect(inputnode, "space", ds_t1w_brain, "space")

    ds_brainmask = Node(DerivativesDataSink(base_directory=base_directory,
                                            out_path_base=out_path_base),
                        name="ds_brainmask")
    ds_brainmask.inputs.desc = "brainmask"
    wf.connect(inputnode, "brainmask", ds_brainmask, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_brainmask, "source_file")
    wf.connect(inputnode, "space", ds_brainmask, "space")

    ds_t1w_to_flair = Node(DerivativesDataSink(base_directory=base_directory,
                                               out_path_base=out_path_base,
                                               allowed_entities=['from', 'to'],
                                               **{'from': 't1w'}),
                           name="ds_t1w_to_flair")

    wf.connect(inputnode, "t1w_to_flair", ds_t1w_to_flair, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_t1w_to_flair, "source_file")
    wf.connect(inputnode, "space", ds_t1w_to_flair, "to")

    # MNI outputs
    ds_flair_mniSp = Node(DerivativesDataSink(base_directory=base_directory,
                                              out_path_base=out_path_base),
                          name="ds_flair_mniSp")
    ds_flair_mniSp.inputs.suffix = "FLAIR"
    ds_flair_mniSp.inputs.space = "MNI"
    ds_flair_mniSp.inputs.desc = "12dof"
    wf.connect(inputnode, "flair_mniSp", ds_flair_mniSp, "in_file")
    wf.connect(inputnode, "bids_flair_file", ds_flair_mniSp, "source_file")

    ds_flair_to_mni = Node(DerivativesDataSink(base_directory=base_directory,
                                               out_path_base=out_path_base,
                                               allowed_entities=['from', 'to'],
                                               **{'to': 'MNI'}),
                           name="ds_flair_to_mni")
    ds_flair_to_mni.inputs.desc = "12dof"
    wf.connect(inputnode, "flair_to_mni", ds_flair_to_mni, "in_file")
    wf.connect(inputnode, "generic_bids_file", ds_flair_to_mni, "source_file")
    wf.connect(inputnode, "space", ds_flair_to_mni, "from")
    return wf
Example #24
def init_fmap2field_wf(omp_nthreads,
                       debug,
                       name='fmap2field_wf',
                       generate_report=True):
    """
    Convert the estimated fieldmap in Hz into a displacements field.

    This workflow takes in a fieldmap and calculates the corresponding
    displacements field (in other words, an ANTs-compatible warp file).

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fmap import init_fmap2field_wf
            wf = init_fmap2field_wf(omp_nthreads=8,
                                    debug=False)

    Parameters
    ----------
    omp_nthreads : int
        Maximum number of threads an individual process may use.
    debug : bool
        Run fast configurations of registrations.
    name : str
        Unique name of this workflow.

    Inputs
    ------
    in_reference
        the reference image
    in_reference_brain
        the reference image (skull-stripped)
    metadata
        metadata associated with the ``in_reference`` EPI input
    fmap
        the fieldmap in Hz
    fmap_ref
        the reference (anatomical) image corresponding to ``fmap``
    fmap_mask
        a brain mask corresponding to ``fmap``


    Outputs
    -------
    out_reference
        the ``in_reference`` after unwarping
    out_reference_brain
        the ``in_reference`` after unwarping and skullstripping
    out_warp
        the corresponding :abbr:`DFM (displacements field map)` compatible with
        ANTs
    out_jacobian
        the jacobian of the field (for drop-out alleviation)
    out_mask
        mask of the unwarped input file

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The *fieldmap* was then co-registered to the target EPI (echo-planar imaging)
reference run and converted to a displacements field map (amenable to registration
tools such as ANTs) with FSL's `fugue` and other *SDCflows* tools.
"""
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_reference', 'in_reference_brain', 'metadata', 'fmap_ref',
        'fmap_mask', 'fmap'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_warp']),
                         name='outputnode')

    # Register the reference of the fieldmap to the reference
    # of the target image (the one that shall be corrected)
    ants_settings = pkgr.resource_filename('sdcflows',
                                           'data/fmap-any_registration.json')
    if debug:
        ants_settings = pkgr.resource_filename(
            'sdcflows', 'data/fmap-any_registration_testing.json')

    fmap2ref_reg = pe.Node(ANTSRegistrationRPT(
        generate_report=False,
        from_file=ants_settings,
        output_inverse_warped_image=True,
        output_warped_image=True),
                           name='fmap2ref_reg',
                           n_procs=omp_nthreads)

    # Map the VSM into the EPI space
    fmap2ref_apply = pe.Node(ANTSApplyTransformsRPT(generate_report=False,
                                                    dimension=3,
                                                    interpolation='BSpline',
                                                    float=True),
                             name='fmap2ref_apply')

    fmap_mask2ref_apply = pe.Node(ANTSApplyTransformsRPT(
        generate_report=False,
        dimension=3,
        interpolation='MultiLabel',
        float=True),
                                  name='fmap_mask2ref_apply')

    # Fieldmap to rads and then to voxels (VSM - voxel shift map)
    torads = pe.Node(FieldToRadS(fmap_range=0.5), name='torads')

    get_ees = pe.Node(niu.Function(function=_get_ees, output_names=['ees']),
                      name='get_ees')

    gen_vsm = pe.Node(fsl.FUGUE(save_unmasked_shift=True), name='gen_vsm')
    # Convert the VSM into a DFM (displacements field map)
    # or: FUGUE shift to ANTS warping.
    vsm2dfm = pe.Node(FUGUEvsm2ANTSwarp(), name='vsm2dfm')

    workflow.connect([
        (inputnode, fmap2ref_reg, [('fmap_ref', 'moving_image'),
                                   ('in_reference_brain', 'fixed_image')]),
        (inputnode, fmap2ref_apply, [('fmap', 'input_image'),
                                     ('in_reference', 'reference_image')]),
        (inputnode, fmap_mask2ref_apply, [('in_reference', 'reference_image'),
                                          ('fmap_mask', 'input_image')]),
        (inputnode, get_ees, [('in_reference', 'in_file'),
                              ('metadata', 'in_meta')]),
        (inputnode, gen_vsm, [(('metadata', _get_pedir_fugue),
                               'unwarp_direction')]),
        (inputnode, vsm2dfm, [(('metadata', _get_pedir_bids), 'pe_dir')]),
        (fmap2ref_reg, fmap2ref_apply, [('composite_transform', 'transforms')
                                        ]),
        (fmap2ref_reg, fmap_mask2ref_apply, [('composite_transform',
                                              'transforms')]),
        (fmap2ref_apply, torads, [('output_image', 'in_file')]),
        (fmap_mask2ref_apply, gen_vsm, [('output_image', 'mask_file')]),
        (gen_vsm, vsm2dfm, [('shift_out_file', 'in_file')]),
        (get_ees, gen_vsm, [('ees', 'dwell_time')]),
        (torads, gen_vsm, [('out_file', 'fmap_in_file')]),
        (vsm2dfm, outputnode, [('out_file', 'out_warp')]),
    ])

    if generate_report:
        from niworkflows.interfaces.bids import DerivativesDataSink
        from ..interfaces.reportlets import FieldmapReportlet

        fmap_rpt = pe.Node(FieldmapReportlet(reference_label='EPI Reference',
                                             moving_label='Magnitude',
                                             show='both'),
                           name='fmap_rpt')
        ds_report_sdc = pe.Node(DerivativesDataSink(desc='fieldmap',
                                                    suffix='bold',
                                                    datatype='figures'),
                                name='ds_report_fmap',
                                mem_gb=0.01,
                                run_without_submitting=True)

        workflow.connect([
            (inputnode, fmap_rpt, [('in_reference', 'reference')]),
            (fmap2ref_reg, fmap_rpt, [('warped_image', 'moving')]),
            (fmap_mask2ref_apply, fmap_rpt, [('output_image', 'mask')]),
            (vsm2dfm, fmap_rpt, [('fieldmap', 'fieldmap')]),
            (fmap_rpt, ds_report_sdc, [('out_report', 'in_file')]),
        ])

    return workflow
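
# A minimal usage sketch of the factory defined above; file paths are
# placeholders and the metadata keys follow BIDS conventions.
fmap2field_wf = init_fmap2field_wf(omp_nthreads=4, debug=False,
                                   generate_report=False)
fmap2field_wf.inputs.inputnode.in_reference = 'sub-01_boldref.nii.gz'
fmap2field_wf.inputs.inputnode.in_reference_brain = 'sub-01_desc-brain_boldref.nii.gz'
fmap2field_wf.inputs.inputnode.metadata = {'PhaseEncodingDirection': 'j-',
                                           'EffectiveEchoSpacing': 0.00059}
fmap2field_wf.inputs.inputnode.fmap = 'sub-01_fieldmap.nii.gz'
fmap2field_wf.inputs.inputnode.fmap_ref = 'sub-01_magnitude.nii.gz'
fmap2field_wf.inputs.inputnode.fmap_mask = 'sub-01_desc-brain_mask.nii.gz'
fmap2field_wf.run()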
Example #25
0
from bids.layout import BIDSLayout
#from nipype import Workflow, Node, Function, MapNode
from niworkflows.interfaces.bids import DerivativesDataSink
import os

# Set project variables and set up workflow
project_dir = os.path.abspath('../..')
data_dir = 'data/ds000171/'
#wf = Workflow(name='wf', base_dir = os.getcwd())

# Leverage BIDS to get subject list
layout = BIDSLayout(os.path.join(project_dir, data_dir))
subject_list = layout.get_subjects()

a_path = layout.get(subject='control01', suffix='T1w')[0].path

dsink = DerivativesDataSink()
dsink.inputs.base_directory = os.getcwd()
dsink.inputs.desc = 'preprocessed'
dsink.inputs.out_path_base = "fmri-rep"

dsink.inputs.source_file = a_path
dsink.inputs.in_file = a_path
res = dsink.run()

#wf.connect(grab_anat, 'T1w', dsink, 'source_file')
#wf.connect(grab_anat, 'T1w', dsink, 'in_file')
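
# With the inputs above, DerivativesDataSink reads the BIDS entities from
# ``source_file`` and writes a renamed copy of ``in_file`` under
# base_directory/out_path_base, adding the ``desc`` entity. A plausible
# (illustrative, not verified) result:
#   ./fmri-rep/sub-control01/anat/sub-control01_desc-preprocessed_T1w.nii.gz
print(res.outputs.out_file)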
Ejemplo n.º 26
0
#                                                 mask_size=5,
#                                                 threshold=1000),
#                                                 name='modelestimate',
#                                                 iterfield = ['design_file',
#                                                              'in_file',
#                                                              'tcon_file'])
feat_select = pe.Node(nio.SelectFiles({
    'cope': 'stats/cope*.nii.gz',
    'pe': 'stats/pe[0-9][0-9].nii.gz',
    'tstat': 'stats/tstat*.nii.gz',
    'varcope': 'stats/varcope*.nii.gz',
    'zstat': 'stats/zstat*.nii.gz',
}), name='feat_select')

ds_cope = pe.Node(DerivativesDataSink(
    base_directory=str(output_dir), keep_dtype=False, suffix='cope',
    desc='intask'), name='ds_cope', run_without_submitting=True)

ds_varcope = pe.Node(DerivativesDataSink(
    base_directory=str(output_dir), keep_dtype=False, suffix='varcope',
    desc='intask'), name='ds_varcope', run_without_submitting=True)

ds_zstat = pe.Node(DerivativesDataSink(
    base_directory=str(output_dir), keep_dtype=False, suffix='zstat',
    desc='intask'), name='ds_zstat', run_without_submitting=True)

ds_tstat = pe.Node(DerivativesDataSink(
    base_directory=str(output_dir), keep_dtype=False, suffix='tstat',
    desc='intask'), name='ds_tstat', run_without_submitting=True)

# Route each FEAT statistical map to its datasink (a plausible completion;
# the snippet was truncated at this connect call).
workflow.connect([
    (feat_select, ds_cope, [('cope', 'in_file')]),
    (feat_select, ds_varcope, [('varcope', 'in_file')]),
    (feat_select, ds_zstat, [('zstat', 'in_file')]),
    (feat_select, ds_tstat, [('tstat', 'in_file')]),
])
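
# Assumption for this fragment to run: feat_select.inputs.base_directory must
# point at a FEAT output directory so the 'stats/*.nii.gz' templates resolve,
# and each ds_* node still needs a source_file to derive the BIDS entities.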
Ejemplo n.º 27
0
def bianca_threshold(bianca_dir,
                     mask_dir,
                     flair_prep_dir,
                     wd_dir,
                     crash_dir,
                     out_dir,
                     subjects_sessions,
                     flair_acq,
                     thresholds,
                     n_cpu=1,
                     run_BiancaOverlapMeasures=True):
    out_dir.mkdir(exist_ok=True, parents=True)

    wf = Workflow(name="bianca_threshold")
    wf.base_dir = wd_dir
    wf.config.remove_unnecessary_outputs = False
    wf.config["execution"]["crashdump_dir"] = crash_dir
    wf.config["monitoring"]["enabled"] = "true"

    def format_t(s):
        return f"thresh{s}"

    base_directory = str(out_dir.parent)
    out_path_base = str(out_dir.name)

    subjects, sessions = list(zip(*subjects_sessions))
    infosource = Node(niu.IdentityInterface(fields=["subject", "session"]),
                      name="infosource")
    infosource.iterables = [
        ("subject", subjects),
        ("session", sessions),
    ]
    infosource.synchronize = True

    threshsource = Node(niu.IdentityInterface(fields=["threshold"]),
                        name="threshsource")
    threshsource.iterables = [("threshold", thresholds)]

    def subject_info_fnc(bianca_dir, mask_dir, flair_prep_dir, subject,
                         session, flair_acq, run_BiancaOverlapMeasures):
        from pathlib import Path
        sub_ses = f"sub-{subject}_ses-{session}"
        bianca_lpm = list(
            Path(bianca_dir).glob(
                f"sub-{subject}/ses-{session}/anat/{sub_ses}_acq-{flair_acq}_*_FLAIR_LPM.nii.gz"
            ))[0]

        if run_BiancaOverlapMeasures:
            manual_mask = list(
                Path(mask_dir).glob(
                    f"sub-{subject}/ses-{session}/{sub_ses}_acq-{flair_acq}_*_FLAIR_mask_goldstandard_new.nii.gz"
                ))[0]
        else:
            manual_mask = None

        wm_mask = list(
            Path(flair_prep_dir).glob(
                f"sub-{subject}/ses-{session}/anat/{sub_ses}_space-flair{flair_acq}_desc-wmmask.nii.gz"
            ))[0]
        deepwm_mask = list(
            Path(flair_prep_dir).glob(
                f"sub-{subject}/ses-{session}/anat/{sub_ses}_space-flair{flair_acq}_desc-deepWMmask.nii.gz"
            ))[0]
        pervent_mask = list(
            Path(flair_prep_dir).glob(
                f"sub-{subject}/ses-{session}/anat/{sub_ses}_space-flair{flair_acq}_desc-periventmask.nii.gz"
            ))[0]
        out_list = [
            bianca_lpm, manual_mask, wm_mask, deepwm_mask, pervent_mask
        ]
        # Convert to str (not every interface accepts Path objects), but keep
        # None as None so a missing manual mask does not become the string 'None'.
        return [str(o) if o is not None else None for o in out_list]

    grabber = Node(niu.Function(input_names=[
        "bianca_dir", "mask_dir", "flair_prep_dir", "subject", "session",
        "flair_acq", "run_BiancaOverlapMeasures"
    ],
                                output_names=[
                                    "bianca_lpm", "manual_mask", "wm_mask",
                                    "deepwm_mask", "pervent_mask"
                                ],
                                function=subject_info_fnc),
                   name="grabber")
    grabber.inputs.bianca_dir = bianca_dir
    grabber.inputs.mask_dir = mask_dir
    grabber.inputs.flair_prep_dir = flair_prep_dir
    grabber.inputs.flair_acq = flair_acq
    grabber.inputs.run_BiancaOverlapMeasures = run_BiancaOverlapMeasures

    wf.connect([(infosource, grabber, [
        ("subject", "subject"),
        ("session", "session"),
    ])])
    # threshold lpm
    bianca_lpm_masked = Node(fsl.ApplyMask(), name="bianca_lpm_masked")
    wf.connect(grabber, "bianca_lpm", bianca_lpm_masked, "in_file")
    wf.connect(grabber, "wm_mask", bianca_lpm_masked, "mask_file")

    thresholded_bianca_lpm_mask = Node(fsl.Threshold(),
                                       name="thresholded_bianca_lpm_mask")
    wf.connect(bianca_lpm_masked, "out_file", thresholded_bianca_lpm_mask,
               "in_file")
    wf.connect(threshsource, "threshold", thresholded_bianca_lpm_mask,
               "thresh")
    thresholded_bianca_lpm_mask.inputs.args = "-bin"

    ds_masked = Node(DerivativesDataSink(base_directory=base_directory,
                                         out_path_base=out_path_base),
                     name="ds_masked")
    ds_masked.inputs.desc = "biancamasked"
    wf.connect(bianca_lpm_masked, "out_file", ds_masked, "in_file")
    wf.connect(grabber, "bianca_lpm", ds_masked, "source_file")

    ds_masked_thr_bin = Node(DerivativesDataSink(base_directory=base_directory,
                                                 out_path_base=out_path_base),
                             name="ds_masked_thr_bin")
    ds_masked_thr_bin.inputs.suffix = "biancaLPMmaskedThrBin"
    wf.connect(threshsource, ("threshold", format_t), ds_masked_thr_bin,
               "desc")
    wf.connect(thresholded_bianca_lpm_mask, "out_file", ds_masked_thr_bin,
               "in_file")
    wf.connect(grabber, "bianca_lpm", ds_masked_thr_bin, "source_file")

    def str_to_file_fct(s):
        """Write a stats string to a text file so DerivativesDataSink can sink it."""
        from pathlib import Path
        out_file = Path.cwd() / "out.txt"
        out_file.write_text(s)
        return str(out_file)

    # volume extraction
    ## total
    cluster_stats_total = Node(BiancaClusterStats(),
                               name="cluster_stats_total")
    cluster_stats_total.inputs.min_cluster_size = 0
    wf.connect(bianca_lpm_masked, "out_file", cluster_stats_total,
               "bianca_output_map")
    wf.connect(threshsource, "threshold", cluster_stats_total, "threshold")
    wf.connect(grabber, "wm_mask", cluster_stats_total, "mask_file")

    str_to_file_total = Node(niu.Function(input_names=["s"],
                                          output_names=["out_file"],
                                          function=str_to_file_fct),
                             name="str_to_file_total")
    wf.connect(cluster_stats_total, "out_stat", str_to_file_total, "s")

    ds_cluster_stats_total = Node(DerivativesDataSink(
        base_directory=base_directory, out_path_base=out_path_base),
                                  name="ds_cluster_stats_total")
    ds_cluster_stats_total.inputs.suffix = "ClusterStatsTotal"
    wf.connect(threshsource, ("threshold", format_t), ds_cluster_stats_total,
               "desc")
    wf.connect(str_to_file_total, "out_file", ds_cluster_stats_total,
               "in_file")
    wf.connect(grabber, "bianca_lpm", ds_cluster_stats_total, "source_file")

    ## deep wm
    cluster_stats_deepwm = Node(BiancaClusterStats(),
                                name="cluster_stats_deepwm")
    cluster_stats_deepwm.inputs.min_cluster_size = 0
    wf.connect(bianca_lpm_masked, "out_file", cluster_stats_deepwm,
               "bianca_output_map")
    wf.connect(threshsource, "threshold", cluster_stats_deepwm, "threshold")
    wf.connect(grabber, "deepwm_mask", cluster_stats_deepwm, "mask_file")

    str_to_file_deepwm = Node(niu.Function(input_names=["s"],
                                           output_names=["out_file"],
                                           function=str_to_file_fct),
                              name="str_to_file_deepwm")
    wf.connect(cluster_stats_deepwm, "out_stat", str_to_file_deepwm, "s")

    ds_cluster_stats_deepwm = Node(DerivativesDataSink(
        base_directory=base_directory, out_path_base=out_path_base),
                                   name="ds_cluster_stats_deepwm")
    ds_cluster_stats_deepwm.inputs.suffix = "ClusterStatsdeepwm"
    wf.connect(threshsource, ("threshold", format_t), ds_cluster_stats_deepwm,
               "desc")
    wf.connect(str_to_file_deepwm, "out_file", ds_cluster_stats_deepwm,
               "in_file")
    wf.connect(grabber, "bianca_lpm", ds_cluster_stats_deepwm, "source_file")

    ## perivent wm
    cluster_stats_perventwm = Node(BiancaClusterStats(),
                                   name="cluster_stats_perventwm")
    cluster_stats_perventwm.inputs.min_cluster_size = 0
    wf.connect(bianca_lpm_masked, "out_file", cluster_stats_perventwm,
               "bianca_output_map")
    wf.connect(threshsource, "threshold", cluster_stats_perventwm, "threshold")
    wf.connect(grabber, "pervent_mask", cluster_stats_perventwm, "mask_file")

    str_to_file_perventwm = Node(niu.Function(input_names=["s"],
                                              output_names=["out_file"],
                                              function=str_to_file_fct),
                                 name="str_to_file_perventwm")
    wf.connect(cluster_stats_perventwm, "out_stat", str_to_file_perventwm, "s")

    ds_cluster_stats_perventwm = Node(DerivativesDataSink(
        base_directory=base_directory, out_path_base=out_path_base),
                                      name="ds_cluster_stats_perventwm")
    ds_cluster_stats_perventwm.inputs.suffix = "ClusterStatsperventwm"
    wf.connect(threshsource, ("threshold", format_t),
               ds_cluster_stats_perventwm, "desc")
    wf.connect(str_to_file_perventwm, "out_file", ds_cluster_stats_perventwm,
               "in_file")
    wf.connect(grabber, "bianca_lpm", ds_cluster_stats_perventwm,
               "source_file")

    if run_BiancaOverlapMeasures:
        overlap = Node(BiancaOverlapMeasures(), name="overlap")
        wf.connect(bianca_lpm_masked, "out_file", overlap, "lesionmask")
        wf.connect(grabber, "manual_mask", overlap, "manualmask")
        wf.connect(threshsource, "threshold", overlap, "threshold")
        overlap.inputs.saveoutput = 1

        ds_overlap = Node(DerivativesDataSink(base_directory=base_directory,
                                              out_path_base=out_path_base),
                          name="ds_overlap")
        ds_overlap.inputs.suffix = "overlap"
        wf.connect(threshsource, ("threshold", format_t), ds_overlap, "desc")
        wf.connect(overlap, "out_file", ds_overlap, "in_file")
        wf.connect(grabber, "bianca_lpm", ds_overlap, "source_file")

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_cpu})
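
# Hypothetical invocation sketch; every path, the acquisition label, and the
# threshold values below are assumptions for illustration only.
from pathlib import Path
bianca_threshold(bianca_dir=Path("/data/derivatives/bianca"),
                 mask_dir=Path("/data/derivatives/manual_masks"),
                 flair_prep_dir=Path("/data/derivatives/flair_prep"),
                 wd_dir=Path("/scratch/wd"),
                 crash_dir=Path("/scratch/crash"),
                 out_dir=Path("/data/derivatives/bianca_thresholded"),
                 subjects_sessions=[("01", "tp1"), ("02", "tp1")],
                 flair_acq="2D",
                 thresholds=[0.7, 0.9, 0.95],
                 n_cpu=4)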
Ejemplo n.º 28
0
def prepare_flair_intNorm(flair_prep_dir, out_dir, wd_dir, crash_dir, subjects_sessions, flair_acq, n_cpu=-1):
    out_dir.mkdir(exist_ok=True, parents=True)
    export_version(out_dir)

    wf = Workflow(name="prepare_flair_intNorm")
    wf.base_dir = wd_dir
    wf.config.remove_unnecessary_outputs = False
    wf.config["execution"]["crashdump_dir"] = crash_dir
    wf.config["monitoring"]["enabled"] = "true"

    subjects, sessions = list(zip(*subjects_sessions))
    # flair_acq is set directly on the grabber below, so it is not a field here
    infosource = Node(niu.IdentityInterface(fields=["subject", "session"]), name="infosource")
    infosource.iterables = [("subject", subjects),
                            ("session", sessions)]
    infosource.synchronize = True

    def subject_info_fnc(flair_prep_dir, subject, session, flair_acq):
        from pathlib import Path

        sub_ses = f"sub-{subject}_ses-{session}"
        flair_files = list(Path(flair_prep_dir).glob(
            f"sub-{subject}/ses-{session}/anat/{sub_ses}_acq-{flair_acq}_*_FLAIR_biascorr.nii.gz"))
        assert len(flair_files) == 1, f"Expected one file, but found {flair_files}"
        flair_file = flair_files[0]

        brain_masks = list(Path(flair_prep_dir).glob(
            f"sub-{subject}/ses-{session}/anat/{sub_ses}_space-flair{flair_acq}_desc-brainmask.nii.gz"))
        assert len(brain_masks) == 1, f"Expected one file, but found {brain_masks}"
        brain_mask = brain_masks[0]

        out_list = [flair_file, brain_mask]
        return [str(o) for o in out_list]  # as Path is not taken everywhere

    grabber = Node(niu.Function(input_names=["flair_prep_dir", "subject", "session", "flair_acq"],
                                output_names=["flair_file", "brain_mask"],
                                function=subject_info_fnc),
                   name="grabber"
                   )
    grabber.inputs.flair_prep_dir = flair_prep_dir
    grabber.inputs.flair_acq = flair_acq

    wf.connect([(infosource, grabber, [("subject", "subject"),
                                       ("session", "session")])])

    # adapted from https://gist.github.com/lebedov/94f1caf8a792d80cd91e7b99c1a0c1d7
    # Intensity normalization - subtract minimum, then divide by difference of maximum and minimum:
    img_range = Node(interface=fsl.ImageStats(op_string='-k %s -R'), name='img_range')
    wf.connect(grabber, "flair_file", img_range, "in_file")
    wf.connect(grabber, "brain_mask", img_range, "mask_file")

    def func(in_stat):
        """Build an fslmaths op_string implementing (x - min) / (max - min)."""
        min_val, max_val = in_stat
        return '-sub %s -div %s' % (min_val, (max_val - min_val))

    # Plain Node: ``iterfield`` is a MapNode argument and was silently ignored here
    stat_to_op_string = Node(interface=niu.Function(input_names=['in_stat'],
                                                    output_names=['op_string'],
                                                    function=func),
                             name='stat_to_op_string')
    wf.connect(img_range, "out_stat", stat_to_op_string, "in_stat")

    flair_normalized = Node(interface=fsl.ImageMaths(), name='flair_normalized')
    wf.connect(stat_to_op_string, "op_string", flair_normalized, "op_string")
    wf.connect(grabber, "flair_file", flair_normalized, "in_file")

    base_directory = str(out_dir.parent)
    out_path_base = str(out_dir.name)
    ds_flair_biascorr_intNorm = Node(DerivativesDataSink(base_directory=base_directory, out_path_base=out_path_base),
                                     name="ds_flair_biascorr_intNorm")
    ds_flair_biascorr_intNorm.inputs.suffix = "FLAIR_biascorrIntNorm"
    wf.connect(flair_normalized, "out_file", ds_flair_biascorr_intNorm, "in_file")
    wf.connect(grabber, "flair_file", ds_flair_biascorr_intNorm, "source_file")

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_cpu})
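
# A pure-numpy equivalent of the fslmaths min-max normalization above
# (illustrative sketch; the function and variable names are assumptions).
import nibabel as nib
import numpy as np

def minmax_normalize(img_path, mask_path, out_path):
    """Rescale so the within-mask range maps to [0, 1], as -sub min -div (max - min)."""
    img = nib.load(img_path)
    data = img.get_fdata()
    mask = nib.load(mask_path).get_fdata() > 0
    lo, hi = data[mask].min(), data[mask].max()  # range taken inside the mask (-k mask -R)
    out = (data - lo) / (hi - lo)                # applied to the whole volume
    nib.save(nib.Nifti1Image(out.astype(np.float32), img.affine), out_path)
    return out_path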
Ejemplo n.º 29
0
def main(derivatives, ds):

    if ds == 'ds-01':
        subjects = ['{:02d}'.format(s) for s in range(1, 20)]
    elif ds == 'ds-02':
        subjects = ['{:02d}'.format(s) for s in range(1, 16)]
        subjects.pop(3)  # drop subject '04' (index 3)

    wf_folder = '/tmp/workflow_folders'

    templates = {
        'pca_map':
        op.join(derivatives, ds, 'pca_mni', '{mask}_pca.nii.gz'),
        't1w':
        op.join(derivatives, ds, 'fmriprep', 'sub-{subject}', 'anat',
                'sub-{subject}_desc-preproc_T1w.nii.gz'),
        'mni2t1w':
        op.join(
            derivatives, ds, 'fmriprep', 'sub-{subject}', 'anat',
            'sub-{subject}_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5')
    }

    if ds == 'ds-01':
        templates['individual_mask'] = op.join(
            derivatives, ds, 'conjunct_masks', 'sub-{subject}', 'anat',
            'sub-{subject}_space-FLASH_desc-{mask}_space-T1w.nii.gz')

    elif ds == 'ds-02':
        templates['individual_mask'] = op.join(
            derivatives, ds, 'conjunct_masks', 'sub-{subject}', 'anat',
            'sub-{subject}_desc-{mask}_mask.nii.gz')

    wf = pe.Workflow(name='make_pca_masks_{}'.format(ds), base_dir=wf_folder)

    selector = pe.Node(nio.SelectFiles(templates), name='selector')
    selector.iterables = [('mask', ['stnl', 'stnr']), ('subject', subjects)]

    individual_pca_map = pe.Node(ants.ApplyTransforms(num_threads=4),
                                 name='individual_pca_map')

    wf.connect(selector, 't1w', individual_pca_map, 'reference_image')
    wf.connect(selector, 'pca_map', individual_pca_map, 'input_image')
    wf.connect(selector, 'mni2t1w', individual_pca_map, 'transforms')

    def make_pca_mask(pca_map, mask):
        from nilearn import image
        from nipype.utils.filemanip import split_filename
        import os.path as op

        _, fn, ext = split_filename(mask)

        pca_map = image.load_img(pca_map)
        mask = image.load_img(mask)

        pca_map = image.resample_to_img(pca_map, mask, interpolation='nearest')

        new_mask = image.math_img('pca_map * (mask > 0)',
                                  pca_map=pca_map,
                                  mask=mask)

        # Rescale the nonzero PCA weights into (0, 1] (get_data() is deprecated)
        tmp = new_mask.get_fdata()
        tmp[tmp != 0] -= tmp[tmp != 0].min() - 1e-4
        tmp[tmp != 0] /= tmp[tmp != 0].max()

        new_mask = image.new_img_like(new_mask, tmp)

        new_mask.to_filename(op.abspath('{}_map{}'.format(fn, ext)))

        return new_mask.get_filename()

    make_mask = pe.Node(niu.Function(function=make_pca_mask,
                                     input_names=['pca_map', 'mask'],
                                     output_names=['mask']),
                        name='make_mask')

    wf.connect(individual_pca_map, 'output_image', make_mask, 'pca_map')
    wf.connect(selector, 'individual_mask', make_mask, 'mask')

    def make_submask(mask):
        from nilearn import image
        import numpy as np
        import os.path as op
        from nipype.utils.filemanip import split_filename

        _, fn, ext = split_filename(mask)

        im = image.load_img(mask)

        data = im.get_fdata()  # get_data() is deprecated in nibabel
        # Tertile cut-points over the nonzero PCA weights
        percentiles = np.percentile(data[data != 0], [33, 66])

        mask1 = image.math_img('(im > 0) & (im < {})'.format(percentiles[0]),
                               im=im)
        mask2 = image.math_img('(im > {}) & (im < {})'.format(*percentiles),
                               im=im)
        mask3 = image.math_img('(im > {})'.format(percentiles[1]), im=im)

        fn1 = op.abspath('{}_maskA{}'.format(fn, ext))
        fn2 = op.abspath('{}_maskB{}'.format(fn, ext))
        fn3 = op.abspath('{}_maskC{}'.format(fn, ext))

        mask1.to_filename(fn1)
        mask2.to_filename(fn2)
        mask3.to_filename(fn3)

        return fn3, fn2, fn1

    make_submasksnode = pe.Node(niu.Function(function=make_submask,
                                             input_names=['mask'],
                                             output_names=['submasks']),
                                name='make_submasks')

    wf.connect(make_mask, 'mask', make_submasksnode, 'mask')

    datasink_whole_mask = pe.Node(DerivativesDataSink(
        base_directory=op.join(derivatives, ds),
        space='T1w',
        suffix='roi',
        out_path_base='pca_masks'),
                                  name='datasink_whole_mask')

    def remove_space(fname):  # avoid shadowing the builtin ``input``
        return fname.replace('_space-FLASH', '')

    wf.connect(selector, ('individual_mask', remove_space),
               datasink_whole_mask, 'source_file')
    wf.connect(make_mask, 'mask', datasink_whole_mask, 'in_file')

    datasink_submasks = pe.MapNode(DerivativesDataSink(
        base_directory=op.join(derivatives, ds),
        space='T1w',
        out_path_base='pca_masks'),
                                   iterfield=['suffix', 'in_file'],
                                   name='datasink_submasks')
    datasink_submasks.inputs.suffix = [
        'subroi-A_roi', 'subroi-B_roi', 'subroi-C_roi'
    ]
    wf.connect(selector, ('individual_mask', remove_space), datasink_submasks,
               'source_file')
    wf.connect(make_submasksnode, 'submasks', datasink_submasks, 'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': 8})
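
# Hypothetical driver (the derivatives path is an assumption for illustration):
if __name__ == '__main__':
    main('/data/derivatives', 'ds-01')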