Code example #1
# Assumed imports for this snippet (nose-style yield test):
from nipype.interfaces import afni
from nose.tools import assert_equal


def test_tstat():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(usedefault=True, ),
        ignore_exception=dict(usedefault=True, ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
        ),
        out_file=dict(argstr='-prefix %s', ),
        outputtype=dict(),
    )
    instance = afni.TStat()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
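Note: nose-style yield tests like the one above are not collected by modern pytest. A minimal pytest-parametrized sketch of the same trait-metadata check (using a subset of the map above; assumes only that nipype is installed) might look like:

import pytest
from nipype.interfaces import afni

INPUT_MAP = dict(
    in_file=dict(argstr='%s', mandatory=True),
    out_file=dict(argstr='-prefix %s'),
)


@pytest.mark.parametrize('key, metakey, value',
                         [(key, metakey, value)
                          for key, metadata in INPUT_MAP.items()
                          for metakey, value in metadata.items()])
def test_tstat_traits(key, metakey, value):
    # Same check as above: each trait carries the expected metadata
    instance = afni.TStat()
    assert getattr(instance.inputs.traits()[key], metakey) == value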
Code example #2
File: gradients.py  Project: badamifs/qsiprep
    def _run_interface(self, runtime):
        output_fname = fname_presuffix(self.inputs.dwi_series,
                                       suffix='_b0_series',
                                       use_ext=True,
                                       newpath=runtime.cwd)
        output_mean_fname = fname_presuffix(output_fname,
                                            suffix='_mean',
                                            use_ext=True,
                                            newpath=runtime.cwd)
        img = nb.load(self.inputs.dwi_series)
        # np.int was removed in NumPy 1.24; use the builtin int
        indices = np.array(self.inputs.b0_indices).astype(int)
        # img.get_data() is deprecated in nibabel; np.asanyarray(img.dataobj)
        # reads the data while preserving the on-disk dtype
        new_data = np.asanyarray(img.dataobj)[..., indices]
        nb.Nifti1Image(new_data, img.affine,
                       img.header).to_filename(output_fname)
        self._results['b0_series'] = output_fname
        average_img = afni.TStat(args='-mean',
                                 in_file=output_fname,
                                 outputtype='NIFTI_GZ',
                                 out_file=output_mean_fname)
        average_img.run()
        self._results['b0_average'] = output_mean_fname

        return runtime
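For reference, the afni.TStat call embedded above can be run on its own; a minimal sketch with placeholder filenames (assumes AFNI and nipype are installed):

from nipype.interfaces import afni

# Equivalent command line: 3dTstat -mean -prefix b0_mean.nii.gz b0_series.nii.gz
tstat = afni.TStat(args='-mean',
                   in_file='b0_series.nii.gz',   # placeholder input
                   out_file='b0_mean.nii.gz',    # placeholder output
                   outputtype='NIFTI_GZ')
result = tstat.run()
print(result.outputs.out_file)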
Code example #3
    inputnode2 = pe.Node(
        interface=util.IdentityInterface(fields=['drifter_result']),
        name='inputspec2')
    outputnode2 = pe.Node(
        interface=util.IdentityInterface(fields=['result_func']),
        name='outputspec2')

    inputnode2.inputs.drifter_result = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'

    # Call `fslcpgeom source dest`: copy the geometry from the reoriented
    # NIfTI (source) onto the DRIFTER-corrected NIfTI (dest)
    reoriented_file = results_path + '/' + data + '_1/reorient/corr_epi_reoriented.nii.gz'
    drifted_file = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'
    call(["fslcpgeom", reoriented_file, drifted_file])

    # AFNI skullstrip and mean image skullstrip
    tstat1 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                     name='tstat1')
    automask = pe.Node(interface=afni.Automask(dilate=1,
                                               outputtype="NIFTI_GZ"),
                       name='automask')
    skullstrip = pe.Node(interface=afni.Calc(expr='a*b',
                                             outputtype="NIFTI_GZ"),
                         name='skullstrip')
    tstat2 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                     name='tstat2')

    workflow2.connect(inputnode2, 'drifter_result', tstat1, 'in_file')
    workflow2.connect(tstat1, 'out_file', automask, 'in_file')
    workflow2.connect(automask, 'out_file', skullstrip, 'in_file_b')
    workflow2.connect(inputnode2, 'drifter_result', skullstrip, 'in_file_a')
    workflow2.connect(skullstrip, 'out_file', tstat2, 'in_file')
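The snippet leaves outputnode2 unconnected; a hypothetical final hookup and run, reusing the node names above, could be:

# Hypothetical: expose the skull-stripped functional as the workflow result
# (tstat2's mean image could be exposed instead, depending on downstream use)
workflow2.connect(skullstrip, 'out_file', outputnode2, 'result_func')
workflow2.run()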
Code example #4
def init_dwi_hmc_wf(hmc_transform,
                    hmc_model,
                    hmc_align_to,
                    source_file,
                    num_model_iterations=2,
                    mem_gb=3,
                    omp_nthreads=1,
                    sloppy=False,
                    name="dwi_hmc_wf"):
    """Perform head motion correction and susceptibility distortion correction.

    This workflow uses antsRegistration and an iteratively updated signal model to perform
    motion correction.

    **Parameters**

        hmc_transform: 'Rigid' or 'Affine'
            How many degrees of freedom to incorporate into motion correction
        hmc_model: '3dSHORE', 'none' or 'SH'
            Which model to use for generating signal predictions for hmc. '3dSHORE' requires
            multiple b-values, 'none' will only use b0 images for motion correction and
            'SH' uses spherical harmonics (not implemented yet).
        hmc_align_to: 'first' or 'iterative'
            Which volume should be used to determine the motion-corrected space?
        source_file: str
            Path to one of the original dwi files (used for reportlets)
        num_model_iterations: int
            If ``hmc_model`` is ``'3dSHORE'`` or ``'SH'``, determines the number of times the
            model is updated and motion correction is estimated. Default: 2.

    **Inputs**

        dwi_files: list
            List of single-volume files across all DWI series
        b0_indices: list
            Indexes into ``dwi_files`` that correspond to b=0 volumes
        bvecs: list
            List of paths to single-line bvec files
        bvals: list
            List of paths to single-line bval files
        b0_images: list
            List of single b=0 volumes
        original_files: list
            List of the files from which each DWI volume came.

    **Outputs**

        final_template: str
            Path to the mean of the coregistered b0 images
        forward_transforms: list
            List of ITK transforms that motion-correct the images in ``dwi_files``
        noise_free_dwis: list
            Model-predicted images reverse-transformed into alignment with ``dwi_files``
        cnr_image: str
            If hmc_model is 'none' this is the tsnr of the b=0 images. Otherwise it is the
            model fit divided by the model error in each voxel.
        optimization_data: str
            CSV file tracking the motion estimates across shoreline iterations
    """

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'dwi_files', 'b0_indices', 'bvecs', 'bvals', 'b0_images',
        'original_files', 't1_brain', 't1_mask', 't1_seg'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "final_template", "forward_transforms", "noise_free_dwis", "cnr_image",
        "optimization_data", "final_template_brain", "final_template_mask"
    ]),
                         name='outputnode')

    workflow = Workflow(name=name)
    # Perform unbiased alignment of the b0s
    b0_hmc_wf = init_b0_hmc_wf(align_to=hmc_align_to,
                               transform=hmc_transform,
                               sloppy=sloppy)
    # Tile the transforms so each non-b0 gets the transform from the nearest b0
    match_transforms = pe.Node(MatchTransforms(), name="match_transforms")
    # Make a mask from the output template. It is bias-corrected, so good for masking
    # but bad for using as a b=0 for modeling.
    b0_template_mask = init_dwi_reference_wf(register_t1=True,
                                             name="b0_template_mask",
                                             gen_report=False,
                                             source_file=source_file)

    workflow.connect([
        (inputnode, match_transforms, [('dwi_files', 'dwi_files'),
                                       ('b0_indices', 'b0_indices')]),
        (inputnode, b0_hmc_wf, [('b0_images', 'inputnode.b0_images')]),
        (b0_hmc_wf, outputnode, [('outputnode.final_template',
                                  'final_template')]),
        (b0_hmc_wf, match_transforms, [(('outputnode.forward_transforms',
                                         _list_squeeze), 'transforms')]),
        (inputnode, b0_template_mask, [('t1_brain', 'inputnode.t1_brain'),
                                       ('t1_mask', 'inputnode.t1_mask'),
                                       ('t1_seg', 'inputnode.t1_seg')]),
        (b0_hmc_wf, b0_template_mask, [('outputnode.final_template',
                                        'inputnode.b0_template')]),
        (b0_template_mask, outputnode,
         [('outputnode.ref_image_brain', 'final_template_brain'),
          ('outputnode.dwi_mask', 'final_template_mask')])
    ])

    # If we're just aligning based on the b=0 images, compute the b=0 tsnr as the cnr
    if hmc_model.lower() == "none":
        workflow.__postdesc__ = "Each b>0 image was transformed based on the registration " \
                                " of the nearest b=0 image. "

        concat_b0s = pe.Node(afni.TCat(outputtype="NIFTI_GZ"),
                             name="concat_b0s")
        b0_tsnr = pe.Node(afni.TStat(options=' -cvarinvNOD ',
                                     outputtype='NIFTI_GZ'),
                          name='b0_tsnr')
        workflow.connect([(match_transforms, outputnode,
                           [('transforms', 'forward_transforms')]),
                          (b0_hmc_wf, concat_b0s,
                           [('outputnode.aligned_images', 'in_files')]),
                          (concat_b0s, b0_tsnr, [('out_file', 'in_file')]),
                          (b0_tsnr, outputnode, [('out_file', 'cnr_image')])])
        return workflow

    # Do model-based motion correction
    dwi_model_hmc_wf = init_dwi_model_hmc_wf(hmc_model,
                                             hmc_transform,
                                             mem_gb,
                                             omp_nthreads,
                                             num_iters=num_model_iterations)

    # Warp the modeled images into non-motion-corrected space
    uncorrect_model_images = pe.MapNode(
        ants.ApplyTransforms(invert_transform_flags=[True],
                             interpolation='LanczosWindowedSinc'),
        iterfield=['input_image', 'reference_image', 'transforms'],
        name='uncorrect_model_images')
    workflow.__postdesc__ = "Model-generated images were transformed into alignment with each " \
                            "b>0 image. Both slicewise and whole-brain QC measures (cross " \
                            "correlation and R^2) were calculated."
    workflow.connect([
        (b0_hmc_wf, dwi_model_hmc_wf, [('outputnode.aligned_images',
                                        'inputnode.warped_b0_images')]),
        (b0_template_mask, dwi_model_hmc_wf, [('outputnode.dwi_mask',
                                               'inputnode.warped_b0_mask')]),
        (inputnode, dwi_model_hmc_wf, [('dwi_files', 'inputnode.dwi_files'),
                                       ('b0_indices', 'inputnode.b0_indices'),
                                       ('bvecs', 'inputnode.bvec_files'),
                                       ('bvals', 'inputnode.bval_files')]),
        (match_transforms, dwi_model_hmc_wf,
         [('transforms', 'inputnode.initial_transforms')]),
        (dwi_model_hmc_wf, outputnode,
         [('outputnode.hmc_transforms', 'forward_transforms'),
          ('outputnode.optimization_data', 'optimization_data'),
          ('outputnode.cnr_image', 'cnr_image')]),
        (dwi_model_hmc_wf, uncorrect_model_images,
         [('outputnode.model_predicted_images', 'input_image'),
          ('outputnode.hmc_transforms', 'transforms')]),
        (inputnode, uncorrect_model_images, [('dwi_files', 'reference_image')
                                             ]),
        (uncorrect_model_images, outputnode, [('output_image',
                                               'noise_free_dwis')])
    ])
    datasinks = [
        node for node in workflow.list_node_names()
        if node.split(".")[-1].startswith("ds_")
    ]

    for ds in datasinks:
        workflow.get_node(ds).inputs.source_file = source_file

    return workflow
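A hedged invocation sketch, with placeholder arguments drawn from the docstring's allowed values:

wf = init_dwi_hmc_wf(hmc_transform='Rigid',
                     hmc_model='3dSHORE',
                     hmc_align_to='iterative',
                     source_file='/data/sub-01_dwi.nii.gz')  # placeholder path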
Code example #5
File: func.py  Project: sammba-mri/sammba-mri
def coregister_fmri_session(session_data,
                            t_r,
                            write_dir,
                            brain_volume,
                            use_rats_tool=True,
                            slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False,
                            voxel_size_x=.1,
                            voxel_size_y=.1,
                            verbose=True,
                            **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a rigid body
    registration and then on a per-slice basis (only a fine correction, this is
    mostly for correction of EPI distortion).


    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as it identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is performed
        prior to the warp. Useful if the images headers have missing/wrong
        information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : dict of extra keyword arguments
        Extra keyword arguments, passed to the interfaces' ``environ`` variable.

    Returns
    -------
    The same sequence with each animal_data updated: the following attributes
    are added
        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, the RATS tool is used for brain extraction
    and must be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Can not locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Can not locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        allineate = memory.cache(afni.Allineate)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [
                tshift, volreg, allineate, allineate2, tstat, compute_mask,
                calc, unifize, resample, slicer, warp_apply, qwarp, merge
        ]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove after fixed bug
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate removes the obliquity. Copying the geometry back like this
    # would not be a good way to re-add it for an AFNI file, since it would
    # drop the motion-correction info from the header; as it happens the file
    # is NIfTI, which does not store that info, so this is irrelevant here.
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename,
                      args='-mean',
                      outputtype='NIFTI_GZ',
                      environ=environ)

    # Update outputs
    output_files.extend([
        thresholded_filename, out_volreg.outputs.oned_matrix_save,
        out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
        allineated_filename, out_tstat.outputs.out_file
    ])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensity bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this sense
        out_allineate = allineate2(
            in_file=out_calc_func.outputs.out_file,
            reference=out_calc_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_calc_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_calc_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([
            out_compute_mask_func.outputs.out_file,
            out_calc_func.outputs.out_file,
            out_compute_mask_anat.outputs.out_file,
            out_calc_anat.outputs.out_file, rigid_transform_file,
            out_allineate.outputs.out_file
        ])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(in_file=unbiased_anat_filename,
                                  master=unbiased_func_filename,
                                  in_matrix=out_catmatvec.outputs.out_file,
                                  out_file=fname_presuffix(
                                      unbiased_anat_filename,
                                      suffix='_shr_in_func_space'),
                                  environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(registered_anat_filename,
                                                     unbiased_func_filename,
                                                     verbose=verbose)

    # Concatenate all the anat-to-func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat',
                                   use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # The resampling below also helps with slices where there is no signal
    # (for example the rostral end of some anatomicals)

    # The inverse warp frequently fails; resampling can help it work better
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
             resampled_bias_corrected_filenames,
             resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
            '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
            '-parfix 7 0 -parfix 9 0 '
            '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # These are files generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source, suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii',
                            use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D',
                            use_ext=False)
        ])

    # Resample the mean volume back to the initial resolution
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename,
         resampled_warped_slice) in zip(sliced_registered_anat_filenames,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(sliced_func_filenames,
                                                     warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices!
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames + warped_slices +
                        warp_filenames + resampled_warped_slices_oblique +
                        sliced_func_filenames + warped_func_slices)
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
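A hedged usage sketch; the exact SessionData fields are an assumption based on the docstring and the attribute names used above:

from sammba.registration import SessionData  # import path assumed from docstring

session = SessionData(func='func.nii.gz', anat='anat.nii.gz',  # placeholder paths
                      animal_id='mouse_01')
coregister_fmri_session(session, t_r=1.0, write_dir='/tmp/coreg',
                        brain_volume=400, use_rats_tool=False)
print(session.coreg_func_, session.coreg_anat_, session.coreg_transform_)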
Code example #6
File: func.py  Project: sammba-mri/sammba-mri
def _realign(func_filename,
             write_dir,
             caching=False,
             terminal_output='allatonce',
             environ=None):
    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        clip_level = memory.cache(afni.ClipLevel)
        threshold = memory.cache(fsl.Threshold)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        copy = memory.cache(afni.Copy)
        copy_geom = memory.cache(fsl.CopyGeom)
        tstat = memory.cache(afni.TStat)
        for step in [threshold, volreg, allineate, tstat, copy, copy_geom]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        clip_level = afni.ClipLevel().run
        threshold = fsl.Threshold(terminal_output=terminal_output).run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        tstat = afni.TStat(terminal_output=terminal_output).run

    out_clip_level = clip_level(in_file=func_filename)

    out_threshold = threshold(in_file=func_filename,
                              thresh=out_clip_level.outputs.clip_val,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='_thresholded',
                                                       newpath=write_dir))
    thresholded_filename = out_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        out_file=fname_presuffix(thresholded_filename,
                                 suffix='_volreg',
                                 newpath=write_dir),
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='_volreg.1Dfile.1D',
                                  use_ext=False,
                                  newpath=write_dir),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='_volreg.aff12.1D',
                                         use_ext=False,
                                         newpath=write_dir))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='_volreg',
                                                       newpath=write_dir),
                              environ=environ)

    # 3dAllineate removes the obliquity. Copying the geometry back like this
    # would not be a good way to re-add it for an AFNI file, since it would
    # drop the motion-correction info from the header; as it happens the file
    # is NIfTI, which does not store that info, so this is irrelevant here.
    out_copy = copy(in_file=out_allineate.outputs.out_file,
                    out_file=fname_presuffix(out_allineate.outputs.out_file,
                                             suffix='_oblique',
                                             newpath=write_dir),
                    environ=environ)
    out_copy_geom = copy_geom(dest_file=out_copy.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    oblique_allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=oblique_allineated_filename,
                      args='-mean',
                      out_file=fname_presuffix(oblique_allineated_filename,
                                               suffix='_tstat',
                                               newpath=write_dir),
                      environ=environ)

    # Remove intermediate outputs
    if not caching:
        for output_file in [
                thresholded_filename, out_volreg.outputs.oned_matrix_save,
                out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
                out_allineate.outputs.out_file
        ]:
            os.remove(output_file)
    return (oblique_allineated_filename, out_tstat.outputs.out_file,
            out_volreg.outputs.oned_file)
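A hedged usage sketch with placeholder paths; the return order matches the tuple above:

# realigned 4D image, its temporal mean, and the 1D motion-parameter file
realigned_file, mean_file, motion_file = _realign('func.nii.gz',
                                                  write_dir='/tmp/realign')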
Code example #7
def create_pipeline_graph(pipeline_name, graph_file,
                          graph_kind='hierarchical'):
    """Creates pipeline graph for a given piepline.

    Parameters
    ----------
    pipeline_name : one of {'anats_to_common_rigid', 'anats_to_common_affine',
        'anats_to_common_nonlinear'}
        Pipeline name.

    graph_file : str.
        Path to save the graph image to.

    graph_kind : one of {'orig', 'hierarchical', 'flat', 'exec', 'colored'}, optional.
        The kind of the graph, passed to
        nipype.pipeline.workflows.Workflow().write_graph
    """
    pipeline_names = ['anats_to_common_rigid', 'anats_to_common_affine',
                      'anats_to_common_nonlinear']
    if pipeline_name not in pipeline_names:
        raise NotImplementedError(
            'Pipeline name must be one of {0}, you entered {1}'.format(
                pipeline_names, pipeline_name))
    graph_kinds = ['orig', 'hierarchical', 'flat', 'exec', 'colored']
    if graph_kind not in graph_kinds:
        raise ValueError(
            'Graph kind must be one of {0}, you entered {1}'.format(
                graph_kinds, graph_kind))

    workflow = pe.Workflow(name=pipeline_name)

    #######################################################################
    # Specify rigid body registration pipeline steps
    unifize = pe.Node(interface=afni.Unifize(), name='bias_correct')
    clip_level = pe.Node(interface=afni.ClipLevel(),
                         name='compute_mask_threshold')
    compute_mask = pe.Node(interface=interfaces.MathMorphoMask(),
                           name='compute_brain_mask')
    apply_mask = pe.Node(interface=afni.Calc(), name='apply_brain_mask')
    center_mass = pe.Node(interface=afni.CenterMass(),
                          name='compute_and_set_cm_in_header')
    refit_copy = pe.Node(afni.Refit(), name='copy_cm_in_header')
    tcat1 = pe.Node(afni.TCat(), name='concatenate_across_individuals1')
    tstat1 = pe.Node(afni.TStat(), name='compute_average1')
    undump = pe.Node(afni.Undump(), name='create_empty_template')
    refit_set = pe.Node(afni.Refit(), name='set_cm_in_header')
    resample1 = pe.Node(afni.Resample(), name='resample1')
    resample2 = pe.Node(afni.Resample(), name='resample2')
    shift_rotate = pe.Node(afni.Allineate(), name='shift_rotate')
    apply_allineate1 = pe.Node(afni.Allineate(), name='apply_transform1')
    tcat2 = pe.Node(afni.TCat(), name='concatenate_across_individuals2')
    tstat2 = pe.Node(afni.TStat(), name='compute_average2')
    tcat3 = pe.Node(afni.TCat(), name='concatenate_across_individuals3')
    tstat3 = pe.Node(afni.TStat(), name='compute_average3')

    workflow.add_nodes([unifize, clip_level, compute_mask, apply_mask,
                        center_mass,
                        refit_copy, tcat1, tstat1, undump, refit_set,
                        resample1, resample2, shift_rotate, apply_allineate1,
                        tcat2, tstat2, tcat3, tstat3])

    #######################################################################
    # and connections
    workflow.connect(unifize, 'out_file', clip_level, 'in_file')
    workflow.connect(clip_level, 'clip_val',
                     compute_mask, 'intensity_threshold')
    workflow.connect(unifize, 'out_file', compute_mask, 'in_file')
    workflow.connect(compute_mask, 'out_file', apply_mask, 'in_file_a')
    workflow.connect(unifize, 'out_file', apply_mask, 'in_file_b')
    workflow.connect(apply_mask, 'out_file',
                     center_mass, 'in_file')
    workflow.connect(unifize, 'out_file', refit_copy, 'in_file')
    workflow.connect(center_mass, 'out_file',
                     refit_copy, 'duporigin_file')
    workflow.connect(center_mass, 'out_file', tcat1, 'in_files')
    workflow.connect(tcat1, 'out_file', tstat1, 'in_file')
    workflow.connect(tstat1, 'out_file', undump, 'in_file')
    workflow.connect(undump, 'out_file', refit_set, 'in_file')
    workflow.connect(refit_set, 'out_file', resample1, 'master')
    workflow.connect(refit_copy, 'out_file', resample1, 'in_file')
    workflow.connect(refit_set, 'out_file', resample2, 'master')
    workflow.connect(center_mass, 'out_file', resample2, 'in_file')
    workflow.connect(resample2, 'out_file', tcat2, 'in_files')
    workflow.connect(tcat2, 'out_file', tstat2, 'in_file')
    workflow.connect(tstat2, 'out_file', shift_rotate, 'reference')
    workflow.connect(resample2, 'out_file', shift_rotate, 'in_file')
    workflow.connect(tstat2, 'out_file', apply_allineate1, 'master')
    workflow.connect(resample1, 'out_file',
                     apply_allineate1, 'in_file')
    workflow.connect(shift_rotate, 'out_matrix',
                     apply_allineate1, 'in_matrix')
    workflow.connect(apply_allineate1, 'out_file', tcat3, 'in_files')
    workflow.connect(tcat3, 'out_file', tstat3, 'in_file')
    if pipeline_name in ['anats_to_common_affine',
                         'anats_to_common_nonlinear']:
        mask = pe.Node(afni.MaskTool(), name='generate_count_mask')
        allineate = pe.Node(afni.Allineate(), name='allineate')
        catmatvec = pe.Node(afni.CatMatvec(), name='concatenate_transforms')
        apply_allineate2 = pe.Node(afni.Allineate(), name='apply_transform2')
        tcat3 = pe.Node(
            afni.TCat(), name='concatenate_across_individuals4')
        tstat3 = pe.Node(afni.TStat(), name='compute_average4')

        workflow.add_nodes([mask, allineate, catmatvec, apply_allineate2,
                            tcat3, tstat3])

        workflow.connect(tcat2, 'out_file', mask, 'in_file')
        workflow.connect(mask, 'out_file', allineate, 'weight')
        workflow.connect(apply_allineate1, 'out_file',
                         allineate, 'in_file')
        workflow.connect(allineate, 'out_matrix',
                         catmatvec, 'in_file')
        # XXX how can we enter multiple files?
        workflow.connect(catmatvec, 'out_file',
                         apply_allineate2, 'in_matrix')
        workflow.connect(resample1, 'out_file',
                         apply_allineate2, 'in_file')
        workflow.connect(apply_allineate2, 'out_file', tcat3, 'in_files')
        workflow.connect(tcat3, 'out_file', tstat3, 'in_file')

    if pipeline_name == 'anats_to_common_nonlinear':
        pass

    graph_file_root, graph_file_ext = os.path.splitext(graph_file)
    if graph_file_ext:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 format=graph_file_ext[1:],
                                 dotfilename=graph_file_root)
    else:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 dotfilename=graph_file_root)
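A hedged usage sketch; writes a hierarchical graph of the rigid-body pipeline to a PNG:

create_pipeline_graph('anats_to_common_rigid', 'rigid_pipeline.png',
                      graph_kind='hierarchical')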
Code example #8
def hmc_afni(name='fMRI_HMC_afni',
             st_correct=False,
             despike=False,
             deoblique=False,
             start_idx=None,
             stop_idx=None):
    """
    A :abbr:`HMC (head motion correction)` workflow for
    functional scans

    .. workflow::

      from mriqc.workflows.functional import hmc_afni
      wf = hmc_afni()

    """

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_fd']),
                         name='outputnode')

    if (start_idx is not None) or (stop_idx is not None):
        drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'in_file_a'),
                                   ('start_idx', 'start_idx'),
                                   ('stop_idx', 'stop_idx')]),
        ])
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=['out_file']),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'out_file')]),
        ])

    get_mean_RPI = pe.Node(afni.TStat(options='-mean', outputtype='NIFTI_GZ'),
                           name='get_mean_RPI')

    # calculate hmc parameters
    hmc = pe.Node(afni.Volreg(args='-Fourier -twopass',
                              zpad=4,
                              outputtype='NIFTI_GZ'),
                  name='motion_correct')

    get_mean_motion = get_mean_RPI.clone('get_mean_motion')
    hmc_A = hmc.clone('motion_correct_A')
    hmc_A.inputs.md1d_file = 'max_displacement.1D'

    # Compute the frame-wise displacement
    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=False,
                                               parameter_source="AFNI"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (get_mean_RPI, hmc, [('out_file', 'basefile')]),
        (hmc, get_mean_motion, [('out_file', 'in_file')]),
        (get_mean_motion, hmc_A, [('out_file', 'basefile')]),
        (hmc_A, outputnode, [('out_file', 'out_file')]),
        (hmc_A, fdnode, [('oned_file', 'in_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')

    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')

    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike')

    # Chain the enabled preprocessing steps in order (slice-timing
    # correction, then despiking, then deobliquing) and feed the last node
    # in the chain into the mean and motion-correction nodes.
    last_node = drop_trs
    for enabled, node in ((st_correct, st_corr),
                          (despike, despike_node),
                          (deoblique, deoblique_node)):
        if enabled:
            workflow.connect(last_node, 'out_file', node, 'in_file')
            last_node = node

    workflow.connect([
        (last_node, get_mean_RPI, [('out_file', 'in_file')]),
        (last_node, hmc, [('out_file', 'in_file')]),
        (last_node, hmc_A, [('out_file', 'in_file')]),
    ])

    return workflow
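A hedged usage sketch for this variant of hmc_afni; the input path is a placeholder, and 50 mm is the conventional human head radius for framewise displacement:

wf = hmc_afni(st_correct=True, despike=True)
wf.inputs.inputnode.in_file = 'bold.nii.gz'  # placeholder
wf.inputs.inputnode.fd_radius = 50           # mm (assumed value)
wf.run()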
Code example #9
File: qc.py  Project: hillhillll/C-PAC
def create_qc_snr(wf_name='qc_snr'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=[
        'functional_preprocessed', 'functional_brain_mask',
        'functional_to_anat_linear_xfm', 'anatomical_brain',
        'mean_functional_in_anat'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'snr_axial_image', 'snr_sagittal_image', 'snr_histogram_image',
        'snr_mean'
    ]),
                          name='outputspec')

    std_dev = pe.Node(afni.TStat(args='-stdev'), name='std_dev')

    std_dev.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'functional_preprocessed', std_dev, 'in_file')
    wf.connect(input_node, 'functional_brain_mask', std_dev, 'mask')

    std_dev_anat = pe.Node(fsl.ApplyWarp(interp='trilinear'),
                           name='std_dev_anat')
    wf.connect(input_node, 'functional_to_anat_linear_xfm', std_dev_anat,
               'premat')
    wf.connect(std_dev, 'out_file', std_dev_anat, 'in_file')
    wf.connect(input_node, 'anatomical_brain', std_dev_anat, 'ref_file')

    snr = pe.Node(afni.Calc(expr='b/a'), name='snr')
    snr.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'mean_functional_in_anat', snr, 'in_file_b')
    wf.connect(std_dev_anat, 'out_file', snr, 'in_file_a')

    snr_val = pe.Node(Function(input_names=['measure_file'],
                               output_names=['snr_storefl'],
                               function=cal_snr_val,
                               as_module=True),
                      name='snr_val')

    wf.connect(snr, 'out_file', snr_val, 'measure_file')

    hist_snr = pe.Node(Function(input_names=['measure_file', 'measure'],
                                output_names=['hist_path'],
                                function=gen_histogram,
                                as_module=True),
                       name='hist_snr')

    hist_snr.inputs.measure = 'snr'

    wf.connect(snr, 'out_file', hist_snr, 'measure_file')

    snr_drop_percent = pe.Node(Function(
        input_names=['measure_file', 'percent'],
        output_names=['modified_measure_file'],
        function=drop_percent,
        as_module=True),
                               name='dp_snr')

    snr_drop_percent.inputs.percent = 99

    wf.connect(snr, 'out_file', snr_drop_percent, 'measure_file')

    montage_snr = create_montage('montage_snr', 'red_to_blue', 'snr')

    wf.connect(snr_drop_percent, 'modified_measure_file', montage_snr,
               'inputspec.overlay')
    wf.connect(input_node, 'anatomical_brain', montage_snr,
               'inputspec.underlay')

    wf.connect(montage_snr, 'outputspec.axial_png', output_node,
               'snr_axial_image')
    wf.connect(montage_snr, 'outputspec.sagittal_png', output_node,
               'snr_sagittal_image')
    wf.connect(hist_snr, 'hist_path', output_node, 'snr_histogram_image')
    wf.connect(snr_val, 'snr_storefl', output_node, 'snr_mean')

    return wf
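A hedged usage sketch; every input path below is a placeholder:

snr_wf = create_qc_snr()
snr_wf.inputs.inputspec.functional_preprocessed = 'func_preproc.nii.gz'
snr_wf.inputs.inputspec.functional_brain_mask = 'func_brain_mask.nii.gz'
snr_wf.inputs.inputspec.functional_to_anat_linear_xfm = 'func2anat.mat'
snr_wf.inputs.inputspec.anatomical_brain = 'anat_brain.nii.gz'
snr_wf.inputs.inputspec.mean_functional_in_anat = 'mean_func_in_anat.nii.gz'
snr_wf.run()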
Code example #10
File: functional.py  Project: jdkent/mriqc
def fmri_qc_workflow(dataset, settings, name='funcMRIQC'):
    """
    The fMRI qc workflow

    .. workflow::

      import os.path as op
      from mriqc.workflows.functional import fmri_qc_workflow
      datadir = op.abspath('data')
      wf = fmri_qc_workflow([op.join(datadir, 'sub-001/func/sub-001_task-rest_bold.nii.gz')],
                            settings={'bids_dir': datadir,
                                      'output_dir': op.abspath('out')})


    """

    workflow = pe.Workflow(name=name)

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    # Define workflow, inputs and outputs
    # 0. Get data, put it in RAS orientation
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    WFLOGGER.info(
        'Building fMRI QC workflow, datasets list: %s',
        sorted([d.replace(settings['bids_dir'] + '/', '') for d in dataset]))
    inputnode.iterables = [('in_file', dataset)]

    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['qc', 'mosaic', 'out_group', 'out_dvars', 'out_fd']),
                         name='outputnode')

    reorient_and_discard = pe.Node(niu.Function(
        input_names=['in_file', 'float32'],
        output_names=['exclude_index', 'out_file'],
        function=reorient_and_discard_non_steady),
                                   name='reorient_and_discard')

    reorient_and_discard.inputs.float32 = settings.get("float32",
                                                       DEFAULTS['float32'])
    reorient_and_discard.interface.estimated_memory_gb = 4.0 * biggest_file_gb

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    if settings.get('hmc_fsl', False):
        hmcwf = hmc_mcflirt(settings)
    else:
        hmcwf = hmc_afni(settings,
                         st_correct=settings.get('correct_slice_timing',
                                                 False),
                         despike=settings.get('despike', False),
                         deoblique=settings.get('deoblique', False),
                         start_idx=settings.get('start_idx', None),
                         stop_idx=settings.get('stop_idx', None))

    # Set HMC settings
    hmcwf.inputs.inputnode.fd_radius = settings.get('fd_radius',
                                                    DEFAULT_FD_RADIUS)

    mean = pe.Node(
        afni.TStat(  # 2. Compute mean fmri
            options='-mean', outputtype='NIFTI_GZ'),
        name='mean')
    mean.interface.estimated_memory_gb = biggest_file_gb * 1.5
    skullstrip_epi = fmri_bmsk_workflow(use_bet=True)

    # EPI to MNI registration
    ema = epi_mni_align(settings)

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nac.TSNR(), name='compute_tsnr')
    tsnr.interface.estimated_memory_gb = biggest_file_gb * 4.5

    # 7. Compute IQMs
    iqmswf = compute_iqms(settings)
    # Reports
    repwf = individual_reports(settings)
    # Upload metrics
    upldwf = upload_wf(settings)

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, reorient_and_discard, [('in_file', 'in_file')]),
        (reorient_and_discard, hmcwf, [('out_file', 'inputnode.in_file')]),
        (mean, skullstrip_epi, [('out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (mean, ema, [('out_file', 'inputnode.epi_mean')]),
        (skullstrip_epi, ema, [('outputnode.out_file', 'inputnode.epi_mask')]),
        (meta, iqmswf, [('subject_id', 'inputnode.subject_id'),
                        ('session_id', 'inputnode.session_id'),
                        ('task_id', 'inputnode.task_id'),
                        ('acq_id', 'inputnode.acq_id'),
                        ('rec_id', 'inputnode.rec_id'),
                        ('run_id', 'inputnode.run_id'),
                        ('out_dict', 'inputnode.metadata')]),
        (reorient_and_discard, iqmswf, [('out_file', 'inputnode.orig')]),
        (mean, iqmswf, [('out_file', 'inputnode.epi_mean')]),
        (hmcwf, iqmswf, [('outputnode.out_file', 'inputnode.hmc_epi'),
                         ('outputnode.out_fd', 'inputnode.hmc_fd')]),
        (skullstrip_epi, iqmswf, [('outputnode.out_file',
                                   'inputnode.brainmask')]),
        (tsnr, iqmswf, [('tsnr_file', 'inputnode.in_tsnr')]),
        (reorient_and_discard, repwf, [('out_file', 'inputnode.orig')]),
        (mean, repwf, [('out_file', 'inputnode.epi_mean')]),
        (tsnr, repwf, [('stddev_file', 'inputnode.in_stddev')]),
        (skullstrip_epi, repwf, [('outputnode.out_file', 'inputnode.brainmask')
                                 ]),
        (hmcwf, repwf, [('outputnode.out_fd', 'inputnode.hmc_fd'),
                        ('outputnode.out_file', 'inputnode.hmc_epi')]),
        (ema, repwf, [('outputnode.epi_parc', 'inputnode.epi_parc'),
                      ('outputnode.report', 'inputnode.mni_report')]),
        (reorient_and_discard, repwf, [('exclude_index',
                                        'inputnode.exclude_index')]),
        (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms'),
                         ('outputnode.out_dvars', 'inputnode.in_dvars'),
                         ('outputnode.outliers', 'inputnode.outliers')]),
        (iqmswf, upldwf, [('outputnode.out_file', 'inputnode.in_iqms')]),
        (hmcwf, outputnode, [('outputnode.out_fd', 'out_fd')]),
    ])

    if settings.get('fft_spikes_detector', False):
        workflow.connect([
            (iqmswf, repwf, [('outputnode.out_spikes', 'inputnode.in_spikes'),
                             ('outputnode.out_fft', 'inputnode.in_fft')]),
        ])

    if settings.get('ica', False):
        melodic = pe.Node(nws.MELODICRPT(no_bet=True,
                                         no_mask=True,
                                         no_mm=True,
                                         generate_report=True),
                          name="ICA")
        melodic.interface.estimated_memory_gb = biggest_file_gb * 5
        workflow.connect([
            (reorient_and_discard, melodic, [('out_file', 'in_files')]),
            (skullstrip_epi, melodic, [('outputnode.out_file', 'report_mask')
                                       ]),
            (melodic, repwf, [('out_report', 'inputnode.ica_report')])
        ])

    return workflow
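
The mean node above is a plain afni.TStat interface. Run standalone, it reduces to the hedged sketch below; the input file name is an assumption, and AFNI's 3dTstat must be on the PATH.

from nipype.interfaces import afni

tstat = afni.TStat(args='-mean', outputtype='NIFTI_GZ')
tstat.inputs.in_file = 'bold_hmc.nii.gz'  # hypothetical motion-corrected series
result = tstat.run()
print(result.outputs.out_file)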
コード例 #11
0
def init_fsl_hmc_wf(scan_groups,
                    b0_threshold,
                    impute_slice_threshold,
                    fmap_demean,
                    fmap_bspline,
                    eddy_config,
                    mem_gb=3,
                    omp_nthreads=1,
                    dwi_metadata=None,
                    slice_quality='outlier_n_sqr_stdev_map',
                    sloppy=False,
                    name="fsl_hmc_wf"):
    """
    This workflow controls the dwi preprocessing stages using FSL tools.

    I couldn't get this to work reliably unless everything was oriented in LAS+ before going to
    TOPUP and eddy. For this reason, if TOPUP is going to be used (for an epi fieldmap or an
    RPE series) or there is no fieldmap correction, operations occurring before eddy are done in
    LAS+. The field coefficients are applied during eddy's run and the corrected series is
    produced; this is finally converted to LPS+ and sent to the rest of the pipeline.

    If a GRE fieldmap is available, the correction is applied to eddy's outputs after they have
    been converted back to LPS+.

    Finally, if SyN is chosen, it is applied to the LPS+ converted, eddy-resampled data.


    **Parameters**

        scan_groups: dict
            dictionary with fieldmaps and warp space information for the dwis
        impute_slice_threshold: float
            threshold for a slice to be replaced with imputed values. Overrides the
            parameter in ``eddy_config`` if set to a number > 0.
        eddy_config: str
            Path to a JSON file containing settings for the call to ``eddy``.


    **Inputs**

        dwi_files: list
            List of single-volume files across all DWI series
        b0_indices: list
            Indexes into ``dwi_files`` that correspond to b=0 volumes
        bvecs: list
            List of paths to single-line bvec files
        bvals: list
            List of paths to single-line bval files
        b0_images: list
            List of single b=0 volumes
        original_files: list
            List of the files from which each DWI volume came.


    """

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['dwi_files', 'b0_indices', 'bvec_files', 'bval_files', 'b0_images',
                    'original_files', 't1_brain', 't1_2_mni_reverse_transform']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["b0_template", "b0_template_mask", "pre_sdc_template",
                    "hmc_optimization_data", "sdc_method", 'slice_quality', 'motion_params',
                    "cnr_map", "bvec_files_to_transform", "dwi_files_to_transform", "b0_indices",
                    "to_dwi_ref_affines", "to_dwi_ref_warps", "rpe_b0_info"]),
        name='outputnode')

    workflow = Workflow(name=name)
    gather_inputs = pe.Node(GatherEddyInputs(), name="gather_inputs")
    if eddy_config is None:
        # load from the defaults
        eddy_cfg_file = pkgr_fn('qsiprep.data', 'eddy_params.json')
    else:
        eddy_cfg_file = eddy_config

    with open(eddy_cfg_file, "r") as f:
        eddy_args = json.load(f)

    # Run eddy in parallel if possible
    LOGGER.info("Using %d threads in eddy", omp_nthreads)
    eddy_args["num_threads"] = omp_nthreads
    eddy = pe.Node(ExtendedEddy(**eddy_args), name="eddy")
    # These should be in LAS+
    dwi_merge = pe.Node(MergeDWIs(), name="dwi_merge")
    spm_motion = pe.Node(Eddy2SPMMotion(), name="spm_motion")
    # Convert eddy outputs back to LPS+, split them
    pre_topup_lps = pe.Node(ConformDwi(orientation="LPS"), name='pre_topup_lps')
    pre_topup_enhance = init_enhance_and_skullstrip_dwi_wf(name='pre_topup_enhance')
    back_to_lps = pe.Node(ConformDwi(orientation="LPS"), name='back_to_lps')
    cnr_lps = pe.Node(ConformDwi(orientation="LPS"), name='cnr_lps')
    split_eddy_lps = pe.Node(SplitDWIs(b0_threshold=b0_threshold), name="split_eddy_lps")
    mean_b0_lps = pe.Node(ants.AverageImages(dimension=3, normalize=True), name='mean_b0_lps')
    lps_b0_enhance = init_enhance_and_skullstrip_dwi_wf(name='lps_b0_enhance')

    workflow.connect([
        # These images and gradients should be in LAS+
        (inputnode, gather_inputs, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('b0_indices', 'b0_indices'),
            ('b0_images', 'b0_images'),
            ('original_files', 'original_files')]),
        # Re-concatenate
        (inputnode, dwi_merge, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('original_files', 'bids_dwi_files')]),
        (gather_inputs, eddy, [
            ('eddy_indices', 'in_index'),
            ('eddy_acqp', 'in_acqp')]),
        (dwi_merge, eddy, [
            ('out_dwi', 'in_file'),
            ('out_bval', 'in_bval'),
            ('out_bvec', 'in_bvec')]),
        (gather_inputs, pre_topup_lps, [
            ('pre_topup_image', 'dwi_file')]),
        (gather_inputs, outputnode, [('forward_transforms', 'to_dwi_ref_affines')]),
        (pre_topup_lps, pre_topup_enhance, [
            ('dwi_file', 'inputnode.in_file')]),
        (pre_topup_enhance, outputnode, [
            ('outputnode.bias_corrected_file', 'pre_sdc_template')]),
        (eddy, back_to_lps, [
            ('out_corrected', 'dwi_file'),
            ('out_rotated_bvecs', 'bvec_file')]),
        (dwi_merge, back_to_lps, [('out_bval', 'bval_file')]),
        (back_to_lps, split_eddy_lps, [
            ('dwi_file', 'dwi_file'),
            ('bval_file', 'bval_file'),
            ('bvec_file', 'bvec_file')]),
        (dwi_merge, outputnode, [
            ('original_images', 'original_files')]),
        (split_eddy_lps, outputnode, [
            ('dwi_files', 'dwi_files_to_transform'),
            ('bvec_files', 'bvec_files_to_transform')]),
        (split_eddy_lps, mean_b0_lps, [('b0_images', 'images')]),
        (mean_b0_lps, lps_b0_enhance, [('output_average_image', 'inputnode.in_file')]),
        (eddy, cnr_lps, [('out_cnr_maps', 'dwi_file')]),
        (cnr_lps, outputnode, [('dwi_file', 'cnr_map')]),
        (eddy, outputnode, [
            (slice_quality, 'slice_quality'),
            (slice_quality, 'hmc_optimization_data')]),
        (eddy, spm_motion, [('out_parameter', 'eddy_motion')]),
        (spm_motion, outputnode, [('spm_motion_file', 'motion_params')])
    ])

    # Fieldmap correction to be done in LAS+: TOPUP for rpe series or epi fieldmap
    # If a topupref is provided, use it for TOPUP
    rpe_b0 = None
    fieldmap_type = scan_groups['fieldmap_info']['suffix']
    if fieldmap_type == 'epi':
        rpe_b0 = scan_groups['fieldmap_info']['epi']
    elif fieldmap_type == 'rpe_series':
        rpe_b0 = scan_groups['fieldmap_info']['rpe_series']
    using_topup = rpe_b0 is not None

    if using_topup:
        outputnode.inputs.sdc_method = "TOPUP"
        # Whether the input is an RPE series (from dwi/) or an EPI fieldmap (from fmap/),
        # extract just the b=0 volumes for TOPUP
        prepare_rpe_b0 = pe.Node(
            B0RPEFieldmap(b0_file=rpe_b0, orientation='LAS', output_3d_images=False),
            name="prepare_rpe_b0")
        topup = pe.Node(fsl.TOPUP(out_field="fieldmap_HZ.nii.gz"), name="topup")
        # Enhance and skullstrip the TOPUP output to get a mask for eddy
        unwarped_mean = pe.Node(afni.TStat(outputtype='NIFTI_GZ'), name='unwarped_mean')
        unwarped_enhance = init_enhance_and_skullstrip_dwi_wf(name='unwarped_enhance')

        workflow.connect([
            (prepare_rpe_b0, outputnode, [('fmap_info', 'inputnode.rpe_b0_info')]),
            (prepare_rpe_b0, gather_inputs, [('fmap_file', 'rpe_b0')]),
            (gather_inputs, topup, [
                ('topup_datain', 'encoding_file'),
                ('topup_imain', 'in_file'),
                ('topup_config', 'config')]),
            (gather_inputs, outputnode, [('forward_warps', 'to_dwi_ref_warps')]),
            (topup, unwarped_mean, [('out_corrected', 'in_file')]),
            (unwarped_mean, unwarped_enhance, [('out_file', 'inputnode.in_file')]),
            (unwarped_enhance, eddy, [('outputnode.mask_file', 'in_mask')]),
            (topup, eddy, [
                ('out_field', 'field')]),
            (lps_b0_enhance, outputnode, [
                ('outputnode.bias_corrected_file', 'b0_template'),
                ('outputnode.mask_file', 'b0_template_mask')]),
            ])
        return workflow

    # No TOPUP: enhance and skullstrip the distorted b=0 reference to get a mask for eddy
    distorted_enhance = init_enhance_and_skullstrip_dwi_wf(name='distorted_enhance')
    workflow.connect([
        # Use the distorted mask for eddy
        (gather_inputs, distorted_enhance, [('pre_topup_image', 'inputnode.in_file')]),
        (distorted_enhance, eddy, [('outputnode.mask_file', 'in_mask')]),
    ])
    if fieldmap_type in ('fieldmap', 'phasediff', 'phase', 'syn'):

        outputnode.inputs.sdc_method = fieldmap_type
        b0_sdc_wf = init_sdc_wf(
            scan_groups['fieldmap_info'], dwi_metadata, omp_nthreads=omp_nthreads,
            fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)

        workflow.connect([
            # Calculate distortion correction on eddy-corrected data
            (lps_b0_enhance, b0_sdc_wf, [
                ('outputnode.bias_corrected_file', 'inputnode.b0_ref'),
                ('outputnode.skull_stripped_file', 'inputnode.b0_ref_brain'),
                ('outputnode.mask_file', 'inputnode.b0_mask')]),
            (inputnode, b0_sdc_wf, [
                ('t1_brain', 'inputnode.t1_brain'),
                ('t1_2_mni_reverse_transform',
                 'inputnode.t1_2_mni_reverse_transform')]),

            # These deformations will be applied later, use the unwarped image now
            (b0_sdc_wf, outputnode, [
                ('outputnode.out_warp', 'to_dwi_ref_warps'),
                ('outputnode.method', 'sdc_method'),
                ('outputnode.b0_ref', 'b0_template'),
                ('outputnode.b0_mask', 'b0_template_mask')])])

    else:
        outputnode.inputs.sdc_method = "None"
        workflow.connect([
            (lps_b0_enhance, outputnode, [
                ('outputnode.skull_stripped_file', 'b0_template'),
                ('outputnode.mask_file', 'b0_template_mask')]),
            ])

    return workflow
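
A hedged sketch of the LAS+ reorientation the docstring above describes, using plain nibabel rather than qsiprep's ConformDwi; 'dwi.nii.gz' is a hypothetical input, and the matching bvec flip is not shown.

import nibabel as nb
from nibabel.orientations import axcodes2ornt, io_orientation, ornt_transform

img = nb.load('dwi.nii.gz')
# transform from the image's current orientation to LAS+
transform = ornt_transform(io_orientation(img.affine),
                           axcodes2ornt(('L', 'A', 'S')))
img.as_reoriented(transform).to_filename('dwi_las.nii.gz')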
コード例 #12
0
ファイル: __init__.py プロジェクト: soichih/C-PAC
def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1):

    threshold_method = "VAR"

    threshold_value = threshold

    if isinstance(threshold, str):
        regex_match = {
            "SD": r"([0-9]+(\.[0-9]+)?)\s*SD",
            "PCT": r"([0-9]+(\.[0-9]+)?)\s*PCT",
        }

        for method, regex in regex_match.items():
            matched = re.match(regex, threshold)
            if matched:
                threshold_method = method
                threshold_value = matched.groups()[0]

    try:
        threshold_value = float(threshold_value)
    except (TypeError, ValueError):
        raise ValueError(
            "Error converting threshold value {0} from {1} to a "
            "floating point number. The threshold value can "
            "contain SD or PCT for selecting a threshold based on "
            "the variance distribution, otherwise it should be a "
            "floating point number.".format(threshold_value, threshold))

    if threshold_value < 0:
        raise ValueError(
            "Threshold value should be positive, instead of {0}.".format(
                threshold_value))

    if threshold_method == "PCT" and threshold_value >= 100.0:
        raise ValueError(
            "Percentile should be less than 100, received {0}.".format(
                threshold_value))

    threshold = threshold_value

    wf = pe.Workflow(name='tcompcor')

    input_node = pe.Node(util.IdentityInterface(
        fields=['functional_file_path', 'mask_file_path']),
                         name='inputspec')
    output_node = pe.Node(util.IdentityInterface(fields=['mask']),
                          name='outputspec')

    # C-PAC default performs linear regression while nipype performs quadratic regression
    detrend = pe.Node(afni.Detrend(args='-polort {0}'.format(degree),
                                   outputtype='NIFTI'),
                      name='detrend')
    wf.connect(input_node, 'functional_file_path', detrend, 'in_file')

    std = pe.Node(afni.TStat(args='-nzstdev', outputtype='NIFTI'), name='std')
    wf.connect(input_node, 'mask_file_path', std, 'mask')
    wf.connect(detrend, 'out_file', std, 'in_file')

    var = pe.Node(afni.Calc(expr='a*a', outputtype='NIFTI'), name='var')
    wf.connect(std, 'out_file', var, 'in_file_a')

    if by_slice:
        slices = pe.Node(fsl.Slice(), name='slicer')
        wf.connect(var, 'out_file', slices, 'in_file')

        mask_slices = pe.Node(fsl.Slice(), name='mask_slicer')
        wf.connect(input_node, 'mask_file_path', mask_slices, 'in_file')

        mapper = pe.MapNode(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper',
            iterfield=['out_file', 'mask_file'])
        wf.connect(slices, 'out_files', mapper, 'out_file')
        wf.connect(mask_slices, 'out_files', mapper, 'mask_file')

    else:
        mapper_list = pe.Node(util.Merge(1), name='slice_mapper_list')
        wf.connect(var, 'out_file', mapper_list, 'in1')

        mask_mapper_list = pe.Node(util.Merge(1),
                                   name='slice_mask_mapper_list')
        wf.connect(input_node, 'mask_file_path', mask_mapper_list, 'in1')

        mapper = pe.Node(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper')
        wf.connect(mapper_list, 'out', mapper, 'out_file')
        wf.connect(mask_mapper_list, 'out', mapper, 'mask_file')

    if threshold_method == "PCT":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_pct'],
            output_names=['threshold'],
            function=compute_pct_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_pct = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    elif threshold_method == "SD":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_sd'],
            output_names=['threshold'],
            function=compute_sd_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_sd = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    else:
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold'],
            output_names=['threshold'],
            function=compute_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    threshold_mask = pe.MapNode(interface=fsl.maths.Threshold(),
                                name='threshold',
                                iterfield=['in_file', 'thresh'])
    threshold_mask.inputs.args = '-bin'
    wf.connect(mapper, 'out_file', threshold_mask, 'in_file')
    wf.connect(threshold_node, 'threshold', threshold_mask, 'thresh')

    merge_slice_masks = pe.Node(interface=fsl.Merge(),
                                name='merge_slice_masks')
    merge_slice_masks.inputs.dimension = 'z'
    wf.connect(threshold_mask, 'out_file', merge_slice_masks, 'in_files')

    wf.connect(merge_slice_masks, 'merged_file', output_node, 'mask')

    return wf
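
A hedged sketch of the SD/PCT threshold grammar parsed at the top of temporal_variance_mask; the helper name is illustrative, not part of C-PAC.

import re

def parse_threshold(threshold):
    # "2.5 SD" -> ('SD', 2.5); "98 PCT" -> ('PCT', 98.0); 1200 -> ('VAR', 1200.0)
    method, value = 'VAR', threshold
    if isinstance(threshold, str):
        for m, regex in {'SD': r'([0-9]+(\.[0-9]+)?)\s*SD',
                         'PCT': r'([0-9]+(\.[0-9]+)?)\s*PCT'}.items():
            matched = re.match(regex, threshold)
            if matched:
                method, value = m, matched.groups()[0]
    return method, float(value)

print(parse_threshold('2.5 SD'))  # ('SD', 2.5)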
コード例 #13
0
def prepro_func(i):
    try:
        subj = i
        for s in ['session2']:

            # Define input files: 2xfMRI + 1xMPRAGE
            func1 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_a/epi.nii.gz'  #choose this for patients
            func2 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_b/epi.nii.gz'  #choose this for patients
            #anat = glob.glob(anat_path + subj +'/'+ s + '/anat/reorient/anat_*.nii.gz') #choose this for session 1
            lesion_mask_file = anat_path + subj + '/session1/anat/reorient/lesion_seg.nii.gz'
            old_lesion_mask_file = glob.glob(
                anat_path + subj +
                '/session1/anat/reorient/old_lesion_seg.nii.gz'
            )  #choose this for ones with no old lesion
            #old_lesion_mask_file = anat_path + subj +'/session1/anat/reorient/old_lesion_seg.nii.gz' #choose this for ones with old lesion
            anat = glob.glob(anat_path + subj + '/' + s +
                             '/anat/anat2hr/anat_*.nii.gz'
                             )  #choose this for sessions 2 and 3
            anat_CSF = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_0.nii.gz'
            )  # don't change, same for all sessions
            anat_WM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_2.nii.gz'
            )  # don't change, same for all sessions
            anat_GM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_1.nii.gz'
            )  # don't change, same for all sessions
            anat2MNI_fieldwarp = glob.glob(
                anat_path + subj +
                '/session1/anat/nonlinear_reg/anat_*_fieldwarp.nii.gz'
            )  # don't change, same for all sessions

            if not os.path.isdir(data_path + subj + '/' + s):  # No data exists
                continue

            if not os.path.isfile(func1):
                print('1. functional file ' + func1 + ' not found. Skipping!')
                continue

            if not os.path.isfile(func2):
                print('2. functional file ' + func2 + ' not found. Skipping!')
                continue

            if not anat:
                print('Preprocessed anatomical file not found. Skipping!')
                continue
            if len(anat) > 1:
                print('WARNING: found multiple files of preprocessed anatomical image!')
                continue
            anat = anat[0]

            if not anat2MNI_fieldwarp:
                print('Anatomical registration to MNI152-space field file not found. Skipping!')
                continue
            if len(anat2MNI_fieldwarp) > 1:
                print('WARNING: found multiple files of anat2MNI fieldwarp!')
                continue
            anat2MNI_fieldwarp = anat2MNI_fieldwarp[0]

            if not anat_CSF:
                anat_CSF = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_0.nii.gz')
                if not anat_CSF:
                    print('Anatomical segmentation CSF file not found. Skipping!')
                    continue
            if len(anat_CSF) > 1:
                print('WARNING: found multiple files of anatomical CSF file!')
                continue
            anat_CSF = anat_CSF[0]

            if not anat_WM:
                anat_WM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_2.nii.gz')
                if not anat_WM:
                    print('Anatomical segmentation WM file not found. Skipping!')
                    continue
            if len(anat_WM) > 1:
                print('WARNING: found multiple files of anatomical WM file!')
                continue
            anat_WM = anat_WM[0]

            if not anat_GM:
                anat_GM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_1.nii.gz')
                if not anat_GM:
                    print('Anatomical segmentation GM file not found. Skipping!')
                    continue
            if len(anat_GM) > 1:
                print('WARNING: found multiple files of anatomical GM file!')
                continue
            anat_GM = anat_GM[0]

            if not os.path.isdir(results_path + subj):
                os.mkdir(results_path + subj)

            if not os.path.isdir(results_path + subj + '/' + s):
                os.mkdir(results_path + subj + '/' + s)

            for data in acquisitions:

                os.chdir(results_path + subj + '/' + s)
                print "Currently processing subject: " + subj + '/' + s + ' ' + data

                #Initialize workflows
                workflow = pe.Workflow(name=data)

                workflow.base_dir = '.'
                inputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['source_file']),
                    name='inputspec')
                outputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['result_func']),
                    name='outputspec')

                if data == 'func1':
                    inputnode.inputs.source_file = func1
                else:
                    inputnode.inputs.source_file = func2

                # Remove n_dummies first volumes
                trim = pe.Node(interface=Trim(begin_index=n_dummies),
                               name='trim')
                workflow.connect(inputnode, 'source_file', trim, 'in_file')

                # Motion correction + slice timing correction
                realign4d = pe.Node(interface=SpaceTimeRealigner(),
                                    name='realign4d')
                #realign4d.inputs.ignore_exception=True
                realign4d.inputs.slice_times = 'asc_alt_siemens'

                realign4d.inputs.slice_info = 2  # horizontal slices
                realign4d.inputs.tr = mytr  # TR in seconds
                workflow.connect(trim, 'out_file', realign4d, 'in_file')

                # Reorient
                #deoblique = pe.Node(interface=afni.Warp(deoblique=True, outputtype='NIFTI_GZ'), name='deoblique') #leave out if you don't need this
                #workflow.connect(realign4d, 'out_file', deoblique, 'in_file')
                reorient = pe.Node(
                    interface=fsl.Reorient2Std(output_type='NIFTI_GZ'),
                    name='reorient')
                workflow.connect(realign4d, 'out_file', reorient, 'in_file')

                # AFNI skullstrip and mean image skullstrip
                tstat1 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat1')
                automask = pe.Node(interface=afni.Automask(
                    dilate=1, outputtype="NIFTI_GZ"),
                                   name='automask')
                skullstrip = pe.Node(interface=afni.Calc(
                    expr='a*b', outputtype="NIFTI_GZ"),
                                     name='skullstrip')
                tstat2 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat2')

                workflow.connect(reorient, 'out_file', tstat1, 'in_file')
                workflow.connect(tstat1, 'out_file', automask, 'in_file')
                workflow.connect(automask, 'out_file', skullstrip, 'in_file_b')
                workflow.connect(reorient, 'out_file', skullstrip, 'in_file_a')
                workflow.connect(skullstrip, 'out_file', tstat2, 'in_file')

                # Register to anatomical space #can be changed
                #mean2anat = pe.Node(fsl.FLIRT(bins=40, cost='normmi', dof=7, interp='nearestneighbour', searchr_x=[-180,180], searchr_y=[-180,180], searchr_z=[-180,180]), name='mean2anat')
                mean2anat = pe.Node(fsl.FLIRT(bins=40,
                                              cost='normmi',
                                              dof=7,
                                              interp='nearestneighbour'),
                                    name='mean2anat')
                #mean2anat = pe.Node(fsl.FLIRT(no_search=True), name='mean2anat')
                mean2anat.inputs.reference = anat
                workflow.connect(tstat2, 'out_file', mean2anat, 'in_file')

                # Transform mean functional image
                warpmean = pe.Node(interface=fsl.ApplyWarp(), name='warpmean')
                warpmean.inputs.ref_file = MNI_brain
                warpmean.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpmean,
                                 'premat')
                workflow.connect(tstat2, 'out_file', warpmean, 'in_file')

                # ----- inversion matrix and eroded brain mask for regression -----

                # create inverse matrix from mean2anat registration
                invmat = pe.Node(fsl.ConvertXFM(), name='invmat')
                invmat.inputs.invert_xfm = True
                workflow.connect(mean2anat, 'out_matrix_file', invmat,
                                 'in_file')

                # erode functional brain mask
                erode_brain = pe.Node(fsl.ImageMaths(), name='erode_brain')
                erode_brain.inputs.args = '-kernel boxv 3 -ero'
                workflow.connect(automask, 'out_file', erode_brain, 'in_file')

                # register GM mask to functional image space, this is done for quality control
                reg_GM = pe.Node(fsl.preprocess.ApplyXFM(), name='register_GM')
                reg_GM.inputs.apply_xfm = True
                reg_GM.inputs.in_file = anat_GM
                workflow.connect(tstat2, 'out_file', reg_GM, 'reference')
                workflow.connect(invmat, 'out_file', reg_GM, 'in_matrix_file')

                # --------- motion regression and censor signals ------------------

                # normalize motion parameters
                norm_motion = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['out_file'],
                    function=normalize_motion_data),
                                      name='normalize_motion')
                workflow.connect(realign4d, 'par_file', norm_motion, 'in_file')

                # create censor file, for censoring motion
                get_censor = pe.Node(afni.OneDToolPy(), name='motion_censors')
                get_censor.inputs.set_nruns = 1
                get_censor.inputs.censor_motion = (censor_thr, 'motion')
                get_censor.inputs.show_censor_count = True
                if overwrite: get_censor.inputs.args = '-overwrite'
                workflow.connect(norm_motion, 'out_file', get_censor,
                                 'in_file')

                # compute motion parameter derivatives (for use in regression)
                deriv_motion = pe.Node(afni.OneDToolPy(), name='deriv_motion')
                deriv_motion.inputs.set_nruns = 1
                deriv_motion.inputs.derivative = True
                if overwrite: deriv_motion.inputs.args = '-overwrite'
                deriv_motion.inputs.out_file = 'motion_derivatives.txt'
                workflow.connect(norm_motion, 'out_file', deriv_motion,
                                 'in_file')

                # scale motion parameters and get quadratures
                quadr_motion = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                       name='quadr_motion')
                quadr_motion.inputs.multicol = True
                workflow.connect(norm_motion, 'out_file', quadr_motion,
                                 'in_file')

                # scale motion derivatives and get quadratures
                quadr_motion_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                             name='quadr_motion_deriv')
                quadr_motion_deriv.inputs.multicol = True
                workflow.connect(deriv_motion, 'out_file', quadr_motion_deriv,
                                 'in_file')

                # -------- CSF regression signals ---------------

                # threshold and erode CSF mask
                erode_CSF_mask = pe.Node(fsl.ImageMaths(),
                                         name='erode_CSF_mask')
                erode_CSF_mask.inputs.args = '-thr 0.5 -kernel boxv 3 -ero'
                erode_CSF_mask.inputs.in_file = anat_CSF

                # register CSF mask to functional image space
                reg_CSF_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                       name='register_CSF_mask')
                reg_CSF_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_CSF_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_CSF_mask,
                                 'in_matrix_file')

                # inverse lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                inverse_lesion_mask = pe.Node(fsl.ImageMaths(),
                                              name='inverse_lesion_mask')
                inverse_lesion_mask.inputs.args = '-add 1 -rem 2'
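                # fslmaths '-add 1 -rem 2' (modulus) inverts a 0/1 mask:
                # lesion voxels -> 0, everything else -> 1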
                inverse_lesion_mask.inputs.in_file = lesion_mask_file
                rem_lesion = pe.Node(fsl.ImageMaths(), name='remove_lesion')
                workflow.connect(erode_CSF_mask, 'out_file', rem_lesion,
                                 'in_file')
                workflow.connect(inverse_lesion_mask, 'out_file', rem_lesion,
                                 'mask_file')
                '''
                # Transform lesion mask to MNI152 space #remove if lesion masks are already in MNI152 space
                warp_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_lesion')
                warp_lesion.inputs.ref_file = MNI_brain
                warp_lesion.inputs.field_file = anat2MNI_fieldwarp
                warp_lesion.inputs.in_file = lesion_mask_file
                warp_lesion.inputs.out_file = anat_path + subj + '/' + s + '/anat/nonlinear_reg/lesion_seg_warp.nii.gz'
                warp_lesion.run()
                '''

                # inverse old lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                if old_lesion_mask_file:
                    inverse_old_lesion_mask = pe.Node(
                        fsl.ImageMaths(), name='inverse_old_lesion_mask')
                    inverse_old_lesion_mask.inputs.args = '-add 1 -rem 3'
                    #inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file[0]
                    inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file
                    rem_old_lesion = pe.Node(fsl.ImageMaths(),
                                             name='remove_old_lesion')
                    workflow.connect(rem_lesion, 'out_file', rem_old_lesion,
                                     'in_file')
                    workflow.connect(inverse_old_lesion_mask, 'out_file',
                                     rem_old_lesion, 'mask_file')
                    workflow.connect(rem_old_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')
                    '''
                    # Transform old lesion mask to MNI152 space #remove if lesion masks are already in MNI152 space
                    warp_old_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_old_lesion')
                    warp_old_lesion.inputs.ref_file = MNI_brain
                    warp_old_lesion.inputs.field_file = anat2MNI_fieldwarp
                    warp_old_lesion.inputs.in_file = old_lesion_mask_file
                    warp_old_lesion.inputs.out_file = anat_path + subj + '/' + s + '/anat/nonlinear_reg/old_lesion_seg_warp.nii.gz'
                    warp_old_lesion.run()
                    '''

                else:
                    workflow.connect(rem_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')

                # threshold CSF mask and intersect with functional brain mask
                thr_CSF_mask = pe.Node(fsl.ImageMaths(),
                                       name='threshold_CSF_mask')
                thr_CSF_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_CSF_mask, 'out_file', thr_CSF_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_CSF_mask,
                                 'mask_file')

                # extract CSF values
                get_CSF_noise = pe.Node(fsl.ImageMeants(),
                                        name='get_CSF_noise')
                workflow.connect(skullstrip, 'out_file', get_CSF_noise,
                                 'in_file')
                workflow.connect(thr_CSF_mask, 'out_file', get_CSF_noise,
                                 'mask')

                # compute CSF noise derivatives
                deriv_CSF = pe.Node(afni.OneDToolPy(), name='deriv_CSF')
                deriv_CSF.inputs.set_nruns = 1
                deriv_CSF.inputs.derivative = True
                if overwrite: deriv_CSF.inputs.args = '-overwrite'
                deriv_CSF.inputs.out_file = 'CSF_derivatives.txt'
                workflow.connect(get_CSF_noise, 'out_file', deriv_CSF,
                                 'in_file')

                # scale CSF noise and get quadratures
                quadr_CSF = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                    name='quadr_CSF')
                quadr_CSF.inputs.multicol = False
                workflow.connect(get_CSF_noise, 'out_file', quadr_CSF,
                                 'in_file')

                # scale CSF noise derivatives and get quadratures
                quadr_CSF_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                          name='quadr_CSF_deriv')
                quadr_CSF_deriv.inputs.multicol = False
                workflow.connect(deriv_CSF, 'out_file', quadr_CSF_deriv,
                                 'in_file')

                # -------- WM regression signals -----------------

                # threshold and erode WM mask
                erode_WM_mask = pe.Node(fsl.ImageMaths(), name='erode_WM_mask')
                erode_WM_mask.inputs.args = '-thr 0.5 -kernel boxv 7 -ero'
                erode_WM_mask.inputs.in_file = anat_WM

                # register WM mask to functional image space
                reg_WM_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                      name='register_WM_mask')
                reg_WM_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_WM_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_WM_mask,
                                 'in_matrix_file')
                workflow.connect(erode_WM_mask, 'out_file', reg_WM_mask,
                                 'in_file')

                # create inverse nonlinear registration MNI2anat
                invwarp = pe.Node(fsl.InvWarp(output_type='NIFTI_GZ'),
                                  name='invwarp')
                invwarp.inputs.warp = anat2MNI_fieldwarp
                invwarp.inputs.reference = anat

                # transform ventricle mask to functional space
                reg_ventricles = pe.Node(fsl.ApplyWarp(),
                                         name='register_ventricle_mask')
                reg_ventricles.inputs.in_file = ventricle_mask
                workflow.connect(tstat2, 'out_file', reg_ventricles,
                                 'ref_file')
                workflow.connect(invwarp, 'inverse_warp', reg_ventricles,
                                 'field_file')
                workflow.connect(invmat, 'out_file', reg_ventricles, 'postmat')

                # threshold WM mask and intersect with functional brain mask
                thr_WM_mask = pe.Node(fsl.ImageMaths(),
                                      name='threshold_WM_mask')
                thr_WM_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_WM_mask, 'out_file', thr_WM_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_WM_mask,
                                 'mask_file')

                # remove ventricles from WM mask
                exclude_ventricles = pe.Node(fsl.ImageMaths(),
                                             name='exclude_ventricles')
                workflow.connect(thr_WM_mask, 'out_file', exclude_ventricles,
                                 'in_file')
                workflow.connect(reg_ventricles, 'out_file',
                                 exclude_ventricles, 'mask_file')

                # check that WM is collected from both hemispheres
                check_WM_bilat = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['errors'],
                    function=check_bilateralism),
                                         name='check_WM_bilateralism')
                workflow.connect(exclude_ventricles, 'out_file',
                                 check_WM_bilat, 'in_file')

                # extract WM values
                get_WM_noise = pe.Node(fsl.ImageMeants(), name='get_WM_noise')
                workflow.connect(skullstrip, 'out_file', get_WM_noise,
                                 'in_file')
                workflow.connect(exclude_ventricles, 'out_file', get_WM_noise,
                                 'mask')

                # compute WM noise derivatives
                deriv_WM = pe.Node(afni.OneDToolPy(), name='deriv_WM')
                deriv_WM.inputs.set_nruns = 1
                deriv_WM.inputs.derivative = True
                if overwrite: deriv_WM.inputs.args = '-overwrite'
                deriv_WM.inputs.out_file = 'WM_derivatives.txt'
                workflow.connect(get_WM_noise, 'out_file', deriv_WM, 'in_file')

                # scale WM noise and get quadratures
                quadr_WM = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                   name='quadr_WM')
                quadr_WM.inputs.multicol = False
                workflow.connect(get_WM_noise, 'out_file', quadr_WM, 'in_file')

                # scale WM noise derivatives and get quadratures
                quadr_WM_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                         name='quadr_WM_deriv')
                quadr_WM_deriv.inputs.multicol = False
                workflow.connect(deriv_WM, 'out_file', quadr_WM_deriv,
                                 'in_file')

                # ---------- global regression signals ----------------

                if global_reg:
                    # register anatomical whole brain mask to functional image space
                    reg_glob_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                            name='register_global_mask')
                    reg_glob_mask.inputs.apply_xfm = True
                    reg_glob_mask.inputs.in_file = anat
                    workflow.connect(tstat2, 'out_file', reg_glob_mask,
                                     'reference')
                    workflow.connect(invmat, 'out_file', reg_glob_mask,
                                     'in_matrix_file')

                    # threshold anatomical brain mask and intersect with functional brain mask
                    thr_glob_mask = pe.Node(fsl.ImageMaths(),
                                            name='threshold_global_mask')
                    thr_glob_mask.inputs.args = '-thr -0.1'
                    workflow.connect(reg_glob_mask, 'out_file', thr_glob_mask,
                                     'in_file')
                    workflow.connect(erode_brain, 'out_file', thr_glob_mask,
                                     'mask_file')

                    # extract global signal values
                    get_glob_noise = pe.Node(fsl.ImageMeants(),
                                             name='get_global_noise')
                    workflow.connect(skullstrip, 'out_file', get_glob_noise,
                                     'in_file')
                    workflow.connect(thr_glob_mask, 'out_file', get_glob_noise,
                                     'mask')

                    # compute global noise derivative
                    deriv_glob = pe.Node(afni.OneDToolPy(),
                                         name='deriv_global')
                    deriv_glob.inputs.set_nruns = 1
                    deriv_glob.inputs.derivative = True
                    if overwrite: deriv_glob.inputs.args = '-overwrite'
                    deriv_glob.inputs.out_file = 'global_derivatives.txt'
                    workflow.connect(get_glob_noise, 'out_file', deriv_glob,
                                     'in_file')

                    # scale global noise and get quadratures
                    quadr_glob = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                         name='quadr_glob')
                    quadr_glob.inputs.multicol = False
                    workflow.connect(get_glob_noise, 'out_file', quadr_glob,
                                     'in_file')

                    # scale global noise derivatives and get quadratures
                    quadr_glob_deriv = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                               name='quadr_glob_deriv')
                    quadr_glob_deriv.inputs.multicol = False
                    workflow.connect(deriv_glob, 'out_file', quadr_glob_deriv,
                                     'in_file')

                # ---------- regression matrix ----------

                # create bandpass regressors (not easily implemented as a regular workflow node)
                get_bandpass = pe.Node(interface=Function(
                    input_names=['minf', 'maxf', 'example_file', 'tr'],
                    output_names=['out_tuple'],
                    function=bandpass),
                                       name='bandpass_regressors')
                get_bandpass.inputs.minf = myminf
                get_bandpass.inputs.maxf = mymaxf
                get_bandpass.inputs.tr = mytr
                workflow.connect(norm_motion, 'out_file', get_bandpass,
                                 'example_file')

                # concatenate regressor time series
                cat_reg_name = 'cat_regressors'
                if global_reg: cat_reg_name = cat_reg_name + '_global'
                cat_reg = pe.Node(interface=Function(
                    input_names=[
                        'mot', 'motd', 'motq', 'motdq', 'CSF', 'CSFd', 'CSFq',
                        'CSFdq', 'WM', 'WMd', 'WMq', 'WMdq', 'include_global',
                        'glob', 'globd', 'globq', 'globdq'
                    ],
                    output_names=['reg_file_args'],
                    function=concatenate_regressors),
                                  name=cat_reg_name)
                cat_reg.inputs.include_global = global_reg
                workflow.connect(quadr_motion, 'out_file', cat_reg, 'mot')
                workflow.connect(quadr_motion_deriv, 'out_file', cat_reg,
                                 'motd')
                workflow.connect(quadr_motion, 'out_quadr_file', cat_reg,
                                 'motq')
                workflow.connect(quadr_motion_deriv, 'out_quadr_file', cat_reg,
                                 'motdq')
                workflow.connect(quadr_CSF, 'out_file', cat_reg, 'CSF')
                workflow.connect(quadr_CSF_deriv, 'out_file', cat_reg, 'CSFd')
                workflow.connect(quadr_CSF, 'out_quadr_file', cat_reg, 'CSFq')
                workflow.connect(quadr_CSF_deriv, 'out_quadr_file', cat_reg,
                                 'CSFdq')
                workflow.connect(quadr_WM, 'out_file', cat_reg, 'WM')
                workflow.connect(quadr_WM_deriv, 'out_file', cat_reg, 'WMd')
                workflow.connect(quadr_WM, 'out_quadr_file', cat_reg, 'WMq')
                workflow.connect(quadr_WM_deriv, 'out_quadr_file', cat_reg,
                                 'WMdq')
                if global_reg:
                    workflow.connect(quadr_glob, 'out_file', cat_reg, 'glob')
                    workflow.connect(quadr_glob_deriv, 'out_file', cat_reg,
                                     'globd')
                    workflow.connect(quadr_glob, 'out_quadr_file', cat_reg,
                                     'globq')
                    workflow.connect(quadr_glob_deriv, 'out_quadr_file',
                                     cat_reg, 'globdq')
                else:
                    cat_reg.inputs.glob = None
                    cat_reg.inputs.globd = None
                    cat_reg.inputs.globq = None
                    cat_reg.inputs.globdq = None

                # create regression matrix
                deconvolve_name = 'deconvolve'
                if global_reg: deconvolve_name = deconvolve_name + '_global'
                deconvolve = pe.Node(afni.Deconvolve(), name=deconvolve_name)
                deconvolve.inputs.polort = 2  # constant, linear and quadratic background signals removed
                deconvolve.inputs.fout = True
                deconvolve.inputs.tout = True
                deconvolve.inputs.x1D_stop = True
                deconvolve.inputs.force_TR = mytr
                workflow.connect(cat_reg, 'reg_file_args', deconvolve, 'args')
                workflow.connect(get_bandpass, 'out_tuple', deconvolve,
                                 'ortvec')
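                # Deconvolve.in_files expects a list, so str2list wraps the
                # single skullstripped file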
                workflow.connect([(skullstrip, deconvolve,
                                   [(('out_file', str2list), 'in_files')])])

                # regress out motion and other unwanted signals
                tproject_name = 'tproject'
                if global_reg: tproject_name = tproject_name + '_global'
                tproject = pe.Node(afni.TProject(outputtype="NIFTI_GZ"),
                                   name=tproject_name)
                tproject.inputs.TR = mytr
                tproject.inputs.polort = 0  # use matrix created with 3dDeconvolve, higher order polynomials not needed
                tproject.inputs.cenmode = 'NTRP'  # interpolate removed time points
                workflow.connect(get_censor, 'out_file', tproject, 'censor')
                workflow.connect(skullstrip, 'out_file', tproject, 'in_file')
                workflow.connect(automask, 'out_file', tproject, 'mask')
                workflow.connect(deconvolve, 'x1D', tproject, 'ort')

                # Transform all images
                warpall_name = 'warpall'
                if global_reg: warpall_name = warpall_name + '_global'
                warpall = pe.Node(interface=fsl.ApplyWarp(), name=warpall_name)
                warpall.inputs.ref_file = MNI_brain
                warpall.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpall,
                                 'premat')
                workflow.connect(tproject, 'out_file', warpall, 'in_file')
                workflow.connect(warpall, 'out_file', outputnode,
                                 'result_func')

                # Run workflow
                workflow.write_graph()
                workflow.run()

        print "FUNCTIONAL PREPROCESSING DONE! Results in ", results_path + subj + '/' + s
    except:
        print "Error with patient: ", subj
        traceback.print_exc()
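
The TStat -> Automask -> Calc('a*b') skullstrip idiom used above, as a hedged standalone sketch; 'epi.nii.gz' is a hypothetical input and AFNI must be installed.

from nipype.interfaces import afni

# temporal mean, then an automask from the mean, then mask * original series
mean = afni.TStat(args='-mean', outputtype='NIFTI_GZ',
                  in_file='epi.nii.gz').run()
mask = afni.Automask(dilate=1, outputtype='NIFTI_GZ',
                     in_file=mean.outputs.out_file).run()
brain = afni.Calc(expr='a*b', outputtype='NIFTI_GZ',
                  in_file_a='epi.nii.gz',
                  in_file_b=mask.outputs.out_file).run()
print(brain.outputs.out_file)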
コード例 #14
0
ファイル: functional.py プロジェクト: xwolfs/mriqc
def fmri_qc_workflow(dataset, settings, name='funcMRIQC'):
    """ The fMRI qc workflow """

    workflow = pe.Workflow(name=name)

    # Define workflow, inputs and outputs
    # 0. Get data, put it in RAS orientation
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
    WFLOGGER.info('Building fMRI QC workflow, datasets list: %s',
                  sorted([d.replace(settings['bids_dir'] + '/', '') for d in dataset]))
    inputnode.iterables = [('in_file', dataset)]


    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['qc', 'mosaic', 'out_group', 'out_dvars',
                'out_fd']), name='outputnode')


    reorient_and_discard = pe.Node(niu.Function(input_names=['in_file'],
                                                output_names=['exclude_index',
                                                              'out_file'],
                                                function=reorient_and_discard_non_steady),
                                   name='reorient_and_discard')

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    hmcwf = hmc_mcflirt()
    if settings.get('hmc_afni', False):
        hmcwf = hmc_afni(st_correct=settings.get('correct_slice_timing', False),
                         despike=settings.get('despike', False),
                         deoblique=settings.get('deoblique', False))

    # Set HMC settings
    hmcwf.inputs.inputnode.fd_radius = settings.get('fd_radius', DEFAULT_FD_RADIUS)
    if settings.get('start_idx'):
        hmcwf.inputs.inputnode.start_idx = settings['start_idx']
    if settings.get('stop_idx'):
        hmcwf.inputs.inputnode.stop_idx = settings['stop_idx']


    mean = pe.Node(afni.TStat(                   # 2. Compute mean fmri
        options='-mean', outputtype='NIFTI_GZ'), name='mean')
    bmw = fmri_bmsk_workflow(                   # 3. Compute brain mask
        use_bet=settings.get('use_bet', False))

    # EPI to MNI registration
    ema = epi_mni_align(ants_nthreads=settings['ants_nthreads'],
                        testing=settings.get('testing', False))

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nac.TSNR(), name='compute_tsnr')

    # 7. Compute IQMs
    iqmswf = compute_iqms(settings)
    # Reports
    repwf = individual_reports(settings)

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, reorient_and_discard, [('in_file', 'in_file')]),
        (reorient_and_discard, hmcwf, [('out_file', 'inputnode.in_file')]),
        (hmcwf, bmw, [('outputnode.out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (mean, ema, [('out_file', 'inputnode.epi_mean')]),
        (bmw, ema, [('outputnode.out_file', 'inputnode.epi_mask')]),
        (meta, iqmswf, [('subject_id', 'inputnode.subject_id'),
                        ('session_id', 'inputnode.session_id'),
                        ('task_id', 'inputnode.task_id'),
                        ('run_id', 'inputnode.run_id'),
                        ('out_dict', 'inputnode.metadata')]),
        (reorient_and_discard, iqmswf, [('out_file', 'inputnode.orig')]),
        (mean, iqmswf, [('out_file', 'inputnode.epi_mean')]),
        (hmcwf, iqmswf, [('outputnode.out_file', 'inputnode.hmc_epi'),
                         ('outputnode.out_fd', 'inputnode.hmc_fd')]),
        (bmw, iqmswf, [('outputnode.out_file', 'inputnode.brainmask')]),
        (tsnr, iqmswf, [('tsnr_file', 'inputnode.in_tsnr')]),
        (reorient_and_discard, repwf, [('out_file', 'inputnode.orig')]),
        (mean, repwf, [('out_file', 'inputnode.epi_mean')]),
        (tsnr, repwf, [('stddev_file', 'inputnode.in_stddev')]),
        (bmw, repwf, [('outputnode.out_file', 'inputnode.brainmask')]),
        (hmcwf, repwf, [('outputnode.out_fd', 'inputnode.hmc_fd')]),
        (ema, repwf, [('outputnode.epi_parc', 'inputnode.epi_parc'),
                      ('outputnode.report', 'inputnode.mni_report')]),
        (reorient_and_discard, repwf, [('exclude_index', 'inputnode.exclude_index')]),
        (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms'),
                         ('outputnode.out_dvars', 'inputnode.in_dvars'),
                         ('outputnode.out_spikes', 'inputnode.in_spikes'),
                         ('outputnode.outliers', 'inputnode.outliers')]),
        (hmcwf, outputnode, [('outputnode.out_fd', 'out_fd')]),
    ])

    return workflow
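
A minimal invocation of this version might look as follows (a sketch: the paths are hypothetical, and the settings keys are the ones read above — 'ants_nthreads' is accessed unconditionally, the rest fall back to defaults via settings.get):

from os.path import abspath

settings = {
    'bids_dir': abspath('data'),
    'ants_nthreads': 4,     # required by epi_mni_align
    'hmc_afni': False,      # default: MCFLIRT-based head motion correction
    'use_bet': False,       # AFNI-based brain masking
}
wf = fmri_qc_workflow([abspath('data/sub-001/func/sub-001_task-rest_bold.nii.gz')],
                      settings=settings)
wf.run()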
Code example #15
def fmri_qc_workflow(dataset, settings, name='funcMRIQC'):
    """
    The fMRI qc workflow

    .. workflow::

      import os.path as op
      from mriqc.workflows.functional import fmri_qc_workflow
      datadir = op.abspath('data')
      wf = fmri_qc_workflow([op.join(datadir, 'sub-001/func/sub-001_task-rest_bold.nii.gz')],
                            settings={'bids_dir': datadir,
                                      'output_dir': op.abspath('out'),
                                      'no_sub': True})


    """

    workflow = pe.Workflow(name=name)

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    # Define workflow, inputs and outputs
    # 0. Get data, put it in RAS orientation
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
    WFLOGGER.info('Building fMRI QC workflow, datasets list: %s',
                  [str(Path(d).relative_to(settings['bids_dir']))
                   for d in sorted(dataset)])
    inputnode.iterables = [('in_file', dataset)]

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['qc', 'mosaic', 'out_group', 'out_dvars',
                'out_fd']), name='outputnode')

    non_steady_state_detector = pe.Node(nac.NonSteadyStateDetector(),
                                        name="non_steady_state_detector")

    sanitize = pe.Node(niutils.SanitizeImage(), name="sanitize",
                       mem_gb=biggest_file_gb * 4.0)
    sanitize.inputs.max_32bit = settings.get("float32", DEFAULTS['float32'])

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    if settings.get('hmc_fsl', False):
        hmcwf = hmc_mcflirt(settings)
    else:
        hmcwf = hmc_afni(settings,
                         st_correct=settings.get('correct_slice_timing', False),
                         despike=settings.get('despike', False),
                         deoblique=settings.get('deoblique', False),
                         start_idx=settings.get('start_idx', None),
                         stop_idx=settings.get('stop_idx', None))

    # Set HMC settings
    hmcwf.inputs.inputnode.fd_radius = settings.get('fd_radius', DEFAULT_FD_RADIUS)

    # 2. Compute the mean fMRI volume
    mean = pe.Node(afni.TStat(options='-mean', outputtype='NIFTI_GZ'),
                   name='mean', mem_gb=biggest_file_gb * 1.5)
    skullstrip_epi = fmri_bmsk_workflow(use_bet=True)

    # EPI to MNI registration
    ema = epi_mni_align(settings)

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nac.TSNR(), name='compute_tsnr', mem_gb=biggest_file_gb * 2.5)

    # 7. Compute IQMs
    iqmswf = compute_iqms(settings)
    # Reports
    repwf = individual_reports(settings)

    workflow.connect([
        (inputnode, iqmswf, [('in_file', 'inputnode.in_file')]),
        (inputnode, sanitize, [('in_file', 'in_file')]),
        (inputnode, non_steady_state_detector, [('in_file', 'in_file')]),
        (non_steady_state_detector, sanitize, [('n_volumes_to_discard', 'n_volumes_to_discard')]),
        (sanitize, hmcwf, [('out_file', 'inputnode.in_file')]),
        (mean, skullstrip_epi, [('out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (mean, ema, [('out_file', 'inputnode.epi_mean')]),
        (skullstrip_epi, ema, [('outputnode.out_file', 'inputnode.epi_mask')]),
        (sanitize, iqmswf, [('out_file', 'inputnode.in_ras')]),
        (mean, iqmswf, [('out_file', 'inputnode.epi_mean')]),
        (hmcwf, iqmswf, [('outputnode.out_file', 'inputnode.hmc_epi'),
                         ('outputnode.out_fd', 'inputnode.hmc_fd')]),
        (skullstrip_epi, iqmswf, [('outputnode.out_file', 'inputnode.brainmask')]),
        (tsnr, iqmswf, [('tsnr_file', 'inputnode.in_tsnr')]),
        (sanitize, repwf, [('out_file', 'inputnode.in_ras')]),
        (mean, repwf, [('out_file', 'inputnode.epi_mean')]),
        (tsnr, repwf, [('stddev_file', 'inputnode.in_stddev')]),
        (skullstrip_epi, repwf, [('outputnode.out_file', 'inputnode.brainmask')]),
        (hmcwf, repwf, [('outputnode.out_fd', 'inputnode.hmc_fd'),
                        ('outputnode.out_file', 'inputnode.hmc_epi')]),
        (ema, repwf, [('outputnode.epi_parc', 'inputnode.epi_parc'),
                      ('outputnode.report', 'inputnode.mni_report')]),
        (non_steady_state_detector, iqmswf, [('n_volumes_to_discard', 'inputnode.exclude_index')]),
        (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms'),
                         ('outputnode.out_dvars', 'inputnode.in_dvars'),
                         ('outputnode.outliers', 'inputnode.outliers')]),
        (hmcwf, outputnode, [('outputnode.out_fd', 'out_fd')]),
    ])

    if settings.get('fft_spikes_detector', False):
        workflow.connect([
            (iqmswf, repwf, [('outputnode.out_spikes', 'inputnode.in_spikes'),
                             ('outputnode.out_fft', 'inputnode.in_fft')]),
        ])

    if settings.get('ica', False):
        melodic = pe.Node(nws.MELODICRPT(no_bet=True,
                                         no_mask=True,
                                         no_mm=True,
                                         compress_report=False,
                                         generate_report=True),
                          name="ICA", mem_gb=max(biggest_file_gb * 5, 8))
        workflow.connect([
            (sanitize, melodic, [('out_file', 'in_files')]),
            (skullstrip_epi, melodic, [('outputnode.out_file', 'report_mask')]),
            (melodic, repwf, [('out_report', 'inputnode.ica_report')])
        ])

    # Upload metrics
    if not settings.get('no_sub', False):
        from ..interfaces.webapi import UploadIQMs
        upldwf = pe.Node(UploadIQMs(), name='UploadMetrics')
        upldwf.inputs.url = settings.get('webapi_url')
        if settings.get('webapi_port'):
            upldwf.inputs.port = settings.get('webapi_port')
        upldwf.inputs.email = settings.get('email')
        upldwf.inputs.strict = settings.get('upload_strict', False)

        workflow.connect([
            (iqmswf, upldwf, [('outputnode.out_file', 'in_iqms')]),
        ])

    return workflow
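
All of the optional branches in this newer version are driven by the settings dictionary. A hypothetical configuration that enables them (keys taken from the settings.get calls above; values are illustrative only):

settings = {
    'bids_dir': '/data/bids',
    'output_dir': '/data/mriqc_out',
    'biggest_file_size_gb': 2,     # scales mem_gb on the heavy nodes
    'hmc_fsl': False,              # False selects the AFNI 3dvolreg pipeline
    'correct_slice_timing': True,  # enable slice-timing correction
    'fft_spikes_detector': True,   # wire the spikes/FFT plots into the report
    'ica': True,                   # add the MELODIC report node
    'no_sub': True,                # skip uploading IQMs to the web API
}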
Code example #16
File: functional.py  Project: xwolfs/mriqc
def hmc_afni(name='fMRI_HMC_afni', st_correct=False, despike=False, deoblique=False):
    """A head motion correction (HMC) workflow for functional scans"""

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_fd']), name='outputnode')

    drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                       name='drop_trs')

    reorient = pe.Node(afni.Resample(
        orientation='RPI', outputtype='NIFTI_GZ'), name='reorient')

    get_mean_RPI = pe.Node(afni.TStat(
        options='-mean', outputtype='NIFTI_GZ'), name='get_mean_RPI')

    # calculate hmc parameters
    hmc = pe.Node(
        afni.Volreg(args='-Fourier -twopass', zpad=4, outputtype='NIFTI_GZ'),
        name='motion_correct')

    get_mean_motion = get_mean_RPI.clone('get_mean_motion')
    hmc_A = hmc.clone('motion_correct_A')
    hmc_A.inputs.md1d_file = 'max_displacement.1D'

    # Compute the frame-wise displacement
    calc_fd = pe.Node(niu.Function(
        function=fd_jenkinson, input_names=['in_file', 'rmax'],
        output_names=['out_fd']), name='calc_fd')

    workflow.connect([
        (inputnode, drop_trs, [('in_file', 'in_file_a'),
                               ('start_idx', 'start_idx'),
                               ('stop_idx', 'stop_idx')]),
        (inputnode, calc_fd, [('fd_radius', 'rmax')]),
        (reorient, get_mean_RPI, [('out_file', 'in_file')]),
        (reorient, hmc, [('out_file', 'in_file')]),
        (get_mean_RPI, hmc, [('out_file', 'basefile')]),
        (hmc, get_mean_motion, [('out_file', 'in_file')]),
        (reorient, hmc_A, [('out_file', 'in_file')]),
        (get_mean_motion, hmc_A, [('out_file', 'basefile')]),
        (hmc_A, outputnode, [('out_file', 'out_file')]),
        (hmc_A, calc_fd, [('oned_matrix_save', 'in_file')]),
        (calc_fd, outputnode, [('out_fd', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')

    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')

    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike')

    if st_correct and despike and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif st_correct and despike:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, reorient, [('out_file', 'in_file')]),
        ])

    elif st_correct and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif st_correct:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, reorient, [('out_file', 'in_file')])
        ])

    elif despike and deoblique:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif despike:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, reorient, [('out_file', 'in_file')]),
        ])

    elif deoblique:

        workflow.connect([
            (drop_trs, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    else:

        workflow.connect([
            (drop_trs, reorient, [('out_file', 'in_file')]),
        ])

    return workflow
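
The eight-branch if/elif ladder above only decides which optional nodes sit between drop_trs and reorient. A behavior-preserving sketch of the same wiring that chains whichever nodes are enabled, using the nodes already defined in the function:

# Chain the enabled optional nodes between drop_trs and reorient,
# preserving the order: slice timing -> despike -> deoblique.
previous = drop_trs
for enabled, node in ((st_correct, st_corr),
                      (despike, despike_node),
                      (deoblique, deoblique_node)):
    if enabled:
        workflow.connect(previous, 'out_file', node, 'in_file')
        previous = node
workflow.connect(previous, 'out_file', reorient, 'in_file')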
Code example #17
File: fsl.py  Project: valeriejill/qsiprep
def init_fsl_hmc_wf(scan_groups,
                    b0_threshold,
                    impute_slice_threshold,
                    fmap_demean,
                    fmap_bspline,
                    eddy_config,
                    mem_gb=3,
                    omp_nthreads=1,
                    dwi_metadata=None,
                    slice_quality='outlier_n_sqr_stdev_map',
                    name="fsl_hmc_wf"):
    """
    This workflow controls the dwi preprocessing stages using FSL tools.


    **Parameters**
        scan_groups: dict
            dictionary with fieldmaps and warp space information for the dwis
        impute_slice_threshold: float
            threshold for a slice to be replaced with imputed values. Overrides the
            parameter in ``eddy_config`` if set to a number > 0.
        do_topup: bool
            Whether TOPUP should be performed before ``eddy``; requires an RPE
            series or an RPE b=0 image.
        eddy_config: str
            Path to a JSON file containing settings for the call to ``eddy``.


    **Inputs**

        dwi_files: list
            List of single-volume files across all DWI series
        b0_indices: list
            Indexes into ``dwi_files`` that correspond to b=0 volumes
        bvecs: list
            List of paths to single-line bvec files
        bvals: list
            List of paths to single-line bval files
        b0_images: list
            List of single b=0 volumes
        original_files: list
            List of the files from which each DWI volume came.

    **Outputs**


    **Subworkflows**

    """

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['dwi_files', 'b0_indices', 'bvec_files', 'bval_files', 'b0_images',
                    'original_files', 't1_brain', 't1_2_mni_reverse_transform']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["b0_template", "b0_template_mask", "pre_sdc_template",
                    "hmc_optimization_data", "sdc_method", 'slice_quality', 'motion_params',
                    "cnr_map", "bvec_files_to_transform", "dwi_files_to_transform", "b0_indices",
                    "to_dwi_ref_affines", "to_dwi_ref_warps", "rpe_b0_info"]),
        name='outputnode')

    workflow = Workflow(name=name)
    gather_inputs = pe.Node(GatherEddyInputs(), name="gather_inputs")
    if eddy_config is None:
        # load from the defaults
        eddy_cfg_file = pkgr_fn('qsiprep.data', 'eddy_params.json')
    else:
        eddy_cfg_file = eddy_config

    with open(eddy_cfg_file, "r") as f:
        eddy_args = json.load(f)

    eddy = pe.Node(ExtendedEddy(**eddy_args), name="eddy")
    dwi_merge = pe.Node(MergeDWIs(), name="dwi_merge")
    spm_motion = pe.Node(Eddy2SPMMotion(), name="spm_motion")

    workflow.connect([
        (inputnode, gather_inputs, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('b0_indices', 'b0_indices'),
            ('b0_images', 'b0_images'),
            ('original_files', 'original_files')]),
        (inputnode, dwi_merge, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('original_files', 'bids_dwi_files')]),
        (gather_inputs, eddy, [
            ('eddy_indices', 'in_index'),
            ('eddy_acqp', 'in_acqp')]),
        (dwi_merge, eddy, [
            ('out_dwi', 'in_file'),
            ('out_bval', 'in_bval'),
            ('out_bvec', 'in_bvec')]),
        (gather_inputs, outputnode, [
            ('pre_topup_image', 'pre_sdc_template'),
            ('forward_warps', 'to_dwi_ref_warps'),
            ('forward_transforms', 'to_dwi_ref_affines')])
    ])

    # If a topupref is provided, use it for TOPUP
    rpe_b0 = None
    fieldmap_type = scan_groups['fieldmap_info']['type']
    if fieldmap_type == 'epi':
        rpe_b0 = scan_groups['fieldmap_info']['epi']
    elif fieldmap_type == 'rpe_series':
        rpe_b0 = scan_groups['fieldmap_info']['rpe_series'][0]

    if rpe_b0 is not None:
        outputnode.inputs.sdc_method = "TOPUP"
        gather_inputs.inputs.rpe_b0 = rpe_b0
        prepare_rpe_b0 = pe.Node(B0RPEFieldmap(b0_file=rpe_b0), name="prepare_rpe_b0")

        topup = pe.Node(fsl.TOPUP(), name="topup")
        unwarped_mean = pe.Node(afni.TStat(outputtype='NIFTI_GZ'), name='unwarped_mean')
        unwarped_enhance = init_enhance_and_skullstrip_dwi_wf(name='unwarped_enhance')

        workflow.connect([
            (prepare_rpe_b0, outputnode, [('fmap_info', 'inputnode.rpe_b0_info')]),
            (prepare_rpe_b0, gather_inputs, [('fmap_file', 'rpe_b0')]),
            (gather_inputs, topup, [
                ('topup_datain', 'encoding_file'),
                ('topup_imain', 'in_file'),
                ('topup_config', 'config')]),
            (topup, unwarped_mean, [('out_corrected', 'in_file')]),
            (unwarped_mean, unwarped_enhance, [('out_file', 'inputnode.in_file')]),
            (unwarped_enhance, outputnode, [
                ('outputnode.skull_stripped_file', 'b0_template')]),
            (unwarped_enhance, outputnode, [
                ('outputnode.mask_file', 'b0_template_mask')]),
            (unwarped_enhance, eddy, [
                ('outputnode.mask_file', 'in_mask')]),
            (topup, eddy, [
                ('out_movpar', 'in_topup_movpar'),
                ('out_fieldcoef', 'in_topup_fieldcoef')]),
        ])
    elif fieldmap_type in ('fieldmap', 'phasediff', 'phase'):
        outputnode.inputs.sdc_method = fieldmap_type
        b0_enhance = init_enhance_and_skullstrip_dwi_wf(name='b0_enhance')
        b0_sdc_wf = init_sdc_wf(
            scan_groups['fieldmap_info'], dwi_metadata, omp_nthreads=omp_nthreads,
            fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)

        workflow.connect([
            (gather_inputs, b0_enhance, [('pre_topup_image', 'inputnode.in_file')]),
            (b0_enhance, b0_sdc_wf, [
                ('outputnode.bias_corrected_file', 'inputnode.b0_ref'),
                ('outputnode.skull_stripped_file', 'inputnode.b0_ref_brain'),
                ('outputnode.mask_file', 'inputnode.b0_mask')]),
            (inputnode, b0_sdc_wf, [
                ('t1_brain', 'inputnode.t1_brain'),
                ('t1_2_mni_reverse_transform',
                 'inputnode.t1_2_mni_reverse_transform')]),
            (b0_sdc_wf, outputnode, [
                ('outputnode.method', 'sdc_method'),
                ('outputnode.b0_ref', 'b0_template'),
                ('outputnode.b0_mask', 'b0_template_mask')]),
            (b0_sdc_wf, eddy, [
                ('outputnode.fieldmap_hz', 'field'),
                ('outputnode.b0_mask', 'in_mask')])

        ])
    else:
        outputnode.inputs.sdc_method = "None"
        b0_enhance = init_enhance_and_skullstrip_dwi_wf(name='b0_enhance')
        workflow.connect([
            (gather_inputs, b0_enhance, [('pre_topup_image', 'inputnode.in_file')]),
            (b0_enhance, outputnode, [
                ('outputnode.skull_stripped_file', 'b0_template')]),
            (b0_enhance, outputnode, [
                ('outputnode.mask_file', 'b0_template_mask')]),
            (b0_enhance, eddy, [
                ('outputnode.mask_file', 'in_mask')]),
        ])

    # Organize outputs for the rest of the pipeline
    split_eddy = pe.Node(SplitDWIs(b0_threshold=b0_threshold), name="split_eddy")
    workflow.connect([
        (eddy, split_eddy, [
            ('out_rotated_bvecs', 'bvec_file'),
            ('out_corrected', 'dwi_file')]),
        (dwi_merge, split_eddy, [('out_bval', 'bval_file')]),
        (dwi_merge, outputnode, [
            ('original_images', 'original_files')]),
        (split_eddy, outputnode, [
            ('dwi_files', 'dwi_files_to_transform'),
            ('bvec_files', 'bvec_files_to_transform')]),
        (eddy, outputnode, [
            (slice_quality, 'slice_quality'),
            ('out_cnr_maps', 'cnr_map'),
            (slice_quality, 'hmc_optimization_data')]),
        (eddy, spm_motion, [('out_parameter', 'eddy_motion')]),
        (spm_motion, outputnode, [('spm_motion_file', 'motion_params')])
    ])

    return workflow
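
When eddy_config is None, the workflow falls back to the eddy_params.json shipped with qsiprep. A custom file has the same shape: a flat JSON object whose keys match inputs of nipype's fsl.Eddy interface. A hypothetical minimal example (not the packaged defaults):

# my_eddy_config.json (hypothetical):
# {
#   "repol": true,        # replace outlier slices
#   "cnr_maps": true,     # write contrast-to-noise maps
#   "flm": "quadratic",   # first-level model
#   "method": "jac",      # final resampling method
#   "num_threads": 1
# }
wf = init_fsl_hmc_wf(scan_groups=scan_groups,
                     b0_threshold=100,
                     impute_slice_threshold=0.0,
                     fmap_demean=True,
                     fmap_bspline=False,
                     eddy_config='my_eddy_config.json')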
Code example #18
def init_falff_wf(workdir: str | Path,
                  feature=None,
                  fwhm=None,
                  memcalc=MemoryCalculator.default()):
    """
    Calculate amplitude of low frequency oscillations (ALFF) and
    fractional ALFF (fALFF) maps

    Returns
    -------
    workflow : workflow object
        ALFF workflow

    Notes
    -----
    Adapted from
    <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/alff/alff.py>

    """
    if feature is not None:
        name = f"{format_workflow(feature.name)}"
    else:
        name = "falff"
    if fwhm is not None:
        name = f"{name}_{int(float(fwhm) * 1e3):d}"
    name = f"{name}_wf"
    workflow = pe.Workflow(name=name)

    # input
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["tags", "vals", "metadata", "bold", "mask", "fwhm"]),
        name="inputnode",
    )
    unfiltered_inputnode = pe.Node(
        niu.IdentityInterface(fields=["bold", "mask"]),
        name="unfiltered_inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]),
                         name="outputnode")

    if fwhm is not None:
        inputnode.inputs.fwhm = float(fwhm)
    elif feature is not None and hasattr(feature, "smoothing"):
        inputnode.inputs.fwhm = feature.smoothing.get("fwhm")

    #
    make_resultdicts = pe.Node(
        MakeResultdicts(tagkeys=["feature"],
                        imagekeys=["alff", "falff", "mask"]),
        name="make_resultdicts",
    )
    if feature is not None:
        make_resultdicts.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts, "metadata")
    workflow.connect(inputnode, "mask", make_resultdicts, "mask")

    workflow.connect(make_resultdicts, "resultdicts", outputnode,
                     "resultdicts")

    #
    resultdict_datasink = pe.Node(ResultdictDatasink(base_directory=workdir),
                                  name="resultdict_datasink")
    workflow.connect(make_resultdicts, "resultdicts", resultdict_datasink,
                     "indicts")

    # standard deviation of the filtered image
    stddev_filtered = pe.Node(afni.TStat(),
                              name="stddev_filtered",
                              mem_gb=memcalc.series_std_gb)
    stddev_filtered.inputs.outputtype = "NIFTI_GZ"
    stddev_filtered.inputs.options = "-stdev"
    workflow.connect(inputnode, "bold", stddev_filtered, "in_file")
    workflow.connect(inputnode, "mask", stddev_filtered, "mask")

    # standard deviation of the unfiltered image
    stddev_unfiltered = pe.Node(afni.TStat(),
                                name="stddev_unfiltered",
                                mem_gb=memcalc.series_std_gb)
    stddev_unfiltered.inputs.outputtype = "NIFTI_GZ"
    stddev_unfiltered.inputs.options = "-stdev"
    workflow.connect(unfiltered_inputnode, "bold", stddev_unfiltered,
                     "in_file")
    workflow.connect(unfiltered_inputnode, "mask", stddev_unfiltered, "mask")

    falff = pe.Node(afni.Calc(), name="falff", mem_gb=memcalc.volume_std_gb)
    falff.inputs.args = "-float"
    falff.inputs.expr = "(1.0*bool(a))*((1.0*b)/(1.0*c))"
    falff.inputs.outputtype = "NIFTI_GZ"
    workflow.connect(inputnode, "mask", falff, "in_file_a")
    workflow.connect(stddev_filtered, "out_file", falff, "in_file_b")
    workflow.connect(stddev_unfiltered, "out_file", falff, "in_file_c")

    #
    merge = pe.Node(niu.Merge(2), name="merge")
    workflow.connect(stddev_filtered, "out_file", merge, "in1")
    workflow.connect(falff, "out_file", merge, "in2")

    smooth = pe.MapNode(LazyBlurToFWHM(outputtype="NIFTI_GZ"),
                        iterfield="in_file",
                        name="smooth")
    workflow.connect(merge, "out", smooth, "in_file")
    workflow.connect(inputnode, "mask", smooth, "mask")
    workflow.connect(inputnode, "fwhm", smooth, "fwhm")

    zscore = pe.MapNode(ZScore(),
                        iterfield="in_file",
                        name="zscore",
                        mem_gb=memcalc.volume_std_gb)
    workflow.connect(smooth, "out_file", zscore, "in_file")
    workflow.connect(inputnode, "mask", zscore, "mask")

    split = pe.Node(niu.Split(splits=[1, 1]), name="split")
    workflow.connect(zscore, "out_file", split, "inlist")

    workflow.connect(split, "out1", make_resultdicts, "alff")
    workflow.connect(split, "out2", make_resultdicts, "falff")

    return workflow
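
The Calc expression above encodes the definition fALFF = std(filtered) / std(unfiltered), evaluated only inside the brain mask. The same voxelwise computation in plain numpy (a sketch, assuming 4-D time-series arrays and a boolean 3-D mask):

import numpy as np

def falff_map(filtered, unfiltered, mask):
    """Voxelwise fALFF: band-limited std divided by total std."""
    alff = filtered.std(axis=-1)      # std over time of the filtered series
    total = unfiltered.std(axis=-1)   # std over time of the unfiltered series
    out = np.zeros_like(alff)
    valid = mask & (total > 0)        # avoid division by zero outside the brain
    out[valid] = alff[valid] / total[valid]
    return out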
Code example #19
	workflow2 = pe.Workflow(name=data + '_2')
	workflow2.base_dir = '.'

	inputnode2 = pe.Node(interface=util.IdentityInterface(fields=['drifter_result']), name='inputspec2')
	outputnode2 = pe.Node(interface=util.IdentityInterface(fields=['result_func']), name='outputspec2')

	inputnode2.inputs.drifter_result = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'

	# Call fslcpgeom source dest; source is the reorient output nii.gz file and dest is the drifter folder nii.gz file
	reoriented_file = results_path + '/' + data + '_1/reorient/corr_epi_reoriented.nii.gz'
	drifted_file = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'
	call(["fslcpgeom", reoriented_file, drifted_file])

	# AFNI skullstrip and mean image skullstrip
	mean_epi = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"), name='mean_epi')
	skullstrip3D = pe.Node(interface=afni.Automask(dilate=1, outputtype="NIFTI_GZ"), name='skullstrip3D')
	skullstrip4D = pe.Node(interface=afni.Calc(expr='a*b', outputtype="NIFTI_GZ"), name='skullstrip4D')
	mean_epi_brain = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"), name='mean_epi_brain')

	workflow2.connect(inputnode2, 'drifter_result', mean_epi, 'in_file')
	workflow2.connect(mean_epi, 'out_file', skullstrip3D, 'in_file')
	workflow2.connect(skullstrip3D, 'out_file', skullstrip4D, 'in_file_b')
	workflow2.connect(inputnode2, 'drifter_result', skullstrip4D, 'in_file_a')
	workflow2.connect(skullstrip4D, 'out_file', mean_epi_brain, 'in_file')

	# Remove the first n (3) volumes
	remove_first_vols = pe.Node(interface=remove3vol(begin_index=3), name='remove3vol')
	workflow2.connect(skullstrip4D, 'out_file', remove_first_vols, 'in_file')

	# Spatial smoothing, kernel sigma 2.00 mm (5 mm is too much)