Example #1
import os

from nipype.caching import Memory
from nipype.interfaces import afni
from nipype.utils.filemanip import fname_presuffix


def _func_to_template(func_coreg_filename,
                      template_filename,
                      write_dir,
                      func_to_anat_oned_filename,
                      anat_to_template_oned_filename,
                      anat_to_template_warp_filename,
                      voxel_size=None,
                      caching=False,
                      verbose=True):
    """ Applies successive transforms to coregistered functional to put it in
    template space.

    Parameters
    ----------
    func_coreg_filename : str
        Path to functional volume, coregistered to a common space with the
        anatomical volume.

    template_filename : str
        Template to register the functional to.

    write_dir : str
        Directory to save the output and temporary images.

    func_to_anat_oned_filename : str
        Path to the affine 1D transform from functional to coregistration
        space.

    anat_to_template_oned_filename : str
        Path to the affine 1D transform from anatomical to template space.

    anat_to_template_warp_filename : str
        Path to the warp transform from anatomical to template space.

    voxel_size : 3-tuple of floats, optional
        Voxel size of the registered functional, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.
    """
    environ = {}
    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        catmatvec = memory.cache(afni.CatMatvec)
        allineate = memory.cache(afni.Allineate)
        warp_apply = memory.cache(afni.NwarpApply)
        for step in [resample, allineate, warp_apply]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        environ['AFNI_DECONFLICT'] = 'OVERWRITE'

    current_dir = os.getcwd()
    os.chdir(write_dir)  # XXX to remove
    normalized_filename = fname_presuffix(func_coreg_filename,
                                          suffix='_normalized')
    if voxel_size is None:
        func_template_filename = template_filename
    else:
        out_resample = resample(in_file=template_filename,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        func_template_filename = out_resample.outputs.out_file

    if anat_to_template_warp_filename is None:
        affine_transform_filename = fname_presuffix(func_to_anat_oned_filename,
                                                    suffix='_to_template')
        _ = catmatvec(in_file=[(anat_to_template_oned_filename, 'ONELINE'),
                               (func_to_anat_oned_filename, 'ONELINE')],
                      oneline=True,
                      out_file=affine_transform_filename,
                      environ=environ)
        _ = allineate(in_file=func_coreg_filename,
                      master=func_template_filename,
                      in_matrix=affine_transform_filename,
                      out_file=normalized_filename,
                      environ=environ)
    else:
        warp = "'{0} {1} {2}'".format(anat_to_template_warp_filename,
                                      anat_to_template_oned_filename,
                                      func_to_anat_oned_filename)

        _ = warp_apply(in_file=func_coreg_filename,
                       master=func_template_filename,
                       warp=warp,
                       out_file=normalized_filename,
                       environ=environ)
    os.chdir(current_dir)
    return normalized_filename
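
A minimal usage sketch for the function above; every path is hypothetical, and
the 1D/warp files are assumed to come from earlier coregistration and
template-registration steps:

normalized = _func_to_template(
    func_coreg_filename='func_coreg.nii.gz',
    template_filename='template.nii.gz',
    write_dir='/tmp/work',
    func_to_anat_oned_filename='func_to_anat.aff12.1D',
    anat_to_template_oned_filename='anat_to_template.aff12.1D',
    anat_to_template_warp_filename='anat_to_template_WARP.nii.gz',
    voxel_size=(.3, .3, .3))  # resample the template to .3 mm voxels
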
Example #2
import os

import pandas as pd
import nipype.interfaces.io as nio
from nipype import Node, Workflow
from nipype.interfaces import afni
from nipype.interfaces.utility import IdentityInterface


def run_workflow(session=None, csv_file=None, undist=True):
    from nipype import config
    #config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = 'derivatives/featpreproc/warp2nmt/highpassed_files'
    working_dir = 'workingdirs'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]), name="infosource")

    if csv_file is not None:
        print('=== reading csv ===')
        # Read the csv and use pandas to set up image and ev-processing
        df = pd.read_csv(csv_file)
        # initialize lists
        sub_img = []
        ses_img = []
        run_img = []
        ref_img = []

        # fill lists to iterate mapnodes
        for index, row in df.iterrows():
            for r in row.run.strip("[]").split(" "):
                sub_img.append(row.subject)
                ses_img.append(row.session)
                run_img.append(r)
                if 'refsubject' in df.columns:
                    if str(row.refsubject) == 'nan':
                        # empty field (pandas reads it as NaN)
                        ref_img.append(row.subject)
                    else:
                        # non-empty field
                        ref_img.append(row.refsubject)
                else:
                    ref_img.append(row.subject)

        infosource.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
        infosource.synchronize = True
    else:
        print("No csv-file specified. Don't know what data to process.")

    # Use undistorted EPIs if requested (these need to be generated
    # with the undistort workflow first).
    if undist:
        func_flag = 'preproc_undistort'
    else:
        func_flag = 'preproc'
    
    # SelectFiles
    templates = {
        'image': 
        'derivatives/featpreproc/highpassed_files/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_' + func_flag + '_mc_smooth_mask_gms_tempfilt_maths.nii.gz',

        'imagewarp': 
        'reference-vols/sub-{refsubject_id}/transforms/'
        'sub-{subject_id}_func2nmt_WARP.nii.gz',

        'ref_image': 
        'reference-vols/sub-{refsubject_id}/transforms/'
        'sub-{subject_id}_func2nmt_res-1x1x1.nii.gz',
    }

    inputfiles = Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir),
        name="input_files")


    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('_Nwarp.nii.gz', '_NMTv2.nii.gz'),
        # remove subdirectories:
        ('highpassed_files/reg_func', 'highpassed_files'),
    ]  
       
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1/func'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]


    # -------------------------------------------- Create Pipeline
    warp2nmt = Workflow(
        name='warp2nmt',
        base_dir=os.path.join(ds_root, working_dir))

    warp2nmt.connect([
        (infosource, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
       
    nwarp = Node(afni.NwarpApply(out_file='%s_Nwarp.nii.gz'), name='nwarp')
    warp2nmt.connect(inputfiles, 'image', nwarp, 'in_file')
    warp2nmt.connect(inputfiles, 'imagewarp', nwarp, 'warp')
    warp2nmt.connect(inputfiles, 'ref_image', nwarp, 'master')
    warp2nmt.connect(nwarp, 'out_file', outputfiles, 'reg_func')

    warp2nmt.stop_on_first_crash = False  # True
    warp2nmt.keep_inputs = True
    warp2nmt.remove_unnecessary_outputs = False
    warp2nmt.write_graph()
    warp2nmt.run()
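
A hypothetical CSV driving this workflow could look as follows: the `run`
column is bracketed and space-separated, matching the `strip("[]").split(" ")`
parsing above, and `refsubject` is optional (empty means "use the subject
itself"). The file name is an assumption:

# sub_ses_run.csv (hypothetical):
#   subject,session,run,refsubject
#   eddy,20170607,[01 02],
#   danny,20180117,[01],eddy
run_workflow(csv_file='sub_ses_run.csv', undist=True)
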
Example #3
import os

from nipype.caching import Memory
from nipype.interfaces import afni
from nipype.utils.filemanip import fname_presuffix
# fix_obliquity is a sammba-mri internal helper; the import path below
# is an assumption
from sammba.registration.utils import fix_obliquity


def _apply_transforms(to_register_filename,
                      target_filename,
                      write_dir,
                      transforms,
                      transformed_filename=None,
                      transforms_kind='nonlinear',
                      interpolation=None,
                      voxel_size=None,
                      inverse=False,
                      caching=False,
                      verbose=True):
    """ Applies successive transforms to a given image to put it in
    template space.

    Parameters
    ----------
    to_register_filename : str
        Path to the source file to register.

    target_filename : str
        Reference file to register to.

    transforms : list
        List of transforms in the order of 3dNwarpApply application: the
        first one must be in the target space and the last one must be in
        the source space.

    transformed_filename : str, optional
        Path to the output registered file

    inverse : bool, optional
        If True, after the transforms composition is computed, invert it.
        If the input transforms would take a dataset from space A to B,
        then the inverted transform will do the reverse.

    interpolation : one of {'nearestneighbour', 'linear', 'cubic', 'quintic',
                            'wsinc5'} or None, optional
        Interpolation type. If None, AFNI defaults are used.

    voxel_size : 3-tuple of floats, optional
        Voxel size of the registered functional, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.
    """
    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        catmatvec = memory.cache(afni.CatMatvec)
        allineate = memory.cache(afni.Allineate)
        warp_apply = memory.cache(afni.NwarpApply)
        resample = memory.cache(afni.Resample)
        for step in [resample, allineate, warp_apply]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run

    if transformed_filename is None:
        target_basename = os.path.basename(target_filename)
        target_basename = os.path.splitext(target_basename)[0]
        target_basename = os.path.splitext(target_basename)[0]
        transformed_filename = fname_presuffix(to_register_filename,
                                               suffix='_to_' + target_basename,
                                               newpath=write_dir)

    if voxel_size is None:
        resampled_target_filename = target_filename
    else:
        out_resample = resample(in_file=target_filename,
                                voxel_size=voxel_size,
                                out_file=fname_presuffix(target_filename,
                                                         suffix='_resampled',
                                                         newpath=write_dir),
                                environ=environ)
        resampled_target_filename = out_resample.outputs.out_file
    if transforms_kind != 'nonlinear':
        affine_transform_filename = fname_presuffix(transformed_filename,
                                                    suffix='.aff12.1D',
                                                    use_ext=False)
        out_catmatvec = catmatvec(in_file=[(transform, 'ONELINE')
                                           for transform in transforms],
                                  oneline=True,
                                  out_file=affine_transform_filename,
                                  environ=environ)
        if inverse:
            affine_transform_filename = fname_presuffix(transformed_filename,
                                                        suffix='_INV.aff12.1D',
                                                        use_ext=False)
            _ = catmatvec(in_file=[(out_catmatvec.outputs.out_file, 'I')],
                          oneline=True,
                          out_file=affine_transform_filename,
                          environ=environ)
        if interpolation is None:
            _ = allineate(in_file=to_register_filename,
                          master=resampled_target_filename,
                          in_matrix=affine_transform_filename,
                          out_file=transformed_filename,
                          environ=environ)
        else:
            _ = allineate(in_file=to_register_filename,
                          master=resampled_target_filename,
                          in_matrix=affine_transform_filename,
                          final_interpolation=interpolation,
                          out_file=transformed_filename,
                          environ=environ)
    else:
        warp = "'"
        warp += " ".join(transforms)
        warp += "'"
        if interpolation is None:
            _ = warp_apply(in_file=to_register_filename,
                           master=resampled_target_filename,
                           warp=warp,
                           inv_warp=inverse,
                           out_file=transformed_filename,
                           environ=environ)
        else:
            _ = warp_apply(in_file=to_register_filename,
                           master=resampled_target_filename,
                           warp=warp,
                           inv_warp=inverse,
                           interp=interpolation,
                           out_file=transformed_filename,
                           environ=environ)

    # XXX obliquity information is lost if resampling is done
    transformed_filename = fix_obliquity(transformed_filename,
                                         resampled_target_filename,
                                         verbose=verbose,
                                         caching=caching,
                                         caching_dir=write_dir,
                                         environ=environ)
    return transformed_filename
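
A usage sketch with hypothetical file names, chaining a nonlinear warp with
two affines (ordered from target space to source space, as the docstring
requires):

transformed = _apply_transforms(
    'func_coreg.nii.gz', 'template.nii.gz', '/tmp/work',
    transforms=['anat_to_template_WARP.nii.gz',
                'anat_to_template.aff12.1D',
                'func_to_anat.aff12.1D'],
    transforms_kind='nonlinear',
    interpolation='wsinc5',
    voxel_size=(.3, .3, .3))
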
Example #4
import os

import nibabel
import numpy as np
from nipype.caching import Memory
from nipype.interfaces import afni, ants, fsl
from nipype.utils.filemanip import fname_presuffix
# sammba-mri internal helpers; the import paths below are assumptions
from sammba import segmentation
from sammba.registration.utils import fix_obliquity


def coregister_fmri_session(session_data,
                            t_r,
                            write_dir,
                            brain_volume,
                            use_rats_tool=True,
                            slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False,
                            voxel_size_x=.1,
                            voxel_size_y=.1,
                            verbose=True,
                            **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a rigid body
    registration and then on a per-slice basis (only a fine correction, this is
    mostly for correction of EPI distortion).


    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as its identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is performed
        prior to the warp. Useful if the image headers have missing or wrong
        information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    the same sequence with each animal_data updated: the following attributes
    are added
        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Cannot locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Cannot locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [
                tshift, volreg, allineate, allineate2, tstat, compute_mask,
                calc, unifize, resample, slicer, warp_apply, qwarp, merge
        ]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove after the bug is fixed
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate removes the obliquity. This is not a good way to re-add it,
    # as it would remove motion-correction info from the header if it were an
    # AFNI file... as it happens it's NIfTI, which does not store that, so it
    # is irrelevant here!
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename,
                      args='-mean',
                      outputtype='NIFTI_GZ',
                      environ=environ)

    # Update outputs
    output_files.extend([
        thresholded_filename, out_volreg.outputs.oned_matrix_save,
        out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
        allineated_filename, out_tstat.outputs.out_file
    ])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensity bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this sense
        out_allineate = allineate2(
            in_file=out_calc_func.outputs.out_file,
            reference=out_calc_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_calc_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_calc_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([
            out_compute_mask_func.outputs.out_file,
            out_calc_func.outputs.out_file,
            out_compute_mask_anat.outputs.out_file,
            out_calc_anat.outputs.out_file, rigid_transform_file,
            out_allineate.outputs.out_file
        ])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(in_file=unbiased_anat_filename,
                                  master=unbiased_func_filename,
                                  in_matrix=out_catmatvec.outputs.out_file,
                                  out_file=fname_presuffix(
                                      unbiased_anat_filename,
                                      suffix='_shr_in_func_space'),
                                  environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(registered_anat_filename,
                                                     unbiased_func_filename,
                                                     verbose=verbose)

    # Concatenate all the anat to func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat',
                                   use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # The lines below deal with slices where there is no signal (for example
    # the rostral end of some anatomicals).

    # The inverse warp frequently fails; resampling can help it work better.
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
             resampled_bias_corrected_filenames,
             resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
            '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
            '-parfix 7 0 -parfix 9 0 '
            '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # There are files generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source, suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii',
                            use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D',
                            use_ext=False)
        ])

    # Resample the mean volume back to the initial resolution
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename,
         resampled_warped_slice) in zip(sliced_registered_anat_filenames,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(sliced_func_filenames,
                                                     warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices !
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames + warped_slices +
                        warp_filenames + resampled_warped_slices_oblique +
                        sliced_func_filenames + warped_func_slices)
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
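
A hypothetical call; sammba-mri provides a session object exposing `func`,
`anat` and `animal_id` attributes (the `FMRISession` name and import path are
assumptions):

from sammba.registration import FMRISession  # assumed import path

session = FMRISession(func='func.nii.gz', anat='anat.nii.gz',
                      animal_id='mouse01')
coregister_fmri_session(session, t_r=1., write_dir='/tmp/coreg',
                        brain_volume=400,  # in mm3, typical for mouse
                        caching=True)
# session now carries coreg_func_, coreg_anat_ and coreg_transform_
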
Example #5
import os

from nipype.caching import Memory
from nipype.interfaces import afni
from nipype.utils.filemanip import fname_presuffix


def _transform_to_template(to_register_filename,
                           template_filename,
                           write_dir,
                           func_to_anat_oned_filename,
                           anat_to_template_oned_filename,
                           anat_to_template_warp_filename,
                           voxel_size=None,
                           caching=False,
                           verbose=True):
    """ Applies successive transforms to a given image to put it in
    template space.

    Parameters
    ----------
    to_register_filename : str
        Path to functional volume, coregistered to a common space with the
        anatomical volume.

    template_filename : str
        Template to register the functional to.

    write_dir : str
        Directory to save the output and temporary images.

    func_to_anat_oned_filename : str
        Coregistration transform.

    anat_to_template_oned_filename : str
        Path to the affine 1D transform from anatomical to template space.

    anat_to_template_warp_filename : str
        Path to the warp transform from anatomical to template space.

    voxel_size : 3-tuple of floats, optional
        Voxel size of the registered functional, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.
    """
    environ = {}
    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        warp_apply = memory.cache(afni.NwarpApply)
        resample = memory.cache(afni.Resample)
        warp_apply.interface().set_default_terminal_output(terminal_output)
        resample.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        environ['AFNI_DECONFLICT'] = 'OVERWRITE'

    current_dir = os.getcwd()
    os.chdir(write_dir)
    normalized_filename = fname_presuffix(to_register_filename,
                                          suffix='_normalized')

    if voxel_size is None:
        resampled_template_filename = template_filename
    else:
        out_resample = resample(in_file=template_filename,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ')
        resampled_template_filename = out_resample.outputs.out_file

    transforms = [
        anat_to_template_warp_filename, anat_to_template_oned_filename,
        func_to_anat_oned_filename
    ]
    warp = "'"
    warp += " ".join(transforms)
    warp += "'"
    _ = warp_apply(in_file=to_register_filename,
                   master=resampled_template_filename,
                   warp=warp,
                   out_file=normalized_filename)
    os.chdir(current_dir)
    return normalized_filename
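
A minimal usage sketch with hypothetical paths, mirroring the transform chain
built inside the function (warp first, then the two affines):

normalized = _transform_to_template(
    'func_coreg.nii.gz', 'template.nii.gz', '/tmp/work',
    func_to_anat_oned_filename='func_to_anat.aff12.1D',
    anat_to_template_oned_filename='anat_to_template.aff12.1D',
    anat_to_template_warp_filename='anat_to_template_WARP.nii.gz')
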
Example #6
import os

import nibabel
from nipype.caching import Memory
from nipype.interfaces import afni, fsl
from nipype.utils.filemanip import fname_presuffix
# sammba-mri internal helpers; the import paths below are assumptions
from sammba.registration.utils import (_get_fsl_slice_output_files,
                                       fix_obliquity)


def _apply_perslice_warp(apply_to_file,
                         warp_files,
                         voxel_size_x,
                         voxel_size_y,
                         write_dir=None,
                         caching=False,
                         verbose=True,
                         terminal_output='allatonce',
                         environ=None):

    # Apply the precomputed warp slice by slice
    if write_dir is None:
        write_dir = os.path.dirname(apply_to_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(fsl.Slice)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(fsl.Merge)
        for step in [resample, slicer, warp_apply, qwarp, merge]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = fsl.Slice(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = fsl.Merge(terminal_output=terminal_output).run

    apply_to_img = nibabel.load(apply_to_file)
    n_slices = apply_to_img.header.get_data_shape()[2]

    if len(warp_files) != n_slices:
        raise ValueError('number of warp files {0} does not match number of '
                         'slices {1}'.format(len(warp_files), n_slices))
    # Slice anatomical image
    output_files = []
    per_slice_dir = os.path.join(write_dir, 'per_slice')
    if not os.path.isdir(per_slice_dir):
        os.makedirs(per_slice_dir)

    # slice functional
    out_slicer = slicer(in_file=apply_to_file,
                        out_base_name=fname_presuffix(apply_to_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    sliced_apply_to_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    warped_apply_to_slices = []
    sliced_apply_to_files_to_remove = []
    for (sliced_apply_to_file, warp_file) in zip(sliced_apply_to_files,
                                                 warp_files):
        if warp_file is None:
            warped_apply_to_slices.append(sliced_apply_to_file)
        else:
            sliced_apply_to_files_to_remove.append(sliced_apply_to_file)
            out_warp_apply = warp_apply(in_file=sliced_apply_to_file,
                                        master=sliced_apply_to_file,
                                        warp=warp_file,
                                        out_file=fname_presuffix(
                                            sliced_apply_to_file,
                                            suffix='_qwarped'),
                                        environ=environ)
            warped_apply_to_slices.append(out_warp_apply.outputs.out_file)

    # Fix the obliquity
    oblique_warped_apply_to_slices = []
    for (sliced_apply_to_file,
         warped_apply_to_slice) in zip(sliced_apply_to_files,
                                       warped_apply_to_slices):
        oblique_slice = fix_obliquity(warped_apply_to_slice,
                                      sliced_apply_to_file,
                                      verbose=verbose,
                                      caching=caching,
                                      caching_dir=per_slice_dir,
                                      environ=environ)
        oblique_warped_apply_to_slices.append(oblique_slice)

    # Finally, merge all slices !
    out_merge_apply_to = merge(in_files=oblique_warped_apply_to_slices,
                               dimension='z',
                               merged_file=fname_presuffix(apply_to_file,
                                                           suffix='_perslice',
                                                           newpath=write_dir),
                               environ=environ)

    # Fix the obliquity
    merged_apply_to_file = fix_obliquity(
        out_merge_apply_to.outputs.merged_file,
        apply_to_file,
        verbose=verbose,
        caching=caching,
        caching_dir=per_slice_dir,
        environ=environ)

    # Update the outputs
    output_files.extend(sliced_apply_to_files_to_remove +
                        oblique_warped_apply_to_slices)

    if not caching:
        for out_file in output_files:
            os.remove(out_file)

    return merged_apply_to_file
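
A usage sketch with hypothetical inputs: `warp_files` must hold one warp per
z-slice of `apply_to_file` (entries may be None for signal-free slices), as
returned by `_per_slice_qwarp` in the next example:

warped = _apply_perslice_warp('func.nii.gz',
                              warp_files,  # list, one entry per slice
                              voxel_size_x=.1,
                              voxel_size_y=.1,
                              write_dir='/tmp/work')
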
Example #7
import os

import nibabel
import pandas as pd
import nipype.interfaces.io as nio
from nipype import Node
from nipype.caching import Memory
from nipype.interfaces import afni, fsl
from nipype.interfaces.utility import IdentityInterface
from nipype.utils.filemanip import fname_presuffix
# sammba-mri internal helpers; the import paths below are assumptions
from sammba.registration.utils import (_get_fsl_slice_output_files,
                                       fix_obliquity)


def _per_slice_qwarp(to_qwarp_file,
                     reference_file,
                     voxel_size_x,
                     voxel_size_y,
                     apply_to_file=None,
                     write_dir=None,
                     caching=False,
                     verbose=True,
                     terminal_output='allatonce',
                     environ=None):
    if write_dir is None:
        write_dir = os.path.dirname(to_qwarp_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(fsl.Slice)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(fsl.Merge)
        for step in [resample, slicer, warp_apply, qwarp, merge]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = fsl.Slice(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = fsl.Merge(terminal_output=terminal_output).run

    # Slice anatomical image
    reference_img = nibabel.load(reference_file)
    per_slice_dir = os.path.join(write_dir, 'per_slice')
    if not os.path.isdir(per_slice_dir):
        os.makedirs(per_slice_dir)

    out_slicer = slicer(in_file=reference_file,
                        out_base_name=fname_presuffix(reference_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    # XXX: workaround for nipype globbing to find slicer outputs
    # Use out_slicer.outputs.out_files once fixed
    sliced_reference_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    # Slice mean functional
    out_slicer = slicer(in_file=to_qwarp_file,
                        out_base_name=fname_presuffix(to_qwarp_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    sliced_to_qwarp_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    # The lines below deal with slices where there is no signal (for example
    # the rostral end of some anatomicals).

    # The inverse warp frequently fails; resampling can help it work better.
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = reference_img.header.get_zooms()[2]
    resampled_sliced_reference_files = []
    for sliced_reference_file in sliced_reference_files:
        out_resample = resample(in_file=sliced_reference_file,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                out_file=fname_presuffix(sliced_reference_file,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_sliced_reference_files.append(out_resample.outputs.out_file)

    resampled_sliced_to_qwarp_files = []
    for sliced_to_qwarp_file in sliced_to_qwarp_files:
        out_resample = resample(in_file=sliced_to_qwarp_file,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                out_file=fname_presuffix(sliced_to_qwarp_file,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_sliced_to_qwarp_files.append(out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_files = []
    output_files = []
    resampled_sliced_to_qwarp_files_to_remove = []
    for (resampled_sliced_to_qwarp_file,
         resampled_sliced_reference_file) in zip(
             resampled_sliced_to_qwarp_files,
             resampled_sliced_reference_files):
        warped_slice = fname_presuffix(resampled_sliced_to_qwarp_file,
                                       suffix='_qwarped')
        # get_fdata: get_data is deprecated in recent nibabel versions
        to_qwarp_data = nibabel.load(
            resampled_sliced_to_qwarp_file).get_fdata()
        ref_data = nibabel.load(resampled_sliced_reference_file).get_fdata()

        if to_qwarp_data.max() == 0 or ref_data.max() == 0:
            # deal with slices where there is no signal
            warped_slices.append(resampled_sliced_to_qwarp_file)
            warp_files.append(None)
        else:
            resampled_sliced_to_qwarp_files_to_remove.append(
                resampled_sliced_to_qwarp_file)
            out_qwarp = qwarp(
                in_file=resampled_sliced_to_qwarp_file,
                base_file=resampled_sliced_reference_file,
                noneg=True,
                blur=[0],
                nmi=True,
                noXdis=True,
                allineate=True,
                allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
                '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
                '-parfix 7 0 -parfix 9 0 '
                '-parfix 10 0 -parfix 12 0',
                out_file=warped_slice,
                environ=environ,
                verb=verbose)
            # XXX fix qwarp bug : out_qwarp.outputs.warped_source extension is
            # +tlrc.HEAD if base_file and in_file are of different extensions
            warped_slices.append(warped_slice)
            warp_files.append(out_qwarp.outputs.source_warp)
            # There are files generated by the allineate option
            output_files.extend([
                fname_presuffix(out_qwarp.outputs.warped_source,
                                suffix='_Allin.nii',
                                use_ext=False),
                fname_presuffix(out_qwarp.outputs.warped_source,
                                suffix='_Allin.aff12.1D',
                                use_ext=False)
            ])

    # Resample the mean volume back to the initial resolution
    voxel_size = nibabel.load(to_qwarp_file).header.get_zooms()[:3]
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                out_file=fname_presuffix(warped_slice,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    oblique_resampled_warped_slices = []
    for (sliced_reference_file,
         resampled_warped_slice) in zip(sliced_reference_files,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_reference_file,
                                      verbose=verbose,
                                      caching=caching,
                                      caching_dir=per_slice_dir,
                                      environ=environ)
        oblique_resampled_warped_slices.append(oblique_slice)

    out_merge_func = merge(in_files=oblique_resampled_warped_slices,
                           dimension='z',
                           merged_file=fname_presuffix(to_qwarp_file,
                                                       suffix='_perslice',
                                                       newpath=write_dir),
                           environ=environ)

    # Fix the obliquity
    oblique_merged = fix_obliquity(out_merge_func.outputs.merged_file,
                                   reference_file,
                                   verbose=verbose,
                                   caching=caching,
                                   caching_dir=per_slice_dir,
                                   environ=environ)

    # Collect the outputs
    output_files.extend(sliced_reference_files + sliced_to_qwarp_files +
                        resampled_sliced_reference_files +
                        resampled_sliced_to_qwarp_files_to_remove +
                        warped_slices + oblique_resampled_warped_slices)

    # Apply the precomputed warp slice by slice
    if apply_to_file is not None:
        # slice functional
        out_slicer = slicer(in_file=apply_to_file,
                            out_base_name=fname_presuffix(
                                apply_to_file,
                                newpath=per_slice_dir,
                                use_ext=False))
        sliced_apply_to_files = _get_fsl_slice_output_files(
            out_slicer.inputs['out_base_name'],
            out_slicer.inputs['output_type'])
        warped_apply_to_slices = []
        sliced_apply_to_files_to_remove = []
        for (sliced_apply_to_file, warp_file) in zip(sliced_apply_to_files,
                                                     warp_files):
            if warp_file is None:
                warped_apply_to_slices.append(sliced_apply_to_file)
            else:
                sliced_apply_to_files_to_remove.append(sliced_apply_to_file)
                out_warp_apply = warp_apply(in_file=sliced_apply_to_file,
                                            master=sliced_apply_to_file,
                                            warp=warp_file,
                                            out_file=fname_presuffix(
                                                sliced_apply_to_file,
                                                suffix='_qwarped'),
                                            environ=environ)
                warped_apply_to_slices.append(out_warp_apply.outputs.out_file)

        # Fix the obliquity
        oblique_warped_apply_to_slices = []
        for (sliced_apply_to_file,
             warped_apply_to_slice) in zip(sliced_apply_to_files,
                                           warped_apply_to_slices):
            oblique_slice = fix_obliquity(warped_apply_to_slice,
                                          sliced_apply_to_file,
                                          verbose=verbose,
                                          caching=caching,
                                          caching_dir=per_slice_dir,
                                          environ=environ)
            oblique_warped_apply_to_slices.append(oblique_slice)

        # Finally, merge all slices !
        out_merge_apply_to = merge(in_files=oblique_warped_apply_to_slices,
                                   dimension='z',
                                   merged_file=fname_presuffix(
                                       apply_to_file,
                                       suffix='_perslice',
                                       newpath=write_dir),
                                   environ=environ)

        # Fix the obliquity
        merged_apply_to_file = fix_obliquity(
            out_merge_apply_to.outputs.merged_file,
            apply_to_file,
            verbose=verbose,
            caching=caching,
            caching_dir=per_slice_dir,
            environ=environ)

        # Update the outputs
        output_files.extend(sliced_apply_to_files_to_remove +
                            oblique_warped_apply_to_slices)
    else:
        merged_apply_to_file = None

    if not caching:
        for out_file in output_files:
            os.remove(out_file)

    return (oblique_merged, warp_files, merged_apply_to_file)


def run_workflows(session=None, csv_file=None):
    from nipype import config
    #config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = 'derivatives/undistort'
    working_dir = 'workingdirs'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]), name="infosource")

    if csv_file is not None:
        print('=== reading csv ===')
        # Read the csv and use pandas to set up the image and EV processing
        df = pd.read_csv(csv_file)
        # initialize lists
        sub_img = []
        ses_img = []
        run_img = []
        ref_img = []

        # fill the lists to iterate mapnodes
        for index, row in df.iterrows():
            for r in row.run.strip("[]").split(" "):
                sub_img.append(row.subject)
                ses_img.append(row.session)
                run_img.append(r)
                if 'refsubject' in df.columns and str(row.refsubject) != 'nan':
                    # non-empty field: register to a different subject
                    ref_img.append(row.refsubject)
                else:
                    # empty field or missing column: use the subject itself
                    ref_img.append(row.subject)

        infosource.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
        infosource.synchronize = True
    else:
        # without a csv there is nothing to iterate over, so bail out early
        # (the cleanup loop at the end of this function also needs df)
        print("No csv-file specified. Don't know what data to process.")
        return


    # SelectFiles
    templates = {
        'image':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc.nii.gz',
        'image_invPE':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_epi_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir),
        name="input_files")

    # Datasink
    outfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="outfiles")

    # Use the following DataSink output substitutions
    outfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('resampled-isotropic-1mm','undistort'),
        ('undistort/ud_func', 'undistort'),
    ]  
       
    outfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1/func'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]
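    # Illustrative (hypothetical) effect of the substitutions above: a
    # parameterized DataSink folder name such as
    #   _refsubject_id_eddy_run_id_01_session_id_20170607_subject_id_eddy
    # becomes, after the tuple substitutions,
    #   _ref-eddy_run_id_01_ses-20170607_sub-eddy
    # which the regexp substitutions then collapse to
    #   sub-eddy/ses-20170607/func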
    
    templates_mv = {
        'ud_minus':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_MINUS.nii.gz',
        'ud_minus_warp':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_MINUS_WARP.nii.gz',
        'ud_plus':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_PLUS.nii.gz',
        'ud_plus_warp':
            'derivatives/resampled-isotropic-1mm/'
            'sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_preproc_PLUS_WARP.nii.gz',
    }

    mv_infiles = Node(
        nio.SelectFiles(templates_mv,
                        base_directory=data_dir),
        name="mv_infiles")

    # Datasink
    mv_outfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="mv_outfiles")

    # Use the following DataSink output substitutions
    mv_outfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('resampled-isotropic-1mm','undistort'),
        ('undistort/ud_func', 'undistort'),
    ]  
       
    mv_outfiles.inputs.regexp_substitutions = [
        (r'sub-([a-zA-Z0-9]+)_ses-([a-zA-Z0-9]+)', r'sub-\1/ses-\2/func/qwarp_plusminus/sub-\1_ses-\2'),
    ]    
    
    # -------------------------------------------- Create Pipeline
    undistort = Workflow(
        name='undistort',
        base_dir=os.path.join(ds_root, working_dir))

    undistort.connect([
        (infosource, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
               
    # iterfield is only meaningful for a MapNode, so it is dropped here;
    # iteration over runs is already handled by infosource.iterables
    qwarp = Node(afni.QwarpPlusMinus(nopadWARP=True,
                                     outputtype='NIFTI_GZ'),
                 name='qwarp')

    undistort.connect(inputfiles, 'image',
                      qwarp, 'in_file')
    undistort.connect(inputfiles, 'image_invPE',
                      qwarp, 'base_file')
    undistort.connect(inputfiles, 'image',
                      qwarp, 'out_file')

    nwarp = Node(afni.NwarpApply(out_file='%s_undistort.nii.gz'),
                 name='nwarp')

    undistort.connect(inputfiles, 'image',
                      nwarp, 'in_file')
    undistort.connect(qwarp, 'source_warp',
                      nwarp, 'warp')
    undistort.connect(inputfiles, 'image',
                      nwarp, 'master')
    undistort.connect(nwarp, 'out_file',
                      outfiles, 'ud_func')

    undistort.stop_on_first_crash = False  # True
    undistort.keep_inputs = True
    undistort.remove_unnecessary_outputs = False
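    # write_graph() renders the workflow wiring before execution; by default it
    # writes a dot file (plus a PNG rendering when graphviz is available) under
    # the workflow's working directory.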
    undistort.write_graph()
    undistort.run()

    mv_ud = Workflow(
        name='mv_ud',
        base_dir=os.path.join(ds_root, working_dir))

    mv_ud.connect([
        (infosource, mv_infiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
    
    mv_ud.connect(mv_infiles, 'ud_minus',
                  mv_outfiles, 'ud_func.@ud_minus')
    mv_ud.connect(mv_infiles, 'ud_plus',
                  mv_outfiles, 'ud_func.@ud_plus')
    mv_ud.connect(mv_infiles, 'ud_minus_warp',
                  mv_outfiles, 'ud_func.@ud_minus_warp')
    mv_ud.connect(mv_infiles, 'ud_plus_warp',
                  mv_outfiles, 'ud_func.@ud_plus_warp')
    
    mv_ud.stop_on_first_crash = False  # True
    mv_ud.keep_inputs = True
    mv_ud.remove_unnecessary_outputs = False
    mv_ud.write_graph()
    mv_ud.run()

    # remove the undistorted files from the ...derivatives/resampled folder
    for index, row in df.iterrows():
        fpath = os.path.join(data_dir,'derivatives','resampled-isotropic-1mm',
                     'sub-' + row.subject,'ses-' + str(row.session),'func')
        for f in glob.glob(os.path.join(fpath,'*US*.nii.gz')):
            os.remove(f)
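
# Minimal, hypothetical invocation sketch: the script's real entry point is
# not part of this excerpt, and the csv path below is made up.
if __name__ == '__main__':
    run_workflows(csv_file='code/undistort_runs.csv')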
Example #9
# id_outliers is assumed to be defined earlier in the script (this excerpt
# starts mid-snippet), as a node that counts outlier voxels per volume.
id_outliers.inputs.legendre = True  # use Legendre polynomials for detrending
id_outliers.inputs.polort = 4  # detrend each voxel timeseries with polynomials of order 4 prior to outlier estimation (4 is recommended)
id_outliers.inputs.out_file = 'outlier_file'

# ATM ONLY: Add an unwarping mapnode here using the field maps
calc_distor_corr = pe.Node(afni.Qwarp(), name='calc_distor_corr')
calc_distor_corr.inputs.plusminus = True
calc_distor_corr.inputs.pblur = [0.05, 0.05]
calc_distor_corr.inputs.minpatch = 9
calc_distor_corr.inputs.noweight = True
calc_distor_corr.inputs.outputtype = 'NIFTI_GZ'
calc_distor_corr.inputs.out_file = 'foobar'
calc_distor_corr.inputs.in_file = fmap_files[0]
calc_distor_corr.inputs.base_file = fmap_files[1]
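
# Note: afni.Qwarp with plusminus=True issues the same '3dQwarp -plusminus'
# call that the dedicated afni.QwarpPlusMinus interface (used in the previous
# example) wraps; either spelling should behave the same here.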

distor_corr = pe.MapNode(afni.NwarpApply(),
                         iterfield=['in_file'],
                         name='distor_corr')
distor_corr.inputs.ainterp = 'quintic'
distor_corr.inputs.in_file = func_files

#######################################################################################
# The line below shows the other way inputs can be provided to a node.
# Rather than hardcoding a value as above (e.g. distor_corr.inputs.ainterp = 'quintic'),
# you pass the output of a previous node -- in this case calc_distor_corr,
# whose output 'source_warp' is connected to the relevant input of
# distor_corr, here called 'warp'.
#######################################################################################

psb6351_wf.connect(calc_distor_corr, 'source_warp', distor_corr, 'warp')
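
# For comparison, a hardcoded version of the same input would look like the
# line below (hypothetical path; the runtime connection above is preferable
# because the warp only exists once calc_distor_corr has actually run):
# distor_corr.inputs.warp = '/path/to/precomputed_PLUS_WARP.nii.gz'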