Example #1
    def _run_interface(self, runtime):
        # Set the realignment options
        realign = spm.Realign()
        realign.inputs.paths = self.inputs.paths
        realign.inputs.in_files = self.inputs.in_file
        realign.inputs.register_to_mean = self.inputs.register_to_mean
        realign.inputs.quality = 0.9
        realign.inputs.fwhm = 5.
        realign.inputs.separation = 4  # TODO: understand this parameter
        realign.inputs.interp = 2
        if self.inputs.correct_tagging:
            # Estimate the realignment parameters
            realign.inputs.jobtype = 'estimate'
            realign.run()
            parameters_file = realign.aggregate_outputs().get()[
                'realignment_parameters']
            rea_parameters = np.loadtxt(parameters_file)

            # Correct for tagging: equal means for control and tag scans
            n_scans = len(rea_parameters)
            if self.inputs.control_scans:
                control_scans = self.inputs.control_scans
            else:
                control_scans = range(0, n_scans, 2)

            if self.inputs.tag_scans:
                tag_scans = self.inputs.tag_scans
            else:
                tag_scans = range(1, n_scans, 2)

            gap = np.mean(rea_parameters[control_scans], axis=0) -\
                np.mean(rea_parameters[tag_scans], axis=0)
            rea_parameters[control_scans] -= gap / 2.
            rea_parameters[tag_scans] += gap / 2.

            # Save the corrected realignment parameters
            np.savetxt(parameters_file, rea_parameters, delimiter=' ')

            # Save the corrected transforms for each frame in an SPM-compatible
            # .mat file. This .mat serves as a header for SPM in the case of 4D files
            affine = spm_affine(self.inputs.in_file)
            rea_affines = np.zeros((4, 4, n_scans))
            for n_scan, param in enumerate(rea_parameters):
                rea_affines[..., n_scan] = params_to_affine(param).dot(affine)
            affines_file = os.path.splitext(self.inputs.in_file)[0] + '.mat'
            savemat(affines_file, dict(mat=rea_affines))
        else:
            realign.inputs.jobtype = 'estimate'
            realign.inputs.register_to_mean = self.inputs.register_to_mean
            realign.run()

        # Reslice and save the aligned volumes
        realign = spm.Realign()
        realign.inputs.paths = self.inputs.paths
        realign.inputs.in_files = self.inputs.in_file
        realign.inputs.jobtype = 'write'
        realign.run()
        return runtime
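
The tagging correction above is plain NumPy arithmetic: the mean motion estimate of the control scans and that of the tag scans are forced to coincide by shifting each group halfway toward the other. A minimal self-contained sketch of just that step, with synthetic parameters (shapes and values are illustrative only):

import numpy as np

# Synthetic realignment parameters: one row per scan, 6 columns
# (3 translations, 3 rotations); even rows = control, odd rows = tag.
rea_parameters = np.random.randn(8, 6)
control_scans = range(0, 8, 2)
tag_scans = range(1, 8, 2)

# Gap between the group means, split evenly across both groups.
gap = (np.mean(rea_parameters[control_scans], axis=0)
       - np.mean(rea_parameters[tag_scans], axis=0))
rea_parameters[control_scans] -= gap / 2.
rea_parameters[tag_scans] += gap / 2.

# The two group means now coincide.
assert np.allclose(np.mean(rea_parameters[control_scans], axis=0),
                   np.mean(rea_parameters[tag_scans], axis=0))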
Example #2
def test_spm(name='test_spm_3d'):
    """
    A simple workflow to test SPM's installation. By default will split the 4D volume in
    time-steps.
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']),
                        name='inputnode')
    dgr = pe.Node(nio.DataGrabber(template="feeds/data/fmri.nii.gz",
                                  outfields=['out_file'],
                                  sort_filelist=False),
                  name='datasource')

    stc = pe.Node(spm.SliceTiming(num_slices=21,
                                  time_repetition=1.0,
                                  time_acquisition=2. - 2. / 32,
                                  slice_order=list(range(21, 0, -1)),
                                  ref_slice=10),
                  name='stc')
    realign_estimate = pe.Node(spm.Realign(jobtype='estimate'),
                               name='realign_estimate')
    realign_write = pe.Node(spm.Realign(jobtype='write'), name='realign_write')
    realign_estwrite = pe.Node(spm.Realign(jobtype='estwrite'),
                               name='realign_estwrite')
    smooth = pe.Node(spm.Smooth(fwhm=[6, 6, 6]), name='smooth')

    if name == 'test_spm_3d':
        split = pe.Node(fsl.Split(dimension="t", output_type="NIFTI"),
                        name="split")
        workflow.connect([(dgr, split, [(('out_file', _get_first), 'in_file')
                                        ]),
                          (split, stc, [("out_files", "in_files")])])
    elif name == 'test_spm_4d':
        gunzip = pe.Node(Gunzip(), name="gunzip")
        workflow.connect([(dgr, gunzip, [(('out_file', _get_first), 'in_file')
                                         ]),
                          (gunzip, stc, [("out_file", "in_files")])])
    else:
        raise NotImplementedError(
            'No implementation of the test workflow \'{}\' was found'.format(
                name))

    workflow.connect([
        (inputnode, dgr, [('in_data', 'base_directory')]),
        (stc, realign_estimate, [('timecorrected_files', 'in_files')]),
        (realign_estimate, realign_write, [('modified_in_files', 'in_files')]),
        (stc, realign_estwrite, [('timecorrected_files', 'in_files')]),
        (realign_write, smooth, [('realigned_files', 'in_files')])
    ])
    return workflow
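
A hedged sketch of how this test workflow might be driven; the data path and working directory below are hypothetical, and in_data should point at the directory containing the FSL feeds data the DataGrabber template expects:

wf = test_spm(name='test_spm_3d')
wf.inputs.inputnode.in_data = '/data'    # hypothetical: holds feeds/data/fmri.nii.gz
wf.base_dir = '/tmp/test_spm_work'       # hypothetical working directory
wf.run()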
Example #3
def realigntoframe17(niftilist):
    """given list of nifti files
    copies/creates realign_QA
    removes first 5 frames
    realignes rest to the 17th frame
    """
    startdir = os.getcwd()
    niftilist.sort()
    basepth, _ = os.path.split(niftilist[0])
    tmpdir, exists = bg.make_dir(basepth, 'realign_QA')
    if exists:
        return None, None
    # copy files to tmp dir
    copiednifti = []
    for f in niftilist:
        newf = bg.copy_file(f, tmpdir)
        copiednifti.append(str(newf))
    # put files in correct order
    os.chdir(tmpdir)
    alteredlist = [x for x in copiednifti]
    frame17 = alteredlist[16]
    alteredlist.remove(frame17)
    alteredlist = alteredlist[5:]
    alteredlist.insert(0, frame17)
    #print 'alteredlist', alteredlist
    # realign
    rlgn = spm.Realign(matlab_cmd='matlab-spm8')
    rlgn.inputs.in_files = alteredlist
    rlgn.inputs.ignore_exception = True
    #rlgn.inputs.write_which = [2,0]
    #rlgn.register_to_mean = True
    rlgnout = rlgn.run()
    os.chdir(startdir)
    return rlgnout, copiednifti
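
A hedged usage sketch, assuming the per-frame files follow a frame*.nii naming scheme (hypothetical layout); the function itself sorts the list and bails out if realign_QA already exists:

import glob

frames = glob.glob('/data/subject01/pet/frame*.nii')  # hypothetical paths
rlgnout, copied = realigntoframe17(frames)
if rlgnout is not None:
    print(rlgnout.outputs.realignment_parameters)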
Example #4
 def __init__(self, in_files=['path'], **options):
     import nipype.interfaces.spm as spm
     realign = spm.Realign()
     realign.inputs.in_files = in_files
     for key, value in options.items():
         setattr(realign.inputs, key, value)
     self.res = realign.run()
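
Any Realign input trait can be forwarded through **options. A hedged usage sketch, assuming the enclosing class is importable as RealignWrapper (hypothetical name, since only __init__ is shown):

wrapper = RealignWrapper(in_files=['/data/func.nii'],  # hypothetical path
                         register_to_mean=True,
                         quality=0.9,
                         fwhm=5.0)
print(wrapper.res.outputs.realignment_parameters)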
Example #5
 def realign(file_to_realign):
     print "running motion correction"
     realign = spm.Realign()
     realign.inputs.in_files = file_to_realign
     realign.inputs.register_to_mean = False
     #realign.inputs.out_prefix = 'spm_corr_'
     res = realign.run()
     parameters = res.outputs.realignment_parameters
     if not isinstance(parameters, list):
         parameters = [parameters]
     par_file = parameters
     out_file = []
     if isinstance(res.outputs.realigned_files, list):
         for rf in res.outputs.realigned_files:
             res = fsl.ImageMaths(in_file=rf,
                                  out_file=rf,
                                  output_type='NIFTI',
                                  op_string='-nan').run()
             out_file.append(res.outputs.out_file)
     else:
         res2 = fsl.ImageMaths(in_file=res.outputs.realigned_files,
                               out_file=res.outputs.realigned_files,
                               output_type='NIFTI',
                               op_string='-nan').run()
         out_file.append(res2.outputs.out_file)
     return out_file, par_file
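
A hedged usage sketch of the helper above; the input path is hypothetical:

out_files, par_files = realign('/data/func.nii')
print(out_files)   # NaN-cleaned, resliced NIFTI volume(s)
print(par_files)   # rp_*.txt motion parameter file(s)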
Example #6
def realigntoframe1(niftilist):
    """given list of nifti files
    copies relevent files to realign_QA
    realigns to the 1st frame
    """
    startdir = os.getcwd()
    niftilist.sort()
    basepth, _ = os.path.split(niftilist[0])
    tmpdir, exists = bg.make_dir(basepth, 'realign_QA')
    if exists:
        return None, None
    # copy files to tmp dir
    copiednifti = []
    for f in niftilist:
        newf = bg.copy_file(f, tmpdir)
        copiednifti.append(str(newf))
    print('copied nifti', copiednifti)
    # realign to frame1
    os.chdir(tmpdir)
    rlgn = spm.Realign()
    rlgn.inputs.matlab_cmd = 'matlab-spm8'
    rlgn.inputs.in_files = copiednifti
    rlgn.inputs.ignore_exception = True
    #print rlgn.mlab.cmdline
    rlgnout = rlgn.run()
    os.chdir(startdir)
    return rlgnout, copiednifti
Example #7
def test_realign_list_outputs():
    filelist, outdir, cwd = create_files_in_directory()
    rlgn = spm.Realign(in_files=filelist[0])
    yield assert_true, rlgn._list_outputs()['realignment_parameters'][0].startswith('rp_')
    yield assert_true, rlgn._list_outputs()['realigned_files'][0].startswith('r')
    yield assert_true, rlgn._list_outputs()['mean_image'].startswith('mean')
    clean_directory(outdir, cwd)
Example #8
def test_realign_run(image_fmri_nii):
    file_inp, _, data_inp = image_fmri_nii

    realign = spm.Realign()

    realign.inputs.in_files = file_inp
    #realign.inputs.register_to_mean = True
    #realign.inputs.jobtype = "estimate"
    realign.run()
Example #9
def test_realign():
    yield assert_equal, spm.Realign._jobtype, 'spatial'
    yield assert_equal, spm.Realign._jobname, 'realign'
    yield assert_equal, spm.Realign().inputs.jobtype, 'estwrite'
    input_map = dict(in_files=dict(field='data', mandatory=True,
                                   copyfile=True),
                     quality=dict(field='eoptions.quality'),
                     fwhm=dict(field='eoptions.fwhm'),
                     separation=dict(field='eoptions.sep'),
                     register_to_mean=dict(field='eoptions.rtm'),
                     weight_img=dict(field='eoptions.weight'),
                     interp=dict(field='eoptions.interp'),
                     wrap=dict(field='eoptions.wrap'),
                     write_which=dict(field='roptions.which'),
                     write_interp=dict(field='roptions.interp'),
                     write_wrap=dict(field='roptions.wrap'),
                     write_mask=dict(field='roptions.mask'))
    rlgn = spm.Realign()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(rlgn.inputs.traits()[key],
                                        metakey), value
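
The input_map above documents how the interface traits translate to SPM batch fields: eoptions.* controls estimation and roptions.* controls reslicing. A short sketch exercising a few of those mappings:

rlgn = spm.Realign()
rlgn.inputs.quality = 0.9            # -> eoptions.quality
rlgn.inputs.separation = 4           # -> eoptions.sep
rlgn.inputs.register_to_mean = True  # -> eoptions.rtm
rlgn.inputs.write_interp = 4         # -> roptions.interp
rlgn.inputs.write_which = [2, 1]     # -> roptions.which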
Example #10
    def __init__(self, experiment_dir, output_dir, working_dir, func_source,
                 struct_source, datasink):
        self.experiment_dir = experiment_dir
        self.output_dir = output_dir
        self.working_dir = working_dir

        # specify input and output nodes
        self.func_source = func_source
        self.struct_source = struct_source
        self.datasink = datasink

        # specify workflow instance
        self.workflow = pe.Workflow(name='workflow')

        # specify nodes
        self.realign = pe.Node(interface=spm.Realign(), name='realign')

        self.coregister = pe.Node(interface=spm.Coregister(),
                                  name="coregister")
        self.coregister.inputs.jobtype = 'estimate'

        self.segment = pe.Node(interface=spm.Segment(), name="segment")

        self.normalize_func = pe.Node(interface=spm.Normalize(),
                                      name="normalize_func")
        self.normalize_func.inputs.jobtype = "write"

        self.normalize_struc = pe.Node(interface=spm.Normalize(),
                                       name="normalize_struc")
        self.normalize_struc.inputs.jobtype = "write"

        self.smooth = pe.Node(interface=spm.Smooth(), name="smooth")

        # connect the nodes to complete the workflow
        self.workflow.connect([
            (self.func_source, self.realign, [('outfiles', 'in_files')]),
            (self.struct_source, self.coregister, [('outfiles', 'source')]),
            (self.realign, self.coregister, [('mean_image', 'target')]),
            (self.coregister, self.segment, [('coregistered_source', 'data')]),
            (self.segment, self.normalize_func, [('transformation_mat',
                                                  'parameter_file')]),
            (self.realign, self.normalize_func, [('realigned_files',
                                                  'apply_to_files')]),
            (self.normalize_func, self.smooth, [('normalized_files',
                                                 'in_files')]),
            #(self.realign, self.datasink, [('realigned_files', 'realign')]),
            #(self.realign, self.datasink, [('mean_image', 'mean')]),
            (self.normalize_func, self.datasink, [('normalized_files', 'norm')]
             ),
            (self.smooth, self.datasink, [('smoothed_files', 'smooth')])
        ])
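
A hedged sketch of driving this class, assuming it is named Preprocessing (hypothetical, since only __init__ is shown) and that the source and sink nodes were built beforehand:

pipeline = Preprocessing(experiment_dir='/data/exp',      # hypothetical paths
                         output_dir='/data/exp/output',
                         working_dir='/data/exp/work',
                         func_source=func_source,
                         struct_source=struct_source,
                         datasink=datasink)
pipeline.workflow.base_dir = pipeline.working_dir
pipeline.workflow.run()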
Example #11
 def __init__(self, **template_dict):
     self.node = pe.Node(interface=spm.Realign(), name='realign')
     self.node.inputs.paths = template_dict['spm_path']
     self.node.inputs.fwhm = template_dict['realign_fwhm']
     self.node.inputs.interp = template_dict['realign_interp']
     self.node.inputs.quality = template_dict['realign_quality']
     self.node.inputs.register_to_mean = template_dict[
         'realign_register_to_mean']
     self.node.inputs.separation = template_dict['realign_separation']
     self.node.inputs.wrap = template_dict['realign_wrap']
     self.node.inputs.write_interp = template_dict['realign_write_interp']
     self.node.inputs.write_mask = template_dict['realign_write_mask']
     self.node.inputs.write_which = template_dict['realign_write_which']
     self.node.inputs.write_wrap = template_dict['realign_write_wrap']
Example #12
def test_realign_regr():
    file_inp = os.path.join(
        Data_dir, "data_input/sub-02_task-fingerfootlips_bold_pr.nii")
    file_out_ref = os.path.join(
        Data_dir, "data_ref/sub-02_task-fingerfootlips_bold_SPMrealign.nii")

    realign_node = pe.Node(spm.Realign(in_files=file_inp), name='realignnode')

    realign_node.run()

    data_out = nb.load(realign_node.result.outputs.realigned_files).get_data()
    data_out_ref = nb.load(file_out_ref).get_data()

    assert np.allclose(data_out_ref, data_out)  # think about atol and rtol
Example #13
def process_subject(func_fname, anat_fname):
    # Drop dummy volumes
    img = nib.load(func_fname)
    dropped_img = nib.Nifti1Image(img.get_data()[..., n_dummies:], img.affine,
                                  img.header)
    fixed_fname = prefix_path('f', degz(func_fname))
    nib.save(dropped_img, fixed_fname)

    # Ungz anatomical
    if anat_fname.endswith('.gz'):
        anat_img = nib.load(anat_fname)
        anat_fname = degz(anat_fname)
        nib.save(anat_img, anat_fname)

    # Slice time correction
    num_slices = img.shape[2]
    time_for_one_slice = TR / num_slices
    TA = TR - time_for_one_slice
    st = spm.SliceTiming()
    st.inputs.in_files = fixed_fname
    st.inputs.num_slices = num_slices
    st.inputs.time_repetition = TR
    st.inputs.time_acquisition = TA
    st.inputs.slice_order = order_func(num_slices)
    st.inputs.ref_slice = ref_slice
    st.run()
    fixed_stimed = prefix_path('a', fixed_fname)

    # Realign
    realign = spm.Realign()
    realign.inputs.in_files = fixed_stimed
    # Do not write resliced files, do write mean image
    realign.inputs.write_which = write_which
    realign.run()

    # Coregistration
    coreg = spm.Coregister()
    # Coregister structural to mean image from realignment
    coreg.inputs.target = prefix_path('mean', fixed_stimed)
    coreg.inputs.source = anat_fname
    coreg.run()

    # Normalization / resampling with normalization + realign params
    seg_norm = spm.Normalize12()
    seg_norm.inputs.image_to_align = anat_fname
    seg_norm.inputs.apply_to_files = fixed_stimed
    seg_norm.inputs.write_bounding_box = bounding_box
    seg_norm.inputs.write_voxel_sizes = voxel_sizes
    seg_norm.run()
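
The TA computation above follows the usual SPM convention: the acquisition time is the TR minus the time of one slice, TA = TR - TR/num_slices. A quick worked check that reproduces the literal 2. - 2. / 32 used as time_acquisition in Example #2:

TR = 2.0
num_slices = 32
time_for_one_slice = TR / num_slices   # 0.0625 s
TA = TR - time_for_one_slice           # 1.9375 s == 2. - 2. / 32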
Example #14
def test_realign_node_run(image_fmri_nii):
    file_inp, _, data_inp = image_fmri_nii

    realign_node = pe.Node(spm.Realign(in_files=file_inp), name='realignnode')

    realign_node.run()

    data_out = nb.load(realign_node.result.outputs.realigned_files).get_data()

    # the middle image shouldn't change
    #assert (data_inp[:,:,:,data_inp.shape[3]//2] == data_out[:,:,:,data_inp.shape[3]//2]).all()

    # I'm assuming the sum shouldn't change "too much"
    for i in range(data_inp.shape[3]):
        assert np.allclose(data_inp[:, :, :, i].sum(),
                           data_out[:, :, :, i].sum(),
                           rtol=2e-2)
Example #15
def test_realign_translate_image(image_fmri_nii):
    _, image_inp, data_inp = image_fmri_nii

    filename_trans, data_trans = image_translate_nii(data_inp, image_inp)

    realign_node = pe.Node(spm.Realign(in_files=filename_trans),
                           name='realignnode')

    realign_node.run()

    data_out = nb.load(realign_node.result.outputs.realigned_files).get_data()

    # should think about some other error metric
    for i in [0, 2]:
        assert np.allclose(data_out[:, :, :, i],
                           data_out[:, :, :, 1],
                           rtol=1e-1)
Example #16
def run_spm_realign(nii_dir):

    # Function realigns all EPIs to the first acquisition in the sequence
    # Create a container for names of skull-stripped EPIs
    stripped_list = []
    unzipped_stripped_list = []

    # Move into tagged dir, get a list of all skull-stripped files, and create sub dir for stripped images
    os.chdir(nii_dir)
    os.mkdir('skull_stripped')
    for files in os.listdir(os.getcwd()):
        if files.endswith('brain.nii.gz'):
            stripped_list.append(files)

    # Sort list
    stripped_list.sort()
    for fname in stripped_list:
        os.system('mv %s skull_stripped' % (fname))

    # Unzip the .gz files (gunzip strips the .gz suffix itself)
    os.chdir('skull_stripped')
    for fname in stripped_list:
        os.system('gunzip %s' % fname)

    # Get final list of unzipped skull-stripped files
    for files in os.listdir(os.getcwd()):
        if files.endswith('brain.nii'):
            unzipped_stripped_list.append(files)

    # Run spm realign on tagged skull-stripped images
    unzipped_stripped_list.sort()
    realign = spm.Realign()
    realign.inputs.in_files = unzipped_stripped_list
    realign.inputs.register_to_mean = False
    realign.inputs.paths = os.getcwd()
    print('<:::Realigning tagged EPIs to first acquisition in sequence:::>')
    realign.run()
    stripped_aligned_dir = os.getcwd()

    return stripped_aligned_dir
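
Shelling out to gunzip works but is fragile (quoting, exit codes, PATH). A hedged alternative using only the standard library, keeping the same strip-the-.gz-suffix behaviour:

import gzip
import shutil

def gunzip_file(fname):
    # Decompress 'something.nii.gz' next to itself; return the '.nii' path.
    new_fname = fname[:-3]
    with gzip.open(fname, 'rb') as f_in, open(new_fname, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    return new_fname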
Example #17
def test_realign_run_copy_image(image_fmri_nii, image_copy_fmri_nii):
    file_inp, image_inp, data_inp = image_fmri_nii

    filename_copy, data_copy = image_copy_fmri_nii

    realign_node = pe.Node(spm.Realign(in_files=filename_copy),
                           name='realignnode')

    realign_node.run()
    data_out = nb.load(realign_node.result.outputs.realigned_files).get_data()

    # since all images are the same difference shouldn't be far
    assert np.allclose(data_out, data_copy, rtol=1e-9, atol=1.e-9)

    # this doesn't work; it's a weird mean
    mean_out = nb.load(realign_node.result.outputs.mean_image).get_data()
    #for i in range(3):
    #    assert np.allclose(data_out[:,:,:,i], mean_out)

    # this should be the original image (?)
    mod_out = nb.load(realign_node.result.outputs.modified_in_files).get_data()
    assert (mod_out == data_copy).all()
Example #18
    def analysis_steps(self):
        self.analysis = type('', (), {})()
        # Get files
        subj_list = [
            subj.split('_')[:-1] for subj in next(os.walk(self.proj_dir))[1]
        ]
        # TODO limit the subj_list to those without sw processed files.

        # for parallelization by subject, use IdentityInterface
        self.analysis.infosource = Node(
            IdentityInterface(fields=['subj_id', 'task']), name="infosource")
        self.analysis.infosource.iterables = [('subj_id', subj_list),
                                              ('task', self.task_names)]

        templates = {
            'anat': '{subj_id}/t1/{subj_id}_t1*.nii',
            'func': '{subj_id}/{task}*/{subj_id}_{task}*.nii'
        }
        self.analysis.sf = Node(SelectFiles(templates), name='selectfiles')
        self.analysis.sf.inputs.base_directory = self.proj_dir

        # Realign
        self.analysis.realign = Node(spm.Realign(register_to_mean=True,
                                                 fwhm=self.opts.fwhm),
                                     name='realign')

        # Coregister
        self.analysis.coreg = Node(spm.Coregister(), name='coregistration')
        # Normalize
        self.analysis.norm12 = Node(spm.Normalize12(
            bias_regularization=1e-05, affine_regularization_type='mni'),
                                    name='normalize')

        #Smooth
        self.analysis.smooth = Node(spm.Smooth(), name='smooth')
        #smooth.inputs.in_files = 'functional.nii'
        self.analysis.smooth.inputs.fwhm = self.opts.smooth_fwhm
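
analysis_steps only builds the nodes; a hedged sketch of how they might be wired together at the end of the method, mirroring the realign -> coregister -> normalize -> smooth chain used in Example #10 (the connection field names are assumptions):

        self.analysis.workflow = Workflow(name='preproc')
        self.analysis.workflow.connect([
            (self.analysis.sf, self.analysis.realign, [('func', 'in_files')]),
            (self.analysis.realign, self.analysis.coreg,
             [('mean_image', 'target')]),
            (self.analysis.sf, self.analysis.coreg, [('anat', 'source')]),
            (self.analysis.coreg, self.analysis.norm12,
             [('coregistered_source', 'image_to_align')]),
            (self.analysis.realign, self.analysis.norm12,
             [('realigned_files', 'apply_to_files')]),
            (self.analysis.norm12, self.analysis.smooth,
             [('normalized_files', 'in_files')]),
        ])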
Example #19
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    num_slices = len(slice_times)
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
    slice_timing.inputs.ref_slice = int(num_slices / 2)

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_files')]),
        (realign, slice_timing, [('realigned_files', 'in_files')]),
        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_files')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'smoothed_files', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        out_names = []
        for filename in files:
            _, name, _ = split_filename(filename)
            out_names.append(name + suffix)
        return list_to_filename(out_names)

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
        list(range(49, 55)) + [58]
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', '')]
    regex_subs = [
        ('_ts_masker.*/sar', '/smooth/'),
        ('_ts_masker.*/ar', '/unsmooth/'),
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
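
A hedged usage sketch for create_workflow; every path and timing value below is hypothetical:

wf = create_workflow(files=['/data/sub01/rest1.nii', '/data/sub01/rest2.nii'],
                     target_file='/data/templates/target.nii.gz',
                     subject_id='sub01',
                     TR=2.0,
                     slice_times=[i * 2.0 / 32 for i in range(32)],
                     subjects_dir='/data/freesurfer',
                     sink_directory='/data/output')
wf.base_dir = '/data/work'
wf.run(plugin='MultiProc')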
Example #20
def test_realign():
    yield assert_equal, spm.Realign._jobtype, 'spatial'
    yield assert_equal, spm.Realign._jobname, 'realign'
    yield assert_equal, spm.Realign().inputs.jobtype, 'estwrite'
Example #21
def test_realign():
    assert spm.Realign._jobtype == 'spatial'
    assert spm.Realign._jobname == 'realign'
    assert spm.Realign().inputs.jobtype == 'estwrite'
Example #22
def compare_workflow(c, name='compare_realignments'):
    #import nipype.interfaces.matlab as mlab
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from nipype.interfaces.nipy import FmriRealign4d
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.spm as spm

    #mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    #mlab.MatlabCommand.set_default_paths('/software/spm8_4290')

    workflow = pe.Workflow(name=name)

    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')
    if c.test_mode:
        infosource.iterables = ('subject_id', [c.subjects[0]])
    else:
        infosource.iterables = ('subject_id', c.subjects)

    datagrabber = get_dataflow(c)
    workflow.connect(infosource, 'subject_id', datagrabber, 'subject_id')

    realign_nipy = pe.Node(interface=FmriRealign4d(), name='realign_nipy')
    realign_nipy.inputs.tr = c.TR
    realign_nipy.inputs.slice_order = c.SliceOrder
    realign_nipy.inputs.time_interp = True

    realign_nipy_no_t = pe.Node(interface=FmriRealign4d(),
                                name='realign_nipy_no_t')
    realign_nipy_no_t.inputs.tr = c.TR
    #realign_nipy_no_t.inputs.slice_order = c.SliceOrder

    realign_mflirt = pe.MapNode(interface=fsl.MCFLIRT(save_plots=True),
                                name='mcflirt',
                                iterfield=['in_file'])

    realign_spm = pe.MapNode(interface=spm.Realign(),
                             name='spm',
                             iterfield=['in_files'])

    report = pe.Node(interface=ReportSink(orderfields=[
        'Introduction', 'Translations', 'Rotations', 'Correlation_Matrix'
    ]),
                     name='write_report')
    report.inputs.Introduction = 'Comparing realignment nodes'
    report.inputs.base_directory = os.path.join(c.sink_dir)
    report.inputs.report_name = 'Comparing_Motion'

    rot = pe.MapNode(util.Function(
        input_names=['nipy1', 'nipy2', 'fsl', 'spm'],
        output_names=['fname'],
        function=plot_rot),
                     name='plot_rot',
                     iterfield=['nipy1', 'nipy2', 'fsl', 'spm'])

    trans = pe.MapNode(util.Function(
        input_names=['nipy1', 'nipy2', 'fsl', 'spm'],
        output_names=['fname'],
        function=plot_trans),
                       name='plot_trans',
                       iterfield=['nipy1', 'nipy2', 'fsl', 'spm'])

    coef = pe.MapNode(util.Function(
        input_names=['nipy1', 'nipy2', 'fsl', 'spm'],
        output_names=['fname'],
        function=corr_mat),
                      name='cor_mat',
                      iterfield=['nipy1', 'nipy2', 'fsl', 'spm'])

    workflow.connect(datagrabber, 'func', realign_nipy, 'in_file')
    workflow.connect(datagrabber, 'func', realign_nipy_no_t, 'in_file')
    workflow.connect(datagrabber, 'func', realign_mflirt, 'in_file')
    workflow.connect(datagrabber, 'func', realign_spm, 'in_files')

    workflow.connect(realign_nipy, 'par_file', rot, 'nipy1')
    workflow.connect(realign_nipy_no_t, 'par_file', rot, 'nipy2')
    workflow.connect(realign_spm, 'realignment_parameters', rot, 'spm')
    workflow.connect(realign_mflirt, 'par_file', rot, 'fsl')

    workflow.connect(realign_nipy, 'par_file', trans, 'nipy1')
    workflow.connect(realign_nipy_no_t, 'par_file', trans, 'nipy2')
    workflow.connect(realign_spm, 'realignment_parameters', trans, 'spm')
    workflow.connect(realign_mflirt, 'par_file', trans, 'fsl')

    workflow.connect(realign_nipy, 'par_file', coef, 'nipy1')
    workflow.connect(realign_nipy_no_t, 'par_file', coef, 'nipy2')
    workflow.connect(realign_spm, 'realignment_parameters', coef, 'spm')
    workflow.connect(realign_mflirt, 'par_file', coef, 'fsl')

    workflow.connect(trans, 'fname', report, 'Translations')
    workflow.connect(rot, 'fname', report, 'Rotations')
    workflow.connect(coef, 'fname', report, 'Correlation_Matrix')
    workflow.connect(infosource, 'subject_id', report, 'container')

    return workflow
Example #23
def create_spm_preproc_func_pipeline(data_dir=None,
                                     subject_id=None,
                                     task_list=None):

    ###############################
    ## Set up Nodes
    ###############################

    ds = Node(nio.DataGrabber(infields=['subject_id', 'task_id'],
                              outfields=['func', 'struc']),
              name='datasource')
    ds.inputs.base_directory = os.path.abspath(data_dir + '/' + subject_id)
    ds.inputs.template = '*'
    ds.inputs.sort_filelist = True
    ds.inputs.template_args = {'func': [['task_id']], 'struc': []}
    ds.inputs.field_template = {
        'func': 'Functional/Raw/%s/func.nii',
        'struc': 'Structural/SPGR/spgr.nii'
    }
    ds.inputs.subject_id = subject_id
    ds.inputs.task_id = task_list
    ds.iterables = ('task_id', task_list)
    # ds.run().outputs #show datafiles

    # #Setup Data Sinker for writing output files
    # datasink = Node(nio.DataSink(), name='sinker')
    # datasink.inputs.base_directory = '/path/to/output'
    # workflow.connect(realigner, 'realignment_parameters', datasink, 'motion.@par')
    # datasink.inputs.substitutions = [('_variable', 'variable'),('file_subject_', '')]

    #Get Timing Acquisition for slice timing
    tr = 2
    ta = Node(interface=util.Function(input_names=['tr', 'n_slices'],
                                      output_names=['ta'],
                                      function=get_ta),
              name="ta")
    ta.inputs.tr = tr

    #Slice Timing: sequential ascending
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.time_repetition = tr
    slice_timing.inputs.ref_slice = 1

    #Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ra.ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    #Coregister - 12 parameters, cost function = 'nmi', fwhm 7, interpolate, don't mask
    #anatomical to functional mean across all available data.
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estimate'

    # Segment structural, gray/white/csf,mni,
    segment = Node(interface=spm.Segment(), name="segment")
    segment.inputs.save_bias_corrected = True

    #Normalize - structural to MNI - then apply this to the coregistered functionals
    normalize = Node(interface=spm.Normalize(), name="normalize")
    normalize.inputs.template = os.path.abspath(t1_template_file)

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    fwhmlist = [0, 5, 8]
    smooth.iterables = ('fwhm', fwhmlist)

    #Create Covariate matrix
    make_covariates = Node(interface=Create_Covariates(),
                           name="make_covariates")

    ###############################
    ## Create Pipeline
    ###############################

    Preprocessed = Workflow(name="Preprocessed")
    Preprocessed.base_dir = os.path.abspath(data_dir + '/' + subject_id +
                                            '/Functional')

    Preprocessed.connect([
        (ds, ta, [(('func', get_n_slices), "n_slices")]),
        (ta, slice_timing, [("ta", "time_acquisition")]),
        (ds, slice_timing, [
            ('func', 'in_files'),
            (('func', get_n_slices), "num_slices"),
            (('func', get_slice_order), "slice_order"),
        ]),
        (slice_timing, realign, [('timecorrected_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (realign, coregister, [('mean_image', 'target')]),
        (ds, coregister, [('struc', 'source')]),
        (coregister, segment, [('coregistered_source', 'data')]),
        (segment, normalize, [
            ('transformation_mat', 'parameter_file'),
            ('bias_corrected_image', 'source'),
        ]),
        (realign, normalize, [('realigned_files', 'apply_to_files'),
                              (('realigned_files', get_vox_dims),
                               'write_voxel_sizes')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
        (realign, art, [('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_covariates, [('realignment_parameters',
                                     'realignment_parameters')]),
        (art, make_covariates, [('outlier_files', 'spike_id')]),
    ])
    return Preprocessed
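
get_ta is referenced but not defined in this example; under the SPM convention spelled out in Example #13 it would reduce to a one-liner. A hedged sketch of the assumed helper:

def get_ta(tr, n_slices):
    # Time of acquisition: TR minus the duration of one slice (assumed definition).
    return tr - tr / float(n_slices)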
Example #24
def Couple_Preproc_Pipeline(base_dir=None,
                            output_dir=None,
                            subject_id=None,
                            spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
        
    """

    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import Plot_Coregistration_Montage, PlotRealignmentParameters, Create_Covariates
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans.  Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['struct', 'ap', 'pa']),
                      name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
        'ap': '%s/Study*/distortion_corr_32ch_ap*',
        'pa': '%s/Study*/distortion_corr_32ch_pa*'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'ap': [['subject_id']],
        'pa': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    scan_file_list = glob.glob(
        os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    func_list = [s for s in func_list
                 if "SBRef" not in s]  # Exclude sbref for now.
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create Separate Converter Nodes for each different type of file. (dist corr scans need to be done before functional)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2),
                              infields=['in1', 'in2'],
                              name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                              'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                                    'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out Zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    #Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Coregister - 12 parameters
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    #Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, ap_dcm2nii, [('ap', 'source_dir')]),
        (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
        (datasource, s_dcm2nii, [('struct', 'source_dir')]),
        (func_source, f_dcm2nii, [('scan', 'source_dir')]),
        (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
        (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
        (merge_to_file_list, merger, [('out', 'in_files')]),
        (merger, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (f_dcm2nii, trim, [('converted_files', 'in_file')]),
        (trim, apply_topup, [('out_file', 'in_files')]),
        (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
        (abs_maths, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check,
         [('normalized_files', 'wra_img')]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink,
         [('coregistered_source', 'structural.@struct')]),
        (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
        (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')])
    ])
    return workflow
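A minimal sketch of how the returned workflow might be driven; the builder name and all paths below are assumptions for illustration, not part of the snippet above:

# Hypothetical driver: the builder name and all paths are assumptions.
wf = build_preproc_workflow(subject_id='sub-01',
                            base_dir='/scratch/work',
                            output_dir='/data/derivatives')
wf.write_graph(graph2use='colored')  # render the DAG as a sanity check
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})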
Example #25
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import fmri_preprocessing_workflows as utils
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.spm as spm
        import nipype.pipeline.engine as npe
        from clinica.utils.filemanip import zip_nii, unzip_nii

        # Zipping
        # =======
        unzip_node = npe.MapNode(name='Unzipping',
                                 iterfield=['in_file'],
                                 interface=nutil.Function(
                                     input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=unzip_nii))

        unzip_T1w = unzip_node.clone('UnzippingT1w')
        unzip_phasediff = unzip_node.clone('UnzippingPhasediff')
        unzip_bold = unzip_node.clone('UnzippingBold')
        unzip_magnitude1 = unzip_node.clone('UnzippingMagnitude1')

        # FieldMap calculation
        # ====================
        if self.parameters['unwarping']:
            fm_node = npe.MapNode(name="FieldMapCalculation",
                                  iterfield=[
                                      'phase', 'magnitude', 'epi', 'et',
                                      'blipdir', 'tert'
                                  ],
                                  interface=spm.FieldMap())

        # Slice timing correction
        # =======================
        st_node = npe.MapNode(name="SliceTimingCorrection",
                              iterfield=[
                                  'in_files', 'time_repetition', 'slice_order',
                                  'num_slices', 'ref_slice', 'time_acquisition'
                              ],
                              interface=spm.SliceTiming())

        # Motion correction and unwarping
        # ===============================

        if self.parameters['unwarping']:
            mc_node = npe.MapNode(name="MotionCorrectionUnwarping",
                                  iterfield=["scans", "pmscan"],
                                  interface=spm.RealignUnwarp())
            mc_node.inputs.register_to_mean = True
            mc_node.inputs.reslice_mask = False
        else:
            mc_node = npe.MapNode(name="MotionCorrection",
                                  iterfield=["in_files"],
                                  interface=spm.Realign())
            mc_node.inputs.register_to_mean = True

        # Brain extraction
        # ================
        import os.path as path
        from nipype.interfaces.freesurfer import MRIConvert
        if self.parameters['freesurfer_brain_mask']:
            brain_masks = [
                path.join(self.caps_directory, 'subjects', self.subjects[i],
                          self.sessions[i], 't1/freesurfer_cross_sectional',
                          self.subjects[i] + '_' + self.sessions[i],
                          'mri/brain.mgz') for i in range(len(self.subjects))
            ]
            conv_brain_masks = [
                str(self.subjects[i] + '_' + self.sessions[i] + '.nii')
                for i in range(len(self.subjects))
            ]
            bet_node = npe.MapNode(interface=MRIConvert(),
                                   iterfield=["in_file", "out_file"],
                                   name="BrainConversion")
            bet_node.inputs.in_file = brain_masks
            bet_node.inputs.out_file = conv_brain_masks
            bet_node.inputs.out_type = 'nii'
        else:
            bet_node = utils.BrainExtractionWorkflow(name="BrainExtraction")

        # Registration
        # ============
        reg_node = npe.MapNode(
            interface=spm.Coregister(),
            iterfield=["apply_to_files", "source", "target"],
            name="Registration")

        # Normalization
        # =============
        norm_node = npe.MapNode(interface=spm.Normalize12(),
                                iterfield=['image_to_align', 'apply_to_files'],
                                name='Normalization')

        # Smoothing
        # =========
        smooth_node = npe.MapNode(interface=spm.Smooth(),
                                  iterfield=['in_files'],
                                  name='Smoothing')
        smooth_node.inputs.fwhm = self.parameters['full_width_at_half_maximum']

        # Zipping
        # =======
        zip_node = npe.MapNode(name='Zipping',
                               iterfield=['in_file'],
                               interface=nutil.Function(
                                   input_names=['in_file'],
                                   output_names=['out_file'],
                                   function=zip_nii))

        zip_bet_node = zip_node.clone('ZippingBET')
        zip_mc_node = zip_node.clone('ZippingMC')
        zip_reg_node = zip_node.clone('ZippingRegistration')
        zip_norm_node = zip_node.clone('ZippingNormalization')
        zip_smooth_node = zip_node.clone('ZippingSmoothing')

        # Connections
        # ===========

        if self.parameters['freesurfer_brain_mask']:
            self.connect([
                # Brain extraction
                (bet_node, reg_node, [('out_file', 'target')]),
                (bet_node, zip_bet_node, [('out_file', 'in_file')]),
            ])
        else:
            self.connect([
                # Brain extraction
                (unzip_T1w, bet_node, [('out_file', 'Segmentation.data')]),
                (unzip_T1w, bet_node, [('out_file', 'ApplyMask.in_file')]),
                (bet_node, reg_node, [('ApplyMask.out_file', 'target')]),
                (bet_node, zip_bet_node, [('Fill.out_file', 'in_file')]),
            ])

        if self.parameters['unwarping']:
            self.connect([
                # FieldMap calculation
                (self.input_node, fm_node, [('et', 'et')]),
                (self.input_node, fm_node, [('blipdir', 'blipdir')]),
                (self.input_node, fm_node, [('tert', 'tert')]),
                (self.input_node, unzip_phasediff, [('phasediff', 'in_file')]),
                (self.input_node, unzip_magnitude1,
                 [('magnitude1', 'in_file')]),
                (unzip_magnitude1, fm_node, [('out_file', 'magnitude')]),
                (unzip_phasediff, fm_node, [('out_file', 'phase')]),
                (unzip_bold, fm_node, [('out_file', 'epi')]),
                # Motion correction and unwarping
                (st_node, mc_node, [('timecorrected_files', 'scans')]),
                (fm_node, mc_node, [('vdm', 'pmscan')]),
                (mc_node, reg_node, [('realigned_unwarped_files',
                                      'apply_to_files')]),
                (mc_node, zip_mc_node,
                 [('realigned_unwarped_files', 'in_file')]),
            ])
        else:
            self.connect([
                # Motion correction and unwarping
                (st_node, mc_node, [('timecorrected_files', 'in_files')]),
                (mc_node, reg_node, [('realigned_files', 'apply_to_files')]),
                (mc_node, zip_mc_node, [('realigned_files', 'in_file')]),
            ])
        self.connect([
            # Unzipping
            (self.input_node, unzip_T1w, [('T1w', 'in_file')]),
            (self.input_node, unzip_bold, [('bold', 'in_file')]),
            # Slice timing correction
            (unzip_bold, st_node, [('out_file', 'in_files')]),
            (self.input_node, st_node, [('time_repetition',
                                         'time_repetition')]),
            (self.input_node, st_node, [('num_slices', 'num_slices')]),
            (self.input_node, st_node, [('slice_order', 'slice_order')]),
            (self.input_node, st_node, [('ref_slice', 'ref_slice')]),
            (self.input_node, st_node, [('time_acquisition',
                                         'time_acquisition')]),
            # Registration
            (mc_node, reg_node, [('mean_image', 'source')]),
            # Normalization
            (unzip_T1w, norm_node, [('out_file', 'image_to_align')]),
            (reg_node, norm_node, [('coregistered_files', 'apply_to_files')]),
            # Smoothing
            (norm_node, smooth_node, [('normalized_files', 'in_files')]),
            # Zipping
            (reg_node, zip_reg_node, [('coregistered_files', 'in_file')]),
            (norm_node, zip_norm_node, [('normalized_files', 'in_file')]),
            (smooth_node, zip_smooth_node, [('smoothed_files', 'in_file')]),
            # Returning output
            (zip_bet_node, self.output_node, [('out_file', 't1_brain_mask')]),
            (mc_node, self.output_node, [('realignment_parameters',
                                          'mc_params')]),
            (zip_mc_node, self.output_node, [('out_file', 'native_fmri')]),
            (zip_reg_node, self.output_node, [('out_file', 't1_fmri')]),
            (zip_norm_node, self.output_node, [('out_file', 'mni_fmri')]),
            (zip_smooth_node, self.output_node, [('out_file',
                                                  'mni_smoothed_fmri')]),
        ])
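In clinica, `build_core_nodes` is one stage of a larger `Pipeline` object, itself a nipype workflow. A loosely hedged driver sketch; the class name, constructor arguments, and `build()` step are assumptions for illustration, not clinica's documented API:

# Every name below is an assumption for illustration only.
pipeline = FMRIPreprocessing(bids_directory='/data/bids',
                             caps_directory='/data/caps')
pipeline.parameters = {'unwarping': False,
                       'freesurfer_brain_mask': True,
                       'full_width_at_half_maximum': [8, 8, 8]}
pipeline.build()  # would call build_core_nodes() among other steps
pipeline.run(plugin='MultiProc')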
Example #26
def test_realign_list_outputs(create_files_in_directory):
    filelist, outdir = create_files_in_directory
    rlgn = spm.Realign(in_files=filelist[0])
    assert rlgn._list_outputs()['realignment_parameters'][0].startswith('rp_')
    assert rlgn._list_outputs()['realigned_files'][0].startswith('r')
    assert rlgn._list_outputs()['mean_image'].startswith('mean')
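The prefixes the test asserts are SPM's default naming conventions, spelled out here for a concrete, hypothetical input file:

# Hypothetical: for an input named 'func.nii', SPM's defaults would yield
#   rp_func.txt   (realignment parameters, 'rp_' prefix)
#   rfunc.nii     (resliced series, 'r' prefix)
#   meanfunc.nii  (mean image, 'mean' prefix)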
Example #27
epi_stack.inputs.out_format = 'epi'
epi_stack.inputs.out_ext = '.nii'
# Outputs: out_file

# Despiking using AFNI (position in the pipeline based on Jo et al., 2013).
despike = Node(afni.Despike(), name='despike')
despike.inputs.outputtype = 'NIFTI'
# Outputs: out_file

# Slice timing correction (gets timing from the header)
st_corr = Node(spm.SliceTiming(), name='slicetiming_correction')
st_corr.inputs.ref_slice = 1
# Outputs: timecorrected_files

# Realignment using SPM <-- maybe just estimate and apply all transforms
# at the end? (See the sketch after this listing.)
realign = Node(spm.Realign(), name='realign')
realign.inputs.register_to_mean = False
realign.inputs.quality = 1.0
# Outputs: realignment_parameters, resliced EPI images (motion corrected)

tsnr = Node(misc.TSNR(), name='tsnr')
tsnr.inputs.regress_poly = 2
# Outputs: detrended_file, mean_file, stddev_file, tsnr_file

smooth = Node(spm.Smooth(), name='smooth')
smooth.inputs.fwhm = fwhm

#### Anatomical preprocessing ####

# dcmstack - convert DICOMs to NIfTI (with embedded metadata)
anat_stack = Node(dcmstack.DcmStack(), name='anatstack')
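The question raised in the realign comment maps onto SPM's split job types: estimate the motion parameters once, and reslice only at the very end so the data are interpolated a single time. A minimal sketch; the node names are assumptions:

# Sketch only: estimate-only realignment plus a separate write-only pass,
# so the data are interpolated a single time at the end.
realign_est = Node(spm.Realign(jobtype='estimate'), name='realign_estimate')
realign_est.inputs.register_to_mean = False
realign_write = Node(spm.Realign(jobtype='write'), name='realign_write')
# Outputs: realignment_parameters (estimate); realigned_files (write)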
Example #28
and shared across users, projects and labs.


Setup preprocessing workflow
----------------------------

This is a generic preprocessing workflow that can be used by different analyses

"""

preproc = pe.Workflow(name='preproc')
"""Use :class:`nipype.interfaces.spm.Realign` for motion correction
and register all images to the mean image.
"""

realign = pe.Node(spm.Realign(), name="realign")
realign.inputs.register_to_mean = True
"""Use :class:`nipype.algorithms.rapidart` to determine which of the
images in the functional series are outliers based on deviations in
intensity or movement.
"""

art = pe.Node(ra.ArtifactDetect(), name="art")
art.inputs.use_differences = [True, False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'SPM'
"""Skull strip structural images using
:class:`nipype.interfaces.fsl.BET`.
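The excerpt is cut off after this docstring; a minimal sketch of the node it describes, assuming FSL's interface is imported as `fsl` and following the conventions of the surrounding example:

import nipype.interfaces.fsl as fsl

skullstrip = pe.Node(fsl.BET(), name="skullstrip")
skullstrip.inputs.mask = True  # also produce the binary brain mask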
Example #29
analysis. It is slightly different from the one used in spm_tutorial2.


Setup preprocessing workflow
----------------------------

This is a generic preprocessing workflow that can be used by different analyses

"""

preproc = pe.Workflow(name='preproc')
"""Use :class:`nipype.interfaces.spm.Realign` for motion correction
and register all images to the mean image.
"""

realign = pe.Node(interface=spm.Realign(), name="realign")

slice_timing = pe.Node(interface=spm.SliceTiming(), name="slice_timing")
"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
body registration of the functional data to the structural data.
"""

coregister = pe.Node(interface=spm.Coregister(), name="coregister")
coregister.inputs.jobtype = 'estimate'

segment = pe.Node(interface=spm.Segment(), name="segment")
segment.inputs.save_bias_corrected = True
"""Uncomment the following line for faster execution
"""

# segment.inputs.gaussians_per_class = [1, 1, 1, 4]
Example #30
def mod_realign(node, in_file, tr, do_slicetime, sliceorder, parameters={}):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.spm as spm
    import nipype.interfaces.nipy as nipy
    import os
    parameter_source = "FSL"
    keys = parameters.keys()
    if node == "nipy":
        realign = nipy.FmriRealign4d()
        realign.inputs.in_file = in_file
        realign.inputs.tr = tr
        if "loops" in keys:
            realign.inputs.loops = parameters["loops"]
        if "speedup" in keys:
            realign.inputs.speedup = parameters["speedup"]
        if "between_loops" in keys:
            realign.inputs.between_loops = parameters["between_loops"]
        if do_slicetime:
            realign.inputs.slice_order = sliceorder
            realign.inputs.time_interp = True

        res = realign.run()
        out_file = res.outputs.out_file
        par_file = res.outputs.par_file

    elif node == "fsl":
        if not isinstance(in_file, list):
            in_file = [in_file]
        out_file = []
        par_file = []
        # get the first volume of first run as ref file
        if not do_slicetime:
            extract = fsl.ExtractROI()
            extract.inputs.t_min = 0
            extract.inputs.t_size = 1
            extract.inputs.in_file = in_file[0]
            ref_vol = extract.run().outputs.roi_file

        for idx, file in enumerate(in_file):
            if do_slicetime:
                slicetime = fsl.SliceTimer()
                slicetime.inputs.in_file = file
                sliceorder_file = os.path.abspath('FSL_custom_order.txt')
                with open(sliceorder_file, 'w') as custom_order_fp:
                    for t in sliceorder:
                        custom_order_fp.write('%d\n' % (t + 1))
                slicetime.inputs.custom_order = sliceorder_file
                slicetime.inputs.time_repetition = tr
                res = slicetime.run()
                file_to_realign = res.outputs.slice_time_corrected_file
                if not idx:
                    extract = fsl.ExtractROI()
                    extract.inputs.t_min = 0
                    extract.inputs.t_size = 1
                    extract.inputs.in_file = file_to_realign
                    ref_vol = extract.run().outputs.roi_file
            else:
                file_to_realign = file
            realign = fsl.MCFLIRT(interpolation='spline', ref_file=ref_vol)
            realign.inputs.save_plots = True
            realign.inputs.mean_vol = True
            realign.inputs.in_file = file_to_realign
            realign.inputs.out_file = 'fsl_corr_' + \
                                      os.path.split(file_to_realign)[1]
            Realign_res = realign.run()
            out_file.append(Realign_res.outputs.out_file)
            par_file.append(Realign_res.outputs.par_file)

    elif node == 'spm':
        import numpy as np
        import nibabel as nib
        import nipype.interfaces.freesurfer as fs
        if not isinstance(in_file, list):
            in_file = [in_file]
        new_in_file = []
        for f in in_file:
            if f.endswith('.nii.gz'):
                convert = fs.MRIConvert()
                convert.inputs.in_file = f
                convert.inputs.out_type = 'nii'
                convert.inputs.in_type = 'niigz'
                f = convert.run().outputs.out_file
                new_in_file.append(f)
            else:
                new_in_file.append(f)
        if do_slicetime:
            img = nib.load(new_in_file[0])
            num_slices = img.shape[2]
            st = spm.SliceTiming()
            st.inputs.in_files = new_in_file
            st.inputs.num_slices = num_slices
            st.inputs.time_repetition = tr
            st.inputs.time_acquisition = tr - tr / num_slices
            st.inputs.slice_order = (np.asarray(sliceorder) +
                                     1).astype(int).tolist()
            st.inputs.ref_slice = 1
            res_st = st.run()
            file_to_realign = res_st.outputs.timecorrected_files
        else:
            file_to_realign = new_in_file
        realign = spm.Realign()
        realign.inputs.in_files = file_to_realign
        #realign.inputs.out_prefix = 'spm_corr_'
        res = realign.run()
        parameters = res.outputs.realignment_parameters
        if not isinstance(parameters, list):
            parameters = [parameters]
        par_file = parameters
        parameter_source = 'SPM'
        fsl.ImageMaths(in_file=res.outputs.realigned_files,
                       out_file=res.outputs.realigned_files,
                       op_string='-nan').run()
        out_file = res.outputs.realigned_files
    elif node == 'afni':
        import nipype.interfaces.afni as afni
        import nibabel as nib
        import numpy as np
        if not isinstance(in_file, list):
            in_file = [in_file]
        img = nib.load(in_file[0])
        Nz = img.shape[2]
        out_file = []
        par_file = []
        # get the first volume of first run as ref file
        if not do_slicetime:
            extract = fsl.ExtractROI()
            extract.inputs.t_min = 0
            extract.inputs.t_size = 1
            extract.inputs.in_file = in_file[0]
            ref_vol = extract.run().outputs.roi_file

        for idx, file in enumerate(in_file):
            if do_slicetime:
                slicetime = afni.TShift()
                slicetime.inputs.in_file = file
                # Write a custom AFNI tpattern file: one acquisition time
                # per slice, ordered by slice index.
                tpattern_file = os.path.abspath('afni_custom_order_file.txt')
                tpattern = [(i * tr / float(Nz), sliceorder[i])
                            for i in range(len(sliceorder))]
                tpattern.sort(key=lambda x: x[1])
                with open(tpattern_file, 'w') as custom_order:
                    for t in tpattern:
                        custom_order.write('%f\n' % t[0])

                slicetime.inputs.args = '-tpattern @%s' % tpattern_file
                slicetime.inputs.tr = str(tr) + 's'
                slicetime.inputs.outputtype = 'NIFTI_GZ'
                res = slicetime.run()
                file_to_realign = res.outputs.out_file

                if not idx:
                    extract = fsl.ExtractROI()
                    extract.inputs.t_min = 0
                    extract.inputs.t_size = 1
                    extract.inputs.in_file = file_to_realign
                    ref_vol = extract.run().outputs.roi_file

            else:
                file_to_realign = file

            realign = afni.Volreg()
            realign.inputs.in_file = file_to_realign
            realign.inputs.out_file = "afni_corr_" + os.path.split(
                file_to_realign)[1]
            realign.inputs.oned_file = "afni_realignment_parameters.par"
            realign.inputs.basefile = ref_vol
            Realign_res = realign.run()
            out_file.append(Realign_res.outputs.out_file)

            parameters = Realign_res.outputs.oned_file
            if not isinstance(parameters, list):
                parameters = [parameters]
            for i, p in enumerate(parameters):
                foo = np.genfromtxt(p)
                boo = foo[:, [1, 2, 0, 4, 5, 3]]
                boo[:, :3] = boo[:, :3] * np.pi / 180
                np.savetxt(os.path.abspath('realignment_parameters_%d.par' %
                                           i),
                           boo,
                           delimiter='\t')
                par_file.append(
                    os.path.abspath('realignment_parameters_%d.par' % i))

            #par_file.append(Realign_res.outputs.oned_file)

    return out_file, par_file, parameter_source
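A hedged usage sketch for `mod_realign`; the file path, TR, and slice order are invented for illustration:

# Hypothetical call: slice-time correct and realign one run via the SPM
# branch; path, TR and slice order are invented for illustration.
out_file, par_file, source = mod_realign(
    node='spm',
    in_file='/data/sub01/func/run1.nii.gz',
    tr=2.0,
    do_slicetime=True,
    sliceorder=list(range(32)))  # 0-based; converted to 1-based for SPM
print(source)  # 'SPM' -> how downstream tools should parse the parameters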