Example #1
def gen_anat_segs(anat_loc, out_aff):
    # # Custom inputs # #
    import os
    try:
        FSLDIR = os.environ['FSLDIR']
    except KeyError:
        raise EnvironmentError('FSLDIR environment variable not set!')
    import nipype.interfaces.fsl as fsl
    #import nibabel as nib
    #from nilearn.image import resample_img
    from nipype.interfaces.fsl import ExtractROI
    print(
        '\nSegmenting anatomical image to create White Matter and Ventricular CSF masks for constraining the tractography...'
    )
    # Create MNI ventricle mask
    print('Creating MNI space ventricle mask...')
    anat_dir = os.path.dirname(anat_loc)
    lvent_out_file = "%s%s" % (anat_dir, '/LVentricle.nii.gz')
    rvent_out_file = "%s%s" % (anat_dir, '/RVentricle.nii.gz')
    MNI_atlas = "%s%s" % (
        FSLDIR,
        '/data/atlases/HarvardOxford/HarvardOxford-sub-prob-1mm.nii.gz')
    fslroi1 = ExtractROI(in_file=MNI_atlas,
                         roi_file=lvent_out_file,
                         t_min=2,
                         t_size=1)
    os.system(fslroi1.cmdline)
    fslroi2 = ExtractROI(in_file=MNI_atlas,
                         roi_file=rvent_out_file,
                         t_min=13,
                         t_size=1)
    os.system(fslroi2.cmdline)
    mni_csf_loc = anat_dir + '/VentricleMask.nii.gz'
    args = "%s%s%s" % ('-add ', rvent_out_file, ' -thr 0.1 -bin -dilF')
    maths = fsl.ImageMaths(in_file=lvent_out_file,
                           op_string=args,
                           out_file=mni_csf_loc)
    os.system(maths.cmdline)

    # Segment anatomical (should be in MNI space)
    print('Segmenting anatomical image using FAST...')
    fastr = fsl.FAST()
    fastr.inputs.in_files = anat_loc
    fastr.inputs.img_type = 1
    fastr.run()
    old_file_csf = "%s%s" % (anat_loc.split('.nii.gz')[0], '_pve_0.nii.gz')
    new_file_csf = "%s%s" % (anat_dir, '/CSF.nii.gz')
    old_file_wm = "%s%s" % (anat_loc.split('.nii.gz')[0], '_pve_2.nii.gz')
    new_file_wm = "%s%s" % (anat_dir, '/WM.nii.gz')
    os.rename(old_file_csf, new_file_csf)
    os.rename(old_file_wm, new_file_wm)

    # Reslice to 1x1x1mm voxels
    #img=nib.load(anat_loc)
    #vox_sz = img.affine[0][0]
    #targ_aff = img.affine/(np.array([[int(abs(vox_sz)),1,1,1],[1,int(abs(vox_sz)),1,1],[1,1,int(abs(vox_sz)),1],[1,1,1,1]]))
    #new_file_csf_res = resample_img(new_file_csf, target_affine=targ_aff)
    #new_file_wm_res = resample_img(new_file_wm, target_affine=targ_aff)
    #nib.save(new_file_csf_res, new_file_csf)
    #nib.save(new_file_wm_res, new_file_wm)
    return new_file_csf, mni_csf_loc, new_file_wm
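A minimal usage sketch for the function above: the anatomical path is hypothetical, FSL must be installed with FSLDIR set, and note that the out_aff argument is accepted but never used in the body.

# Hypothetical call; anat.nii.gz is a T1 image already normalized to MNI space
csf_mask, vent_mask, wm_mask = gen_anat_segs('/data/sub-01/anat/anat.nii.gz',
                                             out_aff=None)
print(csf_mask, vent_mask, wm_mask)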
Example #2
def fslmath_ExtractROI_T(input_file, tmin, tsize, out_prefix):
    from nipype.interfaces.fsl import ExtractROI
    import os

    splitM = ExtractROI()
    splitM.inputs.in_file = input_file
    splitM.inputs.t_min = tmin
    splitM.inputs.t_size = tsize
    splitM.inputs.output_type = 'NIFTI_GZ'
    print "ExtractROI [" + os.path.basename(input_file) + "]:" + splitM.cmdline
    res = splitM.run()
    outfile = d2s.move_to_results(res.outputs.roi_file, out_prefix)
    return outfile
Example #3
def gen_anat_segs(anat_loc, FSLDIR):
    import os
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.fsl import ExtractROI
    ## Create MNI ventricle mask
    print('Creating MNI space ventricle mask...')
    anat_dir = os.path.dirname(anat_loc)
    lvent_out_file = anat_dir + '/LVentricle.nii.gz'
    rvent_out_file = anat_dir + '/RVentricle.nii.gz'
    MNI_atlas = FSLDIR + '/data/atlases/HarvardOxford/HarvardOxford-sub-prob-1mm.nii.gz'
    fslroi1 = ExtractROI(in_file=MNI_atlas,
                         roi_file=lvent_out_file,
                         t_min=2,
                         t_size=1)
    os.system(fslroi1.cmdline)
    fslroi2 = ExtractROI(in_file=MNI_atlas,
                         roi_file=rvent_out_file,
                         t_min=13,
                         t_size=1)
    os.system(fslroi2.cmdline)
    mni_csf_loc = anat_dir + '/VentricleMask.nii.gz'
    args = '-add ' + rvent_out_file + ' -thr 0.1 -bin -dilF'
    maths = fsl.ImageMaths(in_file=lvent_out_file,
                           op_string=args,
                           out_file=mni_csf_loc)
    os.system(maths.cmdline)

    ## Segment anatomical (should be in MNI space)
    print('Segmenting anatomical image using FAST...')
    fastr = fsl.FAST()
    fastr.inputs.in_files = anat_loc
    fastr.inputs.img_type = 1
    fastr.run()
    old_file_csf = anat_loc.split('.nii.gz')[0] + '_pve_0.nii.gz'
    new_file_csf = anat_dir + '/CSF.nii.gz'
    os.rename(old_file_csf, new_file_csf)
    old_file_wm = anat_loc.split('.nii.gz')[0] + '_pve_2.nii.gz'
    new_file_wm = anat_dir + '/WM.nii.gz'
    os.rename(old_file_wm, new_file_wm)

    return (new_file_csf, new_file_wm, mni_csf_loc)
Example #4
def define_workflow(subject_list, run_list, experiment_dir, output_dir):
    """run the smooth workflow given subject and runs"""
    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
                   name="extract")

    # Smooth - image smoothing
    smooth = Node(Smooth(fwhm=[8, 8, 8]), name="smooth")

    # Mask - applying mask to smoothed
    # mask_func = Node(ApplyMask(output_type='NIFTI'),
    # name="mask_func")

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'run_num']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('run_num', run_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    func_file = opj(
        'sub-{subject_id}', 'func',
        'sub-{subject_id}_task-tsl_run-{run_num}_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
    )
    templates = {'func': func_file}
    # data_dir is assumed to be a module-level global pointing at the data root
    selectfiles = Node(SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-'), ('ssub', 'sub'),
                     ('_space-MNI152NLin2009cAsym_desc-preproc_', '_fwhm-8_'),
                     ('_fwhm_', ''), ('_roi', '')]
    substitutions += [('_run_num_%s' % r, '') for r in run_list]
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    # working_dir is assumed to be a module-level global
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the preprocessing workflow (spm smooth)
    preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                                ('run_num', 'run_num')]),
                     (selectfiles, extract, [('func', 'in_file')]),
                     (extract, smooth, [('roi_file', 'in_files')]),
                     (smooth, datasink, [('smoothed_files', 'preproc.@smooth')
                                         ])])
    return preproc
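A minimal invocation sketch, assuming the module-level globals noted above (data_dir, working_dir) are defined; subject and run IDs are placeholders.

# Hypothetical invocation of the workflow factory above
wf = define_workflow(subject_list=['01', '02'],
                     run_list=['1', '2'],
                     experiment_dir='/data/experiment',
                     output_dir='datasink')
wf.run('MultiProc', plugin_args={'n_procs': 4})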
Example #5
    def Extract_vol_pipeline(self, **kwargs):

        pipeline = self.new_pipeline(
            name='Extract_volume',
            desc=('Extract the last volume of the 4D PET timeseries'),
            citations=[],
            **kwargs)

        pipeline.add('fslroi',
                     # t_min=79 selects the 80th volume (0-indexed), assuming an
                     # 80-volume PET series
                     ExtractROI(roi_file='vol.nii.gz', t_min=79, t_size=1),
                     inputs={'in_file': ('pet_volumes', nifti_gz_format)},
                     outputs={'pet_image': ('roi_file', nifti_gz_format)})

        return pipeline
Example #6
    def Extract_vol_pipeline(self, **kwargs):
        pipeline = self.create_pipeline(
            name='Extract_volume',
            inputs=[DatasetSpec('pet_volumes', nifti_gz_format)],
            outputs=[DatasetSpec('pet_image', nifti_gz_format)],
            desc=('Extract the last volume of the 4D PET timeseries'),
            version=1,
            citations=[],
            **kwargs)

        fslroi = pipeline.create_node(
            ExtractROI(roi_file='vol.nii.gz', t_min=79, t_size=1),
            name='fslroi')
        pipeline.connect_input('pet_volumes', fslroi, 'in_file')
        pipeline.connect_output('pet_image', fslroi, 'roi_file')
        return pipeline
Example #7
    DWIDenoise,
    Generate5tt,
    MRDeGibbs,
    ResponseSD,
)

#: A dictionary that should be imported in the project's settings and included
#: within the *ANALYSIS_INTERFACES* setting.
interfaces = {
    "apply_topup": {ApplyTOPUP().version: ApplyTOPUP},
    "binary_maths": {BinaryMaths().version: BinaryMaths},
    "BET": {BET().version: BET},
    "CAT12 Segmentation": {"12.7": Cat12Segmentation},
    "fslmerge": {Merge().version: Merge},
    "fslreorient2std": {Reorient2Std().version: Reorient2Std},
    "fslroi": {ExtractROI().version: ExtractROI},
    "FAST": {FastWrapper.version: FastWrapper},
    "FLIRT": {FLIRT().version: FLIRT},
    "FNIRT": {FNIRT().version: FNIRT},
    "FSL Anatomical Processing Script": {FslAnat.__version__: FslAnat},
    "mean_image": {MeanImage().version: MeanImage},
    "robustfov": {RobustFOV().version: RobustFOV},
    "ReconAll": {ReconAll().version: ReconAll},
    "SUSAN": {SUSAN().version: SUSAN},
    "topup": {TopupWrapper.version: TopupWrapper},
    "eddy": {Eddy().version: Eddy},
    "denoise": {DWIDenoise().version: DWIDenoise},
    "degibbs": {MRDeGibbs().version: MRDeGibbs},
    "bias_correct": {DWIBiasCorrect().version: DWIBiasCorrect},
    "dwifslpreproc": {DwiFslPreproc.__version__: DwiFslPreproc},
    "mrconvert": {MRConvert.__version__: MRConvert},
Example #8
def preproc_workflow(input_dir,
                     output_dir,
                     subject_list,
                     ses_list,
                     anat_file,
                     func_file,
                     scan_size=477,
                     bet_frac=0.37):
    """
    The preprocessing workflow used in the preparation of the psilocybin vs escitalopram rsFMRI scans.
    Workflows and notes are defined throughout. Inputs are designed to be general; masks and a default MNI space are provided.

    :param input_dir: The input file directory containing all scans in BIDS format
    :param output_dir: The output file directory
    :param subject_list: a list of subject numbers
    :param ses_list: a list of scan numbers (session numbers)
    :param anat_file: The format of the anatomical scan within the input directory
    :param func_file: The format of the functional scan within the input directory
    :param scan_size: The length of the scan in volumes; most 10-minute scans are around 400-500 depending
    on scanner defaults and parameters - confirm by inspecting your data
    :param bet_frac: brain extraction fractional intensity threshold
    :return: the preprocessing workflow
    """
    preproc = Workflow(name='preproc')
    preproc.base_dir = output_dir

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'ses']),
                      name="infosource")

    infosource.iterables = [('subject_id', subject_list), ('ses', ses_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    templates = {
        'anat': anat_file,
        'func': func_file
    }  # define the template of each file input

    selectfiles = Node(SelectFiles(templates, base_directory=input_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=output_dir, container=output_dir),
                    name="datasink")

    preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                                ('ses', 'ses')])])
    ''' 
    This is your functional processing workflow, used to trim scans, despike the signal, slice-time correct, 
    and motion correct your data 
    '''

    fproc = Workflow(name='fproc')  # the functional processing workflow

    # ExtractROI - skip dummy scans at the beginning of the recording by removing the first three
    trim = Node(ExtractROI(t_min=3, t_size=scan_size, output_type='NIFTI_GZ'),
                name="trim")

    # 3dDespike - despike
    despike = Node(Despike(outputtype='NIFTI_GZ', args='-NEW'), name="despike")
    fproc.connect([(trim, despike, [('roi_file', 'in_file')])])
    preproc.connect([(selectfiles, fproc, [('func', 'trim.in_file')])])

    # 3dTshift - slice time correction
    slicetime = Node(TShift(outputtype='NIFTI_GZ', tpattern='alt+z2'),
                     name="slicetime")
    fproc.connect([(despike, slicetime, [('out_file', 'in_file')])])

    # 3dVolreg - correct motion and output 1d matrix
    moco = Node(Volreg(outputtype='NIFTI_GZ',
                       interp='Fourier',
                       zpad=4,
                       args='-twopass'),
                name="moco")
    fproc.connect([(slicetime, moco, [('out_file', 'in_file')])])

    moco_bpfdt = Node(
        MOCObpfdt(), name='moco_bpfdt'
    )  # use the matlab function to correct the motion regressor
    fproc.connect([(moco, moco_bpfdt, [('oned_file', 'in_file')])])
    '''
    This is the co-registration workflow using FSL and ANTs
    '''

    coreg = Workflow(name='coreg')

    # BET - structural data brain extraction
    bet_anat = Node(BET(output_type='NIFTI_GZ', frac=bet_frac, robust=True),
                    name="bet_anat")

    # FSL segmentation process to get WM map
    seg = Node(FAST(bias_iters=6,
                    img_type=1,
                    output_biascorrected=True,
                    output_type='NIFTI_GZ'),
               name="seg")
    coreg.connect([(bet_anat, seg, [('out_file', 'in_files')])])

    # functional to structural registration
    mean = Node(MCFLIRT(mean_vol=True, output_type='NIFTI_GZ'), name="mean")

    # BBR using linear methods for initial transform fit
    func2struc = Node(FLIRT(cost='bbr', dof=6, output_type='NIFTI_GZ'),
                      name='func2struc')
    coreg.connect([(seg, func2struc, [('restored_image', 'reference')])])
    coreg.connect([(mean, func2struc, [('mean_img', 'in_file')])])
    coreg.connect([(seg, func2struc, [(('tissue_class_files', pickindex, 2),
                                       'wm_seg')])])

    # convert the FSL linear transform into a C3d format for AFNI
    f2s_c3d = Node(C3dAffineTool(itk_transform=True, fsl2ras=True),
                   name='f2s_c3d')
    coreg.connect([(func2struc, f2s_c3d, [('out_matrix_file', 'transform_file')
                                          ])])
    coreg.connect([(mean, f2s_c3d, [('mean_img', 'source_file')])])
    coreg.connect([(seg, f2s_c3d, [('restored_image', 'reference_file')])])

    # Functional to structural registration via ANTs non-linear registration
    reg = Node(Registration(
        fixed_image='default_images/MNI152_T1_2mm_brain.nii.gz',
        transforms=['Affine', 'SyN'],
        transform_parameters=[(0.1, ), (0.1, 3.0, 0.0)],
        number_of_iterations=[[1500, 1000, 1000], [100, 70, 50, 20]],
        dimension=3,
        write_composite_transform=True,
        collapse_output_transforms=True,
        metric=['MI'] + ['CC'],
        metric_weight=[1] * 2,
        radius_or_number_of_bins=[32] + [4],
        convergence_threshold=[1.e-8, 1.e-9],
        convergence_window_size=[20] + [10],
        smoothing_sigmas=[[2, 1, 0], [4, 2, 1, 0]],
        sigma_units=['vox'] * 2,
        shrink_factors=[[4, 2, 1], [6, 4, 2, 1]],
        use_histogram_matching=[False] + [True],
        use_estimate_learning_rate_once=[True, True],
        output_warped_image=True),
               name='reg')

    coreg.connect([(seg, reg, [('restored_image', 'moving_image')])
                   ])  # connect segmentation node to registration node

    merge1 = Node(niu.Merge(2),
                  name='merge1')  # merge the linear and nonlinear transforms
    coreg.connect([(f2s_c3d, merge1, [('itk_transform', 'in2')])])
    coreg.connect([(reg, merge1, [('composite_transform', 'in1')])])

    # warp the functional images into MNI space using the transforms from FLIRT and SYN
    warp = Node(ApplyTransforms(
        reference_image='default_images/MNI152_T1_2mm_brain.nii.gz',
        input_image_type=3),
                name='warp')
    coreg.connect([(moco, warp, [('out_file', 'input_image')])])
    coreg.connect([(merge1, warp, [('out', 'transforms')])])

    preproc.connect([(selectfiles, coreg, [('anat', 'bet_anat.in_file')])])
    preproc.connect([(fproc, coreg, [('moco.out_file', 'mean.in_file')])])
    '''
    Scrubbing workflow - find the motion outliers, bandpass filter, re-mean the data after bpf
    '''

    scrub = Workflow(name='scrub')

    # Generate the Scrubbing Regressor
    scrub_metrics = Node(MotionOutliers(dummy=4,
                                        out_file='FD_outliers.1D',
                                        metric='fd',
                                        threshold=0.4),
                         name="scrub_metrics")

    # regress out timepoints
    scrub_frames = Node(Bandpass(highpass=0,
                                 lowpass=99999,
                                 outputtype='NIFTI_GZ'),
                        name='scrub_frames')
    scrub.connect([(scrub_metrics, scrub_frames, [('out_file',
                                                   'orthogonalize_file')])])
    preproc.connect([(coreg, scrub, [('warp.output_image',
                                      'scrub_frames.in_file')])])
    preproc.connect([(selectfiles, scrub, [('func', 'scrub_metrics.in_file')])
                     ])

    # mean image for remeaning after bandpass
    premean = Node(TStat(args='-mean', outputtype='NIFTI_GZ'), name='premean')
    # remean the image
    remean2 = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean2')
    scrub.connect([(scrub_frames, remean2, [('out_file', 'in_file_a')])])
    scrub.connect([(premean, remean2, [('out_file', 'in_file_b')])])
    preproc.connect([(coreg, scrub, [('warp.output_image', 'premean.in_file')])
                     ])
    '''
    Regressors for final cleaning steps
    '''

    regressors = Workflow(name='regressors')

    # Using registered structural image to create the masks for both WM and CSF
    regbet = Node(BET(robust=True, frac=0.37, output_type='NIFTI_GZ'),
                  name='regbet')

    regseg = Node(FAST(img_type=1,
                       output_type='NIFTI_GZ',
                       no_pve=True,
                       no_bias=True,
                       segments=True),
                  name='regseg')
    regressors.connect([(regbet, regseg, [('out_file', 'in_files')])])
    preproc.connect([(coreg, regressors, [('reg.warped_image',
                                           'regbet.in_file')])])
    '''
    Create a cerebrospinal fluid (CSF) regressor 
    '''

    # subtract subcortical GM from the CSF mask
    subcortgm = Node(BinaryMaths(
        operation='sub',
        operand_file='default_images/subcortical_gm_mask_bin.nii.gz',
        output_type='NIFTI_GZ',
        args='-bin'),
                     name='subcortgm')
    regressors.connect([(regseg, subcortgm, [(('tissue_class_files', pickindex,
                                               0), 'in_file')])])

    # Fill the mask holes

    fillcsf = Node(MaskTool(fill_holes=True, outputtype='NIFTI_GZ'),
                   name='fillcsf')
    regressors.connect([(subcortgm, fillcsf, [('out_file', 'in_file')])])

    # Erode the mask

    erocsf = Node(MaskTool(outputtype='NIFTI_GZ', dilate_inputs='-1'),
                  name='erocsf')
    regressors.connect([(fillcsf, erocsf, [('out_file', 'in_file')])])

    # Take mean csf signal from functional image
    meancsf = Node(ImageMeants(output_type='NIFTI_GZ'), name='meancsf')
    regressors.connect([(erocsf, meancsf, [('out_file', 'mask')])])
    preproc.connect([(coreg, regressors, [('warp.output_image',
                                           'meancsf.in_file')])])

    bpf_dt_csf = Node(CSFbpfdt(), name='bpf_dt_csf')
    regressors.connect([(meancsf, bpf_dt_csf, [('out_file', 'in_file')])])
    '''
    Creates a local white matter regressor
    '''

    # subtract subcortical gm
    subcortgm2 = Node(BinaryMaths(
        operation='sub',
        operand_file='default_images/subcortical_gm_mask_bin.nii.gz',
        output_type='NIFTI_GZ',
        args='-bin'),
                      name='subcortgm2')
    regressors.connect([(regseg, subcortgm2, [(('tissue_class_files',
                                                pickindex, 2), 'in_file')])])

    # fill mask
    fillwm = Node(MaskTool(fill_holes=True, outputtype='NIFTI_GZ'),
                  name='fillwm')
    regressors.connect([(subcortgm2, fillwm, [('out_file', 'in_file')])])

    # erode mask
    erowm = Node(MaskTool(outputtype='NIFTI_GZ', dilate_inputs='-1'),
                 name='erowm')
    regressors.connect([(fillwm, erowm, [('out_file', 'in_file')])])

    # generate local wm
    localwm = Node(Localstat(neighborhood=('SPHERE', 25),
                             stat='mean',
                             nonmask=True,
                             outputtype='NIFTI_GZ'),
                   name='localwm')
    regressors.connect([(erowm, localwm, [('out_file', 'mask_file')])])
    preproc.connect([(coreg, regressors, [('warp.output_image',
                                           'localwm.in_file')])])

    # bandpass filter the local wm regressor
    localwm_bpf = Node(Fourier(highpass=0.01,
                               lowpass=0.08,
                               args='-retrend',
                               outputtype='NIFTI_GZ'),
                       name='localwm_bpf')
    regressors.connect([(localwm, localwm_bpf, [('out_file', 'in_file')])])

    # detrend the local wm regressor

    localwm_bpf_dt = Node(Detrend(args='-polort 2', outputtype='NIFTI_GZ'),
                          name='localwm_bpf_dt')
    regressors.connect([(localwm_bpf, localwm_bpf_dt, [('out_file', 'in_file')
                                                       ])])
    '''
    Clean up your functional image with the regressors you have created above
    '''

    # create a mask for blurring filtering, and detrending

    clean = Workflow(name='clean')

    mask = Node(BET(mask=True, functional=True), name='mask')

    mean_mask = Node(MCFLIRT(mean_vol=True, output_type='NIFTI_GZ'),
                     name="mean_mask")

    dilf = Node(DilateImage(operation='max', output_type='NIFTI_GZ'),
                name='dilf')
    clean.connect([(mask, dilf, [('mask_file', 'in_file')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'mask.in_file')])])

    fill = Node(MaskTool(in_file='default_images/MNI152_T1_2mm_brain.nii.gz',
                         fill_holes=True,
                         outputtype='NIFTI_GZ'),
                name='fill')

    axb = Node(Calc(expr='a*b', outputtype='NIFTI_GZ'), name='axb')
    clean.connect([(dilf, axb, [('out_file', 'in_file_a')])])
    clean.connect([(fill, axb, [('out_file', 'in_file_b')])])

    bxc = Node(Calc(expr='ispositive(a)*b', outputtype='NIFTI_GZ'), name='bxc')
    clean.connect([(mean_mask, bxc, [('mean_img', 'in_file_a')])])
    clean.connect([(axb, bxc, [('out_file', 'in_file_b')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'mean_mask.in_file')
                                     ])])

    #### BLUR, FOURIER BPF, and DETREND

    blurinmask = Node(BlurInMask(fwhm=6, outputtype='NIFTI_GZ'),
                      name='blurinmask')
    clean.connect([(bxc, blurinmask, [('out_file', 'mask')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'blurinmask.in_file')
                                     ])])

    fourier = Node(Fourier(highpass=0.01,
                           lowpass=0.08,
                           retrend=True,
                           outputtype='NIFTI_GZ'),
                   name='fourier')
    clean.connect([(blurinmask, fourier, [('out_file', 'in_file')])])

    tstat = Node(TStat(args='-mean', outputtype='NIFTI_GZ'), name='tstat')
    clean.connect([(fourier, tstat, [('out_file', 'in_file')])])

    detrend = Node(Detrend(args='-polort 2', outputtype='NIFTI_GZ'),
                   name='detrend')
    clean.connect([(fourier, detrend, [('out_file', 'in_file')])])

    remean = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean')
    clean.connect([(detrend, remean, [('out_file', 'in_file_a')])])
    clean.connect([(tstat, remean, [('out_file', 'in_file_b')])])

    concat = Node(ConcatModel(), name='concat')

    # Removes nuisance regressors via regression function
    clean_rs = Node(Bandpass(highpass=0, lowpass=99999, outputtype='NIFTI_GZ'),
                    name='clean_rs')

    clean.connect([(concat, clean_rs, [('out_file', 'orthogonalize_file')])])

    remean1 = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean1')
    clean.connect([(clean_rs, remean1, [('out_file', 'in_file_a')])])
    clean.connect([(tstat, remean1, [('out_file', 'in_file_b')])])

    preproc.connect([(regressors, clean, [('bpf_dt_csf.out_file',
                                           'concat.in_file_a')])])
    preproc.connect([(fproc, clean, [('moco_bpfdt.out_file',
                                      'concat.in_file_b')])])

    preproc.connect([(regressors, clean, [('localwm_bpf_dt.out_file',
                                           'clean_rs.orthogonalize_dset')])])
    clean.connect([(remean, clean_rs, [('out_file', 'in_file')])])
    '''
    Write graphical output detailing the workflows and nodes 
    '''

    fproc.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./fproc.dot')
    fproc.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./fproc_color.dot')

    coreg.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./coreg.dot')
    coreg.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./coreg_color.dot')

    scrub.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./scrub.dot')
    scrub.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./scrub_color.dot')

    regressors.write_graph(graph2use='flat',
                           format='png',
                           simple_form=True,
                           dotfilename='./reg.dot')
    regressors.write_graph(graph2use='colored',
                           format='png',
                           simple_form=True,
                           dotfilename='./reg_color.dot')

    preproc.write_graph(graph2use='flat',
                        format='png',
                        simple_form=True,
                        dotfilename='./preproc.dot')
    preproc.write_graph(graph2use='colored',
                        format='png',
                        simple_form=True,
                        dotfilename='./preproc_color.dot')

    return preproc
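A hedged usage sketch for the workflow factory above; paths, IDs, and the BIDS-style templates are placeholders, and the custom nodes it uses (MOCObpfdt, CSFbpfdt, ConcatModel, pickindex) must be importable from the surrounding module.

# Hypothetical call with placeholder paths and subject/session lists
wf = preproc_workflow(
    input_dir='/data/bids',
    output_dir='/data/derivatives',
    subject_list=['001', '002'],
    ses_list=['1'],
    anat_file='sub-{subject_id}/ses-{ses}/anat/sub-{subject_id}_ses-{ses}_T1w.nii.gz',
    func_file='sub-{subject_id}/ses-{ses}/func/sub-{subject_id}_ses-{ses}_task-rest_bold.nii.gz',
    scan_size=477,
    bet_frac=0.37)
wf.run('MultiProc', plugin_args={'n_procs': 8})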
Example #9
    coregwf.connect([
        (bet_anat, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', pif.get_wm),
                                    'in_file')]),
        (bet_anat, coreg_pre, [('out_file', 'reference')]),
        (MNI, Normalization, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, Normalization, [('out_matrix_file', 'in_matrix_file')]),
        (Normalization, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (MNI, applywarp, [('out_file', 'reference')]),
    ])
    #coregwf.write_graph(graph2use='colored', format='svg', simple_form=True)
    '''-----------------------Functional workflow nodes------------------------'''
    '''#########################################################################'''
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI_GZ'),
                   name="extract")

    mcflirt2 = Node(Function(input_names=['in_file', 'dof'],
                             output_names=['out_file', 'mean_img', 'par_file'],
                             function=pif.MCflirt2),
                    name='mcflirt2')
    mcflirt2.inputs.dof = 12
    #mcflirt2.iterables = ("dof", DOF)

    slicetimer = Node(SliceTimer(index_dir=False,
                                 interleaved=True,
                                 output_type='NIFTI',
                                 time_repetition=TR),
                      name="slicetimer")
Example #10
def preprocessing(*argu):

    argu = argu[0]
    json_file = argu[1]

    with open(json_file, 'r') as jsonfile:
        info = json.load(jsonfile, object_pairs_hook=OrderedDict)

    subject_list = info["subject_list"]
    experiment_dir = info["experiment_dir"]
    output_dir = 'datasink'
    working_dir = 'workingdir'

    task_list = info["task_list"]

    fwhm = [*map(int, info["fwhm"])]
    TR = float(info["TR"])
    iso_size = 4
    slice_list = [*map(int, info["slice order"])]

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=int(info["dummy scans"]),
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    slicetime = Node(SliceTiming(num_slices=len(slice_list),
                                 ref_slice=int(median(slice_list)),
                                 slice_order=slice_list,
                                 time_repetition=TR,
                                 time_acquisition=TR - (TR / len(slice_list))),
                     name="slicetime")

    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="mcflirt")

    # Smooth - image smoothing
    smooth = Node(Smooth(), name="smooth")
    smooth.iterables = ("fwhm", fwhm)

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="art")

    # BET - Skullstrip anatomical Image
    bet_anat = Node(BET(frac=0.5, robust=True, output_type='NIFTI_GZ'),
                    name="bet_anat")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI_GZ'),
                        name="segmentation",
                        mem_gb=4)

    # Select WM segmentation file from segmentation output
    def get_wm(files):
        return files[-1]

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'), name="coreg_pre")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="coreg_bbr")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="applywarp")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="applywarp_mean")

    # Create a coregistration workflow
    coregwf = Workflow(name='coregwf')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow
    coregwf.connect([
        (bet_anat, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_wm),
                                    'in_file')]),
        (bet_anat, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp_mean, [('out_file', 'reference')]),
    ])

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'task_name']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('task_name', task_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj('sub-{subject_id}', 'anat', 'sub-{subject_id}_T1w.nii.gz')
    func_file = opj('sub-{subject_id}', 'func',
                    'sub-{subject_id}_task-{task_name}_bold.nii.gz')

    templates = {'anat': anat_file, 'func': func_file}
    selectfiles = Node(SelectFiles(templates,
                                   base_directory=info["base directory"]),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [
        ('_subject_id_', 'sub-'),
        ('_task_name_', '/task-'),
        ('_fwhm_', 'fwhm-'),
        ('_roi', ''),
        ('_mcf', ''),
        ('_st', ''),
        ('_flirt', ''),
        ('.nii_mean_reg', '_mean'),
        ('.nii.par', '.par'),
    ]
    subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]
    substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('task_name', 'task_name')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, slicetime, [('roi_file', 'in_files')]),
        (slicetime, mcflirt, [('timecorrected_files', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_anat.in_file'),
                                ('anat', 'coreg_bbr.reference')]),
        (mcflirt, coregwf, [('mean_img', 'coreg_pre.in_file'),
                            ('mean_img', 'coreg_bbr.in_file'),
                            ('mean_img', 'applywarp_mean.in_file')]),
        (mcflirt, coregwf, [('out_file', 'applywarp.in_file')]),
        (coregwf, smooth, [('applywarp.out_file', 'in_files')]),
        (mcflirt, datasink, [('par_file', 'preproc.@par')]),
        (smooth, datasink, [('smoothed_files', 'preproc.@smooth')]),
        (coregwf, datasink, [('applywarp_mean.out_file', 'preproc.@mean')]),
        (coregwf, art, [('applywarp.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (coregwf, datasink, [('coreg_bbr.out_matrix_file',
                              'preproc.@mat_file'),
                             ('bet_anat.out_file', 'preproc.@brain')]),
        (art, datasink, [('outlier_files', 'preproc.@outlier_files'),
                         ('plot_files', 'preproc.@plot_files')]),
    ])
    # Create preproc output graph
    preproc.write_graph(graph2use='colored', format='png', simple_form=True)

    # Visualize the graph
    img1 = imread(opj(preproc.base_dir, 'preproc', 'graph.png'))
    plt.imshow(img1)
    plt.xticks([]), plt.yticks([])
    plt.show()

    # Visualize the detailed graph
    preproc.write_graph(graph2use='flat', format='png', simple_form=True)
    img2 = imread(opj(preproc.base_dir, 'preproc', 'graph_detailed.png'))
    plt.imshow(img2)
    plt.xticks([]), plt.yticks([])
    plt.show()

    print("Workflow all set. Check the workflow image :)")

    response = input('Run the workflow? Enter yes or no: ')

    if response == 'yes':
        preproc.run('MultiProc', plugin_args={'n_procs': 10})
    elif response == 'no':
        print('Exiting without running the workflow.')
    else:
        raise RuntimeError('You must enter either yes or no')
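A sketch of a JSON config matching the keys this function reads, and of the call itself; all values are placeholders. Note that preprocessing() receives sys.argv-style arguments, so argu[1] must be the JSON path.

# config.json (sketch):
# {
#   "subject_list": ["01"],
#   "experiment_dir": "/data/experiment",
#   "task_list": ["rest"],
#   "fwhm": ["4", "8"],
#   "TR": "2.0",
#   "slice order": ["1", "3", "5", "2", "4", "6"],
#   "dummy scans": "4",
#   "base directory": "/data/bids"
# }
#
# preprocessing(['script_name', 'config.json'])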
Example #11
working_dir = 'workingdir'

subject_list = ['sub-1', 'sub-2', 'sub-3', 'sub-4', 'sub-5', 'sub-6']
fwhm = [4, 8]  # full width at half maximum: the smoothing filter width in space
task_list = ['objectviewing']

with open('ds000105/task-objectviewing_bold.json', 'rt') as fp:
    task_info = json.load(fp)
TR = task_info['RepetitionTime']

iso_size = 4

# ExtractROI - skip dummy scans
extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
               name="extract")

# MCFLIRT - motion correction
mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True, output_type='NIFTI'),
               name="mcflirt")

#SliceTimer - correct for slice wise acquisition
slicetimer = Node(SliceTimer(index_dir=False,
                             interleaved=True,
                             output_type='NIFTI',
                             time_repetition=TR),
                  name="slicetimer")

# Smooth - image smoothing
Example #12
structural_dir = '/home/colciencias/base/struct/'
experiment_dir = opj(base_dir, 'output/')
output_dir = 'datasink'
working_dir = 'workingdir'

subject_list = ['1']

# list of subject identifiers

fwhm = 8  # Smoothing widths to apply (Gaussian kernel size)
TR = 2  # Repetition time
init_volume = 0  # First volume index to use in the pipeline
iso_size = 2  # Isotropic voxel size (in mm) for resampling the functional images

# ExtractROI - skip dummy scans
extract = Node(ExtractROI(t_min=init_volume, t_size=-1, output_type='NIFTI'),
               name="extract")

# MCFLIRT - motion correction
mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True, output_type='NIFTI'),
               name="motion_correction")

# SliceTimer - correct for slice wise acquisition
slicetimer = Node(SliceTimer(index_dir=False,
                             interleaved=True,
                             output_type='NIFTI',
                             time_repetition=TR),
                  name="slice_timing_correction")

# Smooth - image smoothing
smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")
def _main(subject_list, vols, subid_vol_dict, number_of_skipped_volumes,
          brain_path, mask_path, atlas_path, tr_path, motion_params_path,
          func2std_mat_path, MNI3mm_path, base_directory, fc_datasink_name,
          motion_param_regression, band_pass_filtering,
          global_signal_regression, smoothing, volcorrect, num_proc,
          functional_connectivity_directory):

    # ## Volume correction
    # * I have already extracted 4 volumes.
    # * Now extract 120 - 4 = 116 volumes from each subject
    # * So define vols = 114
    #

    if number_of_skipped_volumes is None:
        number_of_skipped_volumes = 4
    vols = vols - number_of_skipped_volumes

    def vol_correct(sub_id, subid_vol_dict, vols, number_of_skipped_volumes):
        sub_vols = subid_vol_dict[sub_id] - number_of_skipped_volumes
        if sub_vols > vols:
            t_min = sub_vols - vols
        elif sub_vols == vols:
            t_min = 0
        else:
            raise Exception('Volumes of Sub ' + str(sub_id) + ' less than desired!')
        return int(t_min)
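    # Worked example of the helper above (hypothetical subject ID):
    # 120 acquired volumes - 4 skipped = 116 usable; keeping vols=114 trims 2,
    # so vol_correct('0050002', {'0050002': 120}, 114, 4) returns t_min == 2.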





    volCorrect = Node(Function(function=vol_correct, input_names=['sub_id','subid_vol_dict','vols','number_of_skipped_volumes'],
                                    output_names=['t_min']), name='volCorrect')

    volCorrect.inputs.subid_vol_dict = subid_vol_dict
    volCorrect.inputs.vols = vols
    volCorrect.inputs.number_of_skipped_volumes = number_of_skipped_volumes


    # ## Define a function to fetch the filenames of a particular subject ID



    def get_subject_filenames(subject_id, brain_path, mask_path, atlas_path, tr_path, motion_params_path, func2std_mat_path, MNI3mm_path):
        import re
        from itertools import zip_longest
        sub_id_extracted = None
        # zip_longest pads the shorter lists with None instead of dropping elements
        # Source: https://stackoverflow.com/questions/11318977/zipping-unequal-lists-in-python-in-to-a-list-which-does-not-drop-any-element-fro
        for brain, mask, atlas, tr, motion_param, func2std_mat in zip_longest(brain_path, mask_path, atlas_path, tr_path, motion_params_path, func2std_mat_path):
            print('*******************', brain, mask, atlas, tr, motion_param, func2std_mat)

            sub_id_extracted = re.search(r'.+_subject_id_(\d+)', brain).group(1)
            if str(subject_id) in brain:
                # print("Files for subject ", subject_id, brain, mask, atlas, tr, motion_param)
                return brain, mask, atlas, tr, motion_param, func2std_mat, MNI3mm_path

        print('Unable to locate Subject: ', subject_id, 'extracted: ', sub_id_extracted)
        raise Exception('Unable to locate Subject: ', subject_id, 'extracted: ', sub_id_extracted)




    # Make a node
    getSubjectFilenames = Node(Function(function=get_subject_filenames, input_names=['subject_id','brain_path','mask_path','atlas_path','tr_path','motion_params_path','func2std_mat_path','MNI3mm_path'],
                                    output_names=['brain','mask','atlas','tr','motion_param','func2std_mat', 'MNI3mm_path']), name='getSubjectFilenames')


    getSubjectFilenames.inputs.brain_path = brain_path
    getSubjectFilenames.inputs.mask_path = mask_path
    getSubjectFilenames.inputs.atlas_path = atlas_path
    getSubjectFilenames.inputs.tr_path = tr_path
    getSubjectFilenames.inputs.motion_params_path = motion_params_path
    getSubjectFilenames.inputs.func2std_mat_path = func2std_mat_path
    getSubjectFilenames.inputs.MNI3mm_path = MNI3mm_path




    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")

    infosource.iterables = [('subject_id',subject_list)]



    # ## Band Pass Filtering
    # Band-pass filter the data using the approach from
    # https://neurostars.org/t/bandpass-filtering-different-outputs-from-fsl-and-nipype-custom-function/824/2

    ### AFNI

    bandpass = Node(afni.Bandpass(highpass=0.01, lowpass=0.1,
                             despike=False, no_detrend=True, notrans=True,
                             outputtype='NIFTI_GZ'),name='bandpass')

    # bandpass = Node(afni.Bandpass(highpass=0.001, lowpass=0.01,
    #                          despike=False, no_detrend=True, notrans=True,
    #                          tr=2.0,outputtype='NIFTI_GZ'),name='bandpass')


    # ## Highpass filtering


    """
    Perform temporal highpass filtering on the data
    """

    # https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dBandpass.html
    # os.chdir('/home1/varunk/Autism-Connectome-Analysis-bids-related/')

    highpass = Node(afni.Bandpass(highpass=0.009, lowpass=99999,
                             despike=False, no_detrend=True, notrans=True,
                             outputtype='NIFTI_GZ'),name='highpass')

    #  FSL bandpass/Highpass
    # highpass = Node(interface=ImageMaths(suffix='_tempfilt'),
    #                   iterfield=['in_file'],
    #                   name='highpass')
    #
    # highpass.inputs.op_string = '-bptf 27.77775001525879  -1' # 23.64 # 31.25


    # ## Smoothing
    # ### Using 6mm fwhm
    # sigma = 6/2.3548 = 2.547987090198743

    spatialSmooth = Node(interface=ImageMaths(op_string='-s 2.5479',
                                                suffix='_smoothed'),
                       name='spatialSmooth')


    # ## Perform the Gram-Schmidt process
    # https://en.wikipedia.org/wiki/Gram%E2%80%93Schmidt_process



    def orthogonalize(in_file, mask_file):
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj

        def gram_schmidt(voxel_time_series, mean_vector):
            # Project the voxel series onto the mean vector and subtract that
            # component, leaving a series orthogonal to the mean (global) signal
            numerator = np.dot(voxel_time_series, mean_vector)
            denominator = np.dot(mean_vector, mean_vector)
            voxel_time_series_orthogonalized = voxel_time_series - (numerator / denominator) * mean_vector

            # To confirm the vectors are orthogonal:
            # sum_dot_prod = np.sum(np.dot(voxel_time_series_orthogonalized, mean_vector))
            # print('Sum of entries of orthogonalized vector = ', sum_dot_prod)
            return voxel_time_series_orthogonalized


        mask_data = nib.load(mask_file)
        mask = mask_data.get_data()

        brain_data = nib.load(in_file)
        brain = brain_data.get_data()

        x_dim, y_dim, z_dim, t_dim = brain_data.shape



        # Find mean brain


        mean_vector = np.zeros(t_dim)


        num_brain_voxels = 0

        # Count the number of brain voxels
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        mean_vector = mean_vector + brain[i,j,k,:]
                        num_brain_voxels = num_brain_voxels + 1


        mean_vector = mean_vector / num_brain_voxels

        # Orthogonalize
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        brain[i,j,k,:] = gram_schmidt(brain[i,j,k,:], mean_vector)



        sub_id = in_file.split('/')[-1].split('.')[0].split('_')[0].split('-')[1]

        gsr_file_name = 'sub-' + sub_id + '_task-rest_run-1_bold.nii.gz'

    #     gsr_file_name_nii = gsr_file_name + '.nii.gz'

        out_file = opj(os.getcwd(),gsr_file_name) # path

        brain_with_header = nib.Nifti1Image(brain, affine=brain_data.affine,header = brain_data.header)
        nib.save(brain_with_header,gsr_file_name)

        return out_file
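    # Worked example of the projection above (illustrative numbers): with
    # v = [1, 2, 3] and m = [1, 1, 1], v_orth = v - (6/3)*m = [-1, 0, 1],
    # and np.dot(v_orth, m) == 0, confirming orthogonality to the mean vector.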










    globalSignalRemoval = Node(Function(function=orthogonalize, input_names=['in_file','mask_file'],
                                      output_names=['out_file']), name='globalSignalRemoval' )
    # globalSignalRemoval.inputs.mask_file = mask_file
    # globalSignalRemoval.iterables = [('in_file',file_paths)]


    # ## GLM for regression of motion parameters



    def calc_residuals(in_file,
                       motion_file):
        """
        Calculates, for every voxel, the residuals after regressing out nuisance regressors (motion parameters) using a GLM.

        Parameters
        ----------
        in_file : string
            Path of a subject's motion-corrected nifti file.
        motion_file : string
            Path of a subject's motion parameters file.


        Returns
        -------
        out_file : string
            Path of residual file in nifti format

        """
        import nibabel as nb
        import numpy as np
        import os
        from os.path import join as opj
        nii = nb.load(in_file)
        data = nii.get_data().astype(np.float32)
        global_mask = (data != 0).sum(-1) != 0


        # Check and define regressors which are provided from files
        if motion_file is not None:
            motion = np.genfromtxt(motion_file)
            if motion.shape[0] != data.shape[3]:
                raise ValueError('Motion parameters {0} do not match data '
                                 'timepoints {1}'.format(motion.shape[0],
                                                         data.shape[3]))
            if motion.size == 0:
                raise ValueError('Motion signal file {0} is '
                                 'empty'.format(motion_file))

        # Calculate regressors
        regressor_map = {'constant' : np.ones((data.shape[3],1))}

        regressor_map['motion'] = motion


        X = np.zeros((data.shape[3], 1))

        for rname, rval in regressor_map.items():
            X = np.hstack((X, rval.reshape(rval.shape[0],-1)))

        X = X[:,1:]

        if np.isnan(X).any() or np.isnan(Y).any():
            raise ValueError('Regressor file contains NaN')

        Y = data[global_mask].T

        try:
            B = np.linalg.inv(X.T.dot(X)).dot(X.T).dot(Y)
        except np.linalg.LinAlgError as e:
            if "Singular matrix" in e:
                raise Exception("Error details: {0}\n\nSingular matrix error: "
                                "The nuisance regression configuration you "
                                "selected may have been too stringent, and the "
                                "regression could not be completed. Ensure your "
                                "parameters are not too "
                                "extreme.\n\n".format(e))
            else:
                raise Exception("Error details: {0}\n\nSomething went wrong with "
                                "nuisance regression.\n\n".format(e))

        Y_res = Y - X.dot(B)

        data[global_mask] = Y_res.T

        img = nb.Nifti1Image(data, header=nii.header,
                             affine=nii.affine)

        subject_name = in_file.split('/')[-1].split('.')[0]
        filename = subject_name + '_residual.nii.gz'
        out_file = os.path.join(os.getcwd(),filename )
        img.to_filename(out_file) # alt to nib.save

        return out_file
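    # Note on the solve above: np.linalg.inv(X.T.dot(X)) is fragile when the
    # regressors are nearly collinear; np.linalg.lstsq(X, Y, rcond=None)[0]
    # computes the same least-squares B more robustly (a possible alternative,
    # not what the original code does).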




    # Create a Node for above
    calc_residuals = Node(Function(function=calc_residuals, input_names=['in_file','motion_file'],
                                    output_names=['out_file']), name='calc_residuals')


    # ## Datasink
    # I needed to define the structure of what files are saved and where.



    # Create DataSink object
    dataSink = Node(DataSink(), name='datasink')

    # Name of the output folder
    dataSink.inputs.base_directory = opj(base_directory,fc_datasink_name)




    # To create the substitutions I looked at the `datasink` folder where I was redirecting the output. I manually
    # selected the part of the file/folder name that I wanted to change and copied it below to be substituted.



    # Define substitution strings so that the data is similar to BIDS
    substitutions = [('_subject_id_', 'sub-')]

    # Feed the substitution strings to the DataSink node
    dataSink.inputs.substitutions = substitutions



    # ### Following is a Join Node that collects the preprocessed file paths and saves them in a file



    def save_file_list_function(in_fc_map_brain_file):
        # Imports
        import numpy as np
        import os
        from os.path import join as opj

        file_list = np.asarray(in_fc_map_brain_file)
        print('######################## File List ######################: \n', file_list)

        np.save('fc_map_brain_file_list', file_list)
        file_name = 'fc_map_brain_file_list.npy'
        out_fc_map_brain_file = opj(os.getcwd(), file_name)  # path

        return out_fc_map_brain_file





    save_file_list = JoinNode(Function(function=save_file_list_function, input_names=['in_fc_map_brain_file'],
                     output_names=['out_fc_map_brain_file']),
                     joinsource="infosource",
                     joinfield=['in_fc_map_brain_file'],
                     name="save_file_list")


    # ## Create a FC node
    #
    # This node:
    # 1. Extracts the average time series of the brain ROIs using the atlas and stores
    #     it as a matrix of size [ROIs x Volumes].
    # 2. Extracts the voxel time series and stores it in a matrix of size [Voxels x Volumes].
    #


    # And save  FC matrix files in shape of brains
    def pear_coff(in_file, atlas_file, mask_file):
        # code to find how many voxels are in the brain region using the mask

            # imports
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj

        mask_data = nib.load(mask_file)
        mask = mask_data.get_data()

        x_dim, y_dim, z_dim = mask_data.shape


        atlasPath = atlas_file
        # Read the atlas
        atlasObject = nib.load(atlasPath)
        atlas = atlasObject.get_data()

        num_ROIs = int((np.max(atlas) - np.min(atlas) ))


        # Read the brain in_file

        brain_data = nib.load(in_file)
        brain = brain_data.get_data()

        x_dim, y_dim, z_dim, num_volumes = brain.shape


        num_brain_voxels = 0

        x_dim, y_dim, z_dim = mask_data.shape

        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        num_brain_voxels = num_brain_voxels + 1

        # Initialize a matrix of ROI time series and voxel time series

        ROI_matrix = np.zeros((num_ROIs, num_volumes))
        voxel_matrix = np.zeros((num_brain_voxels, num_volumes))

        # Fill up the voxel_matrix

        voxel_counter = 0
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        voxel_matrix[voxel_counter,:] = brain[i,j,k,:]
                        voxel_counter = voxel_counter + 1


        # Fill up the ROI_matrix
        # Keep track of number of voxels per ROI as well by using an array - num_voxels_in_ROI[]

        num_voxels_in_ROI = np.zeros((num_ROIs, 1))  # A column array containing the number of voxels in each ROI

        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    label = int(atlas[i,j,k]) - 1
                    if label != -1:
                        ROI_matrix[label,:] = np.add(ROI_matrix[label,:], brain[i,j,k,:])
                        num_voxels_in_ROI[label,0] = num_voxels_in_ROI[label,0] + 1

        ROI_matrix = np.divide(ROI_matrix,num_voxels_in_ROI) # Check if divide is working correctly

        X, Y = ROI_matrix, voxel_matrix


        # Subtract mean from X and Y

        X = np.subtract(X, np.mean(X, axis=1, keepdims=True))
        Y = np.subtract(Y, np.mean(Y, axis=1, keepdims=True))

        temp1 = np.dot(X,Y.T)
        temp2 = np.sqrt(np.sum(np.multiply(X,X), axis=1, keepdims=True))
        temp3 = np.sqrt(np.sum(np.multiply(Y,Y), axis=1, keepdims=True))
        temp4 = np.dot(temp2,temp3.T)
        coff_matrix = np.divide(temp1, (temp4 + 1e-7))


        # Check if any ROI is missing and replace the NAN values in coff_matrix by 0
        if np.argwhere(np.isnan(coff_matrix)).shape[0] != 0:
            print("Some ROIs are not present. Replacing NAN in coff matrix by 0")
            np.nan_to_num(coff_matrix, copy=False)

        # TODO: when I have added 1e-7 in the denominator, why did I still feel the need to replace NaN with zeros?
        sub_id = in_file.split('/')[-1].split('.')[0].split('_')[0].split('-')[1]


        fc_file_name = sub_id + '_fc_map'

        print ("Pear Matrix calculated for subject: ",sub_id)

        roi_brain_matrix = coff_matrix
        brain_file = in_file


        x_dim, y_dim, z_dim, t_dim = brain.shape

        (brain_data.header).set_data_shape([x_dim,y_dim,z_dim,num_ROIs])

        brain_roi_tensor = np.zeros((brain_data.header.get_data_shape()))

        print("Creating brain for Subject-",sub_id)
        for roi in range(num_ROIs):
            brain_voxel_counter = 0
            for i in range(x_dim):
                for j in range(y_dim):
                    for k in range(z_dim):
                        if mask[i,j,k] == 1:
                            brain_roi_tensor[i,j,k,roi] = roi_brain_matrix[roi,brain_voxel_counter]
                            brain_voxel_counter = brain_voxel_counter + 1


            assert (brain_voxel_counter == len(roi_brain_matrix[roi,:]))
        print("Created brain for Subject-",sub_id)


        path = os.getcwd()
        fc_file_name = fc_file_name + '.nii.gz'
        out_file = opj(path,fc_file_name)

        brain_with_header = nib.Nifti1Image(brain_roi_tensor, affine=brain_data.affine,header = brain_data.header)
        nib.save(brain_with_header,out_file)


        fc_map_brain_file = out_file
        return fc_map_brain_file



    # In[521]:


    # Again, create the Node and set default values for the paths

    pearcoff = Node(Function(function=pear_coff, input_names=['in_file','atlas_file','mask_file'],
                                    output_names=['fc_map_brain_file']), name='pearcoff')



    # # IMPORTANT:
    # * ROI 255 has been removed due to resampling, so the FC maps will have NaN at that row. Don't use that ROI :)
    # * This came to light because of the recurring error: RuntimeWarning: invalid value encountered in true_divide
    # * To debug it, read the coff matrix and check its diagonal to find the NaN value.
    #
    #
    #

    # ## Extract volumes




    # ExtractROI - For volCorrect
    extract = Node(ExtractROI(t_size=-1, output_type='NIFTI'),
                   name="extract")



    # ###  Node for applying xformation matrix to functional data
    #

    # In[523]:


    func2std_xform = Node(FLIRT(output_type='NIFTI_GZ',
                             apply_xfm=True), name="func2std_xform")





    # motion_param_regression = 1
    # band_pass_filtering = 0
    # global_signal_regression = 0
    # smoothing = 1
    # volcorrect = 1
    if num_proc is None:
        num_proc = 7

    combination = 'motionRegress' + str(int(motion_param_regression)) + \
     'global' + str(int(global_signal_regression)) + 'smoothing' + str(int(smoothing)) +\
     'filt' + str(int(band_pass_filtering))

    print("Combination: ",combination)

    binary_string = str(int(motion_param_regression)) + str(int(global_signal_regression)) + \
    str(int(smoothing)) + str(int(band_pass_filtering)) + str(int(volcorrect))
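    # For example, motion_param_regression=1, global_signal_regression=0,
    # smoothing=1, band_pass_filtering=0, volcorrect=1 yields binary_string
    # '10101'; each digit switches the corresponding entry of `nodes` on or
    # off in the loop below.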

    base_dir = opj(base_directory,functional_connectivity_directory)
    # wf = Workflow(name=functional_connectivity_directory)
    wf = Workflow(name=combination)

    wf.base_dir = base_dir # Dir where all the outputs will be stored.

    wf.connect(infosource ,'subject_id', getSubjectFilenames, 'subject_id')


    # ------- Dynamic Pipeline ------------------------


    nodes = [
    calc_residuals,
    globalSignalRemoval,
    spatialSmooth,
    bandpass,
    volCorrect]


    # from nipype.interfaces import fsl

    old_node = getSubjectFilenames
    old_node_output = 'brain'

    binary_string = binary_string+'0' # so that the loop runs one more time
    for idx, include in enumerate(binary_string):
        # 11111
        # motion_param_regression
        # global_signal_regression
        # smoothing
        # band_pass_filtering
        # volcorrect

        if old_node == calc_residuals:
            old_node_output = 'out_file'
        elif old_node == extract :
            old_node_output = 'roi_file'
        elif old_node == globalSignalRemoval:
            old_node_output = 'out_file'
        elif old_node == bandpass:
            old_node_output = 'out_file'
        elif old_node == highpass:
            old_node_output = 'out_file'
        elif old_node == spatialSmooth:
            old_node_output = 'out_file'
        elif old_node == volCorrect:
            old_node_output = 'out_file'


        if int(include):
            # if old_node is None:
            #
            #     wf.add_nodes([nodes[idx]])
            #
            # else:



            new_node = nodes[idx]


            if new_node == calc_residuals:
                wf.connect([(getSubjectFilenames, calc_residuals, [('motion_param', 'motion_file')])])
                new_node_input = 'in_file'

            elif new_node == extract :
                wf.connect([( volCorrect, extract, [('t_min','t_min')])])
                new_node_input = 'in_file'

            elif new_node == globalSignalRemoval:
                wf.connect([(getSubjectFilenames, globalSignalRemoval, [('mask','mask_file')])])
                new_node_input = 'in_file'

            elif new_node == bandpass:
                wf.connect([(getSubjectFilenames, bandpass, [('tr','tr')])])
                new_node_input = 'in_file'

            elif new_node == highpass:
                wf.connect([(getSubjectFilenames, highpass, [('tr','tr')])]) #Commenting for FSL
                new_node_input = 'in_file'

            elif new_node == spatialSmooth:
                new_node_input = 'in_file'

            elif new_node == volCorrect:
                wf.connect([(infosource, volCorrect, [('subject_id','sub_id')])])
                wf.connect([( volCorrect, extract, [('t_min','t_min')])])
                new_node = extract
                new_node_input = 'in_file'


            wf.connect(old_node, old_node_output, new_node, new_node_input)

            old_node = new_node


        else:
            if idx == 3: # bandpass == 0 => highpass
                new_node = highpass
                wf.connect([(getSubjectFilenames, highpass, [('tr','tr')])]) #Commenting for FSL
                new_node_input = 'in_file'

                wf.connect(old_node, old_node_output, new_node, new_node_input)

                old_node = new_node

    wf.connect(old_node, old_node_output, pearcoff, 'in_file')
    wf.connect(getSubjectFilenames,'atlas', pearcoff, 'atlas_file')
    wf.connect(getSubjectFilenames, 'mask', pearcoff, 'mask_file')

    wf.connect(pearcoff, 'fc_map_brain_file', func2std_xform ,'in_file')
    wf.connect(getSubjectFilenames,'func2std_mat', func2std_xform, 'in_matrix_file')
    wf.connect(getSubjectFilenames, 'MNI3mm_path', func2std_xform,'reference')

    folder_name = combination + '.@fc_map_brain_file'
    wf.connect(func2std_xform, 'out_file',  save_file_list, 'in_fc_map_brain_file')
    wf.connect(save_file_list, 'out_fc_map_brain_file',  dataSink,folder_name)


    TEMP_DIR_FOR_STORAGE = opj(base_directory,'crash_files')
    wf.config = {"execution": {"crashdump_dir": TEMP_DIR_FOR_STORAGE}}

    wf.write_graph(graph2use='flat', format='png')
    wf.run('MultiProc', plugin_args={'n_procs': num_proc})
Exemplo n.º 14
0
def dti_artifact_correction(wf_name="dti_artifact_correction"):
    """ Run the diffusion MRI pre-processing workflow against the diff files in `data_dir`.

    It will resample/regrid the diffusion image to have isometric voxels.
    Corrects for head motion and eddy currents.
    Estimates motion outliers and exports motion reports using nipype.algorithms.RapidArt.

    Nipype Inputs
    -------------
    dti_art_input.diff: traits.File
        path to the diffusion MRI image

    dti_art_input.bval: traits.File
        path to the bvals file

    dti_art_input.bvec: traits.File
        path to the bvecs file


    Nipype Outputs
    --------------
    dti_art_output.eddy_corr_file: traits.File
        Eddy currents corrected DTI image.

    dti_art_output.bvec_rotated: traits.File
        Rotated bvecs file

    dti_art_output.brain_mask_1: traits.File
        Brain mask extracted using BET on the first B0 image.

    dti_art_output.brain_mask_2: traits.File
        Brain mask extracted using BET on the average B0 image,
        after motion correction.

    dti_art_output.acqp: traits.File
        Text file with acquisition parameters calculated for Eddy.

    dti_art_output.index: traits.File
        Text file with acquisition indices calculated for Eddy.

    dti_art_output.avg_b0: traits.File
        The average b=0 image extracted from the motion and eddy
        currents corrected diffusion MRI.

    dti_art_output.hmc_corr_file: traits.File

    dti_art_output.hmc_corr_bvec: traits.File

    dti_art_output.hmc_corr_xfms: traits.File

    dti_art_output.art_displacement_files: traits.File

    dti_art_output.art_intensity_files: traits.File

    dti_art_output.art_norm_files: traits.File

    dti_art_output.art_outlier_files: traits.File

    dti_art_output.art_plot_files: traits.File

    dti_art_output.art_statistic_files: traits.File

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields  = ["diff", "bval", "bvec"]
    out_fields = ["eddy_corr_file",
                  "bvec_rotated",
                  "brain_mask_1",
                  "brain_mask_2",
                  "acqp",
                  "index",
                  "avg_b0",
                 ]

    do_rapidart = get_config_setting("dmri.artifact_detect", True)
    if do_rapidart:
        out_fields += ["hmc_corr_file",
                       "hmc_corr_bvec",
                       "hmc_corr_xfms",
                       "art_displacement_files",
                       "art_intensity_files",
                       "art_norm_files",
                       "art_outlier_files",
                       "art_plot_files",
                       "art_statistic_files",
                      ]

    # input interface
    dti_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                           name="dti_art_input")

    # resample
    resample = setup_node(Function(function=reslice,
                                   input_names=['in_file', 'new_zooms', 'order', 'out_file'],
                                   output_names=['out_file']),
                          name='dti_reslice')

    ## extract first b0 for Eddy and HMC brain mask
    list_b0 = pe.Node(Function(function=b0_indices,
                               input_names=['in_bval'],
                               output_names=['out_idx'],),
                               name='b0_indices')

    extract_b0 = pe.Node(ExtractROI(t_size=1),
                         name="extract_first_b0")

    # For Eddy, the mask is only used for selecting voxels for the estimation of the hyperparameters,
    # so it isn't very critical.
    # Note also that a mask that is too conservative (small) is better than one that is too big.
    bet_dwi0 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_pre')

    pick_first = lambda lst: lst[0]

    # motion artifacts detection, requires linear co-registration for motion estimation.
    if do_rapidart:
        # head motion correction
        hmc = hmc_pipeline()

        art = setup_node(rapidart_dti_artifact_detection(), name="detect_artifacts")

    # Eddy
    eddy = setup_node(Eddy(method='jac'), name="eddy")

    ## acquisition parameters for Eddy
    write_acqp = setup_node(Function(function=dti_acquisition_parameters,
                                     input_names=["in_file"],
                                     output_names=["out_acqp", "out_index"],),
                            name="write_acqp")

    ## rotate b-vecs
    rot_bvec = setup_node(Function(function=eddy_rotate_bvecs,
                                   input_names=["in_bvec", "eddy_params"],
                                   output_names=["out_file"],),
                          name="rot_bvec")

    ## extract all b0s and average them after Eddy correction
    avg_b0_post = pe.Node(Function(function=b0_average,
                                   input_names=['in_dwi', 'in_bval'],
                                   output_names=['out_file'],),
                          name='b0_avg_post')

    bet_dwi1 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_post')

    # nlmeans denoise
    apply_nlmeans = get_config_setting("dmri.apply_nlmeans", True)
    if apply_nlmeans:
        nlmeans = setup_node(Function(function=nlmeans_denoise,
                                      input_names=['in_file', 'mask_file', 'out_file', 'N'],
                                      output_names=['out_file']),
                             name='nlmeans_denoise')

    # output interface
    dti_output = setup_node(IdentityInterface(fields=out_fields),
                            name="dti_art_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # Connect the nodes
    wf.connect([
                # resample to iso-voxel
                (dti_input, resample, [("diff", "in_file"),]),

                # read from input file the acquisition parameters for eddy
                (dti_input, write_acqp, [("diff", "in_file")]),

                # reference mask for hmc and eddy
                (dti_input,  list_b0,    [("bval",     "in_bval")]),
                (resample,   extract_b0, [("out_file", "in_file")]),
                (list_b0,    extract_b0, [(("out_idx", pick_first), "t_min")]),

                (extract_b0, bet_dwi0,   [("roi_file", "in_file")]),

                # Eddy
                (resample,   eddy, [("out_file",  "in_file")]),
                (bet_dwi0,   eddy, [("mask_file", "in_mask")]),
                (dti_input,  eddy, [("bval",      "in_bval"),
                                    ("bvec",      "in_bvec")
                                   ]),
                (write_acqp, eddy, [("out_acqp",  "in_acqp"),
                                    ("out_index", "in_index")
                                   ]),

                # rotate bvecs
                (dti_input, rot_bvec, [("bvec",          "in_bvec")]),
                (eddy,      rot_bvec, [("out_parameter", "eddy_params")]),

                # final avg b0
                (dti_input,   avg_b0_post, [("bval",          "in_bval")]),
                (eddy,        avg_b0_post, [("out_corrected", "in_dwi" )]),
                (avg_b0_post, bet_dwi1,    [("out_file",      "in_file")]),

                # output
                (write_acqp,  dti_output,  [("out_acqp",  "acqp"),
                                            ("out_index", "index")]),
                (bet_dwi0,    dti_output,  [("mask_file", "brain_mask_1")]),
                (bet_dwi1,    dti_output,  [("mask_file", "brain_mask_2")]),
                (rot_bvec,    dti_output,  [("out_file",  "bvec_rotated")]),
                (avg_b0_post, dti_output,  [("out_file",  "avg_b0")]),
              ])

    if apply_nlmeans:
        wf.connect([
                    # non-local means
                    (eddy,     nlmeans,   [("out_corrected", "in_file")]),
                    (bet_dwi1, nlmeans,   [("mask_file",     "mask_file")]),

                    # output
                    (nlmeans, dti_output, [("out_file", "eddy_corr_file")]),
                   ])
    else:
        wf.connect([
                    # output
                    (eddy, dti_output, [("out_corrected", "eddy_corr_file")]),
                   ])

    if do_rapidart:
        wf.connect([
                    # head motion correction
                    (dti_input, hmc, [("bval", "inputnode.in_bval"),
                                      ("bvec", "inputnode.in_bvec"),
                                     ]),
                    (resample,  hmc, [("out_file",              "inputnode.in_file")]),
                    (bet_dwi0,  hmc, [("mask_file",             "inputnode.in_mask")]),
                    (list_b0,   hmc, [(("out_idx", pick_first), "inputnode.ref_num"),]),

                    # artifact detection
                    (hmc,      art, [("outputnode.out_file", "realigned_files"),
                                     ("outputnode.out_xfms", "realignment_parameters"),
                                    ]),
                    (bet_dwi1, art, [("mask_file", "mask_file"),]),

                    # output
                    (hmc, dti_output, [("outputnode.out_file", "hmc_corr_file"),
                                       ("outputnode.out_bvec", "hmc_corr_bvec"),
                                       ("outputnode.out_xfms", "hmc_corr_xfms"),
                                      ]),

                    (art, dti_output, [("displacement_files",  "art_displacement_files"),
                                       ("intensity_files",     "art_intensity_files"),
                                       ("norm_files",          "art_norm_files"),
                                       ("outlier_files",       "art_outlier_files"),
                                       ("plot_files",          "art_plot_files"),
                                       ("statistic_files",     "art_statistic_files"),
                                      ]),
                  ])

    return wf
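
# A minimal usage sketch (hypothetical paths; assumes the same imports and
# config helpers as above), wiring the identity-interface inputs and running:
wf = dti_artifact_correction()
wf.inputs.dti_art_input.diff = "/data/sub-001/dwi.nii.gz"
wf.inputs.dti_art_input.bval = "/data/sub-001/dwi.bval"
wf.inputs.dti_art_input.bvec = "/data/sub-001/dwi.bvec"
wf.run()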
Exemplo n.º 15
0
     "versions": [{
         "title": Merge().version or "1.0",
         "description":
         f"Default fslmerge version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": FSLMERGE_INPUT_SPECIFICATION,
         "output": FSLMERGE_OUTPUT_SPECIFICATION,
         "nested_results_attribute": "outputs.get_traitsfree",
     }],
 },
 {
     "title":
     "fslroi",
     "description":
     "Extracts specific ROI from image.",
     "versions": [{
         "title": ExtractROI().version or "1.0",
         "description":
         f"Default fslroi version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": FSLROI_INPUT_SPECIFICATION,
         "output": FSLROI_OUTPUT_SPECIFICATION,
         "nested_results_attribute": "outputs.get_traitsfree",
     }],
 },
 {
     "title":
     "topup",
     "description":
     "Estimates and corrects susceptibillity induced distortions.",  # noqa: E501
     "versions": [{
         "title": TOPUP().version or "1.0",
         "description":
Exemplo n.º 16
0
def init_eddy_wf(debug=False, name="eddy_wf"):
    """
    Create a workflow for head-motion & Eddy currents distortion estimation with FSL.

    Parameters
    ----------
    name : :obj:`str`
        Name of workflow (default: ``eddy_wf``)

    Inputs
    ------
    dwi_file
        dwi NIfTI file

    Outputs
    -------
    out_eddy
        The eddy-corrected diffusion image.

    """
    from nipype.interfaces.fsl import Eddy, ExtractROI

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["dwi_file", "metadata", "dwi_mask", "in_bvec", "in_bval"]
        ),
        name="inputnode",
    )

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["out_rotated_bvecs", "eddy_ref_image", "out_eddy"]
        ),
        name="outputnode",
    )

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
Geometrical distortions derived from the so-called Eddy-currents, and head-motion
realignment parameters were estimated with the joint modeling of ``eddy_openmp``,
included in FSL {Eddy().version} [@eddy].
"""
    eddy = pe.Node(
        Eddy(),
        name="eddy",
    )

    if debug:
        eddy.inputs.niter = 1
        eddy.inputs.is_shelled = True
        eddy.inputs.dont_peas = True
        eddy.inputs.nvoxhp = 100

    # Generate the acqp and index files for eddy
    gen_eddy_files = pe.Node(
        niu.Function(
            input_names=["in_file", "in_meta"],
            output_names=["out_acqparams", "out_index"],
            function=gen_eddy_textfiles,
        ),
        name="gen_eddy_files",
    )

    eddy_ref_img = pe.Node(ExtractROI(t_min=0, t_size=1), name="eddy_roi")
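    # keeps only the first eddy-corrected volume to serve as a reference image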

    # fmt:off
    workflow.connect([
        (inputnode, eddy, [
            ("dwi_file", "in_file"),
            ("dwi_mask", "in_mask"),
            ("in_bvec", "in_bvec"),
            ("in_bval", "in_bval"),
        ]),
        (inputnode, gen_eddy_files, [
            ("dwi_file", "in_file"),
            ("metadata", "in_meta")
        ]),
        (gen_eddy_files, eddy, [
            ("out_acqparams", "in_acqp"),
            ("out_index", "in_index"),
        ]),
        (eddy, outputnode, [
            ("out_corrected", "out_eddy"),
            ("out_rotated_bvecs", "out_rotated_bvecs")
        ]),
        (eddy, eddy_ref_img, [("out_corrected", "in_file")]),
        (eddy_ref_img, outputnode, [("roi_file", "eddy_ref_image")]),
    ])
    # fmt:on
    return workflow
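
# A hedged usage sketch (hypothetical paths and metadata; the exact keys read
# by gen_eddy_textfiles are assumed to follow the BIDS sidecar convention):
wf = init_eddy_wf(debug=True)  # debug=True runs a single, faster eddy iteration
wf.inputs.inputnode.dwi_file = "/data/dwi.nii.gz"
wf.inputs.inputnode.in_bval = "/data/dwi.bval"
wf.inputs.inputnode.in_bvec = "/data/dwi.bvec"
wf.inputs.inputnode.dwi_mask = "/data/dwi_mask.nii.gz"
wf.inputs.inputnode.metadata = {"PhaseEncodingDirection": "j-",
                                "TotalReadoutTime": 0.05}
wf.run()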
Exemplo n.º 17
0
def trimT(in_file, roi_file, t_min=0, t_size=-1):
    ExtractROI(in_file=in_file, roi_file=roi_file, t_min=t_min,
               t_size=t_size).run()
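# For example, trimT("bold.nii.gz", "bold_trimmed.nii.gz", t_min=4) would drop
# the first four (dummy) volumes; t_size=-1 keeps everything to the end.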
Exemplo n.º 18
0
def make_w_preproc_anna():
    n_in = Node(IdentityInterface(fields=[
        'prf_run1',
        'prf_run2',
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'prf_preproc',
        ]), name='output')

    n_roi = Node(ExtractROI(), name='roi')
    n_roi.inputs.t_min = 5
    n_roi.inputs.t_size = 1

    n_volreg = Node(interface=Volreg(), name='volreg')
    n_volreg.inputs.outputtype = 'NIFTI'
    n_volreg.inputs.zpad = 1
    n_volreg.inputs.oned_file = 'pRF_run1_mov.1D'

    n_volreg2 = Node(interface=Volreg(), name='volreg2')
    n_volreg2.inputs.outputtype = 'NIFTI'
    n_volreg2.inputs.zpad = 1
    n_volreg2.inputs.oned_file = 'pRF_run2_mov.1D'

    n_tcat = Node(interface=TCat(), name='tcat')
    n_tcat.inputs.outputtype = 'NIFTI'
    n_tcat.inputs.rlt = '++'
    n_tcat2 = Node(interface=TCat(), name='tcat2')
    n_tcat2.inputs.outputtype = 'NIFTI'
    n_tcat2.inputs.rlt = '++'

    n_clip1 = Node(ClipLevel(), 'clip1')
    n_clip2 = Node(ClipLevel(), 'clip2')
    n_clip_both = Node(ClipLevel(), 'clip_both')

    n_mask1 = Node(Automask(), 'mask1')
    n_mask1.inputs.outputtype = 'NIFTI'
    n_mask2 = Node(Automask(), 'mask2')
    n_mask2.inputs.outputtype = 'NIFTI'
    n_mask_both = Node(Automask(), 'mask_both')
    n_mask_both.inputs.outputtype = 'NIFTI'

    n_calc1 = Node(Calc(), 'calc1')
    n_calc1.inputs.expr = 'step(a)*b'
    n_calc1.inputs.outputtype = 'NIFTI'
    n_calc2 = Node(Calc(), 'calc2')
    n_calc2.inputs.expr = 'step(a)*b'
    n_calc2.inputs.outputtype = 'NIFTI'
    n_calc_both = Node(Calc(), 'calc_both')
    n_calc_both.inputs.expr = 'step(a)*b'
    n_calc_both.inputs.outputtype = 'NIFTI'

    n_mean1 = Node(TStat(), 'mean1')
    n_mean1.inputs.outputtype = 'NIFTI'
    n_mean2 = Node(TStat(), 'mean2')
    n_mean2.inputs.outputtype = 'NIFTI'

    n_ratio1 = Node(Calc(), 'ratio1')
    n_ratio1.inputs.expr = '(a/b)'
    n_ratio1.inputs.outputtype = 'NIFTI'
    n_ratio1.inputs.args = '-fscale'

    n_means = Node(Means(), name='means')
    n_means.inputs.datum = 'float'
    n_means.inputs.outputtype = 'NIFTI'

    w = Workflow('nipype_prf')
    w.connect(n_in, 'prf_run1', n_roi, 'in_file')
    w.connect(n_in, 'prf_run1', n_volreg, 'in_file')
    w.connect(n_roi, 'roi_file', n_volreg, 'basefile')
    w.connect(n_volreg, 'out_file', n_tcat, 'in_files')
    w.connect(n_in, 'prf_run2', n_volreg2, 'in_file')
    w.connect(n_roi, 'roi_file', n_volreg2, 'basefile')
    w.connect(n_volreg2, 'out_file', n_tcat2, 'in_files')
    w.connect(n_tcat, 'out_file', n_clip1, 'in_file')
    w.connect(n_tcat2, 'out_file', n_clip2, 'in_file')
    w.connect(n_tcat, 'out_file', n_mask1, 'in_file')
    w.connect(n_clip1, ('clip_val', _to_args), n_mask1, 'args')
    w.connect(n_tcat2, 'out_file', n_mask2, 'in_file')
    w.connect(n_clip2, ('clip_val', _to_args), n_mask2, 'args')
    w.connect(n_mask1, 'out_file', n_calc1, 'in_file_a')
    w.connect(n_tcat, 'out_file', n_calc1, 'in_file_b')
    w.connect(n_mask2, 'out_file', n_calc2, 'in_file_a')
    w.connect(n_tcat2, 'out_file', n_calc2, 'in_file_b')
    w.connect(n_tcat, 'out_file', n_mean1, 'in_file')
    w.connect(n_tcat2, 'out_file', n_mean2, 'in_file')
    w.connect(n_tcat, 'out_file', n_means, 'in_file_a')
    w.connect(n_tcat2, 'out_file', n_means, 'in_file_b')
    w.connect(n_means, 'out_file', n_mask_both, 'in_file')
    w.connect(n_means, 'out_file', n_clip_both, 'in_file')
    w.connect(n_clip_both, ('clip_val', _to_args), n_mask_both, 'args')
    w.connect(n_mask_both, 'out_file', n_calc_both, 'in_file_a')
    w.connect(n_means, 'out_file', n_calc_both, 'in_file_b')
    w.connect(n_calc_both, 'out_file', n_out, 'prf_preproc')

    return w
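
# Notes on the helpers above: `_to_args` (defined elsewhere in the source) is
# expected to turn 3dClipLevel's clip value into an extra-arguments string for
# 3dAutomask, so each mask is thresholded at its run's own clip level. Overall,
# both pRF runs are registered to volume 5 of run 1, masked per run, averaged,
# masked again, and the masked mean is returned as 'prf_preproc'.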
Exemplo n.º 19
0
python3_env = "daniel"
python2_env = "daniel2"
cleanup = True
""" do not edit below """

for i in range(len(file_in)):

    # get fileparts of input
    path_file, name_file, ext_file = get_filename(file_in[i])

    # filenames
    file_vol0 = os.path.join(path_file, name_file + "_vol0" + ext_file)
    file_out = os.path.join(path_file, name_file + "_gnlcorr" + ext_file)

    # extract first volume
    fslroi = ExtractROI()
    fslroi.inputs.in_file = file_in[i]
    fslroi.inputs.roi_file = file_vol0
    fslroi.inputs.output_type = "NIFTI"
    fslroi.inputs.t_min = 0
    fslroi.inputs.t_size = 1
    fslroi.run()
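    # (t_min=0, t_size=1 is the nipype equivalent of `fslroi in out 0 1`,
    # i.e. keep only the first volume of the series)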

    # execute gnl correction
    gnl_correction(file_vol0, file_bash, file_coeff, python3_env, python2_env,
                   path_file, False)

    # apply warp to first volume
    applywarp = ApplyWarp()
    applywarp.inputs.in_file = file_in[i]
    applywarp.inputs.ref_file = file_in[i]
Exemplo n.º 20
0
def preproc(data_dir, sink_dir, subject, task, session, run, masks,
            motion_thresh, moco):
    from nipype.interfaces.fsl import MCFLIRT, FLIRT, FNIRT, ExtractROI, ApplyWarp, MotionOutliers, InvWarp, FAST
    #from nipype.interfaces.afni import AlignEpiAnatPy
    from nipype.interfaces.utility import Function
    from nilearn.plotting import plot_anat
    from nilearn import input_data

    #WRITE A DATA GRABBER
    def get_niftis(subject_id, data_dir, task, run, session):
        from os.path import join, exists
        t1 = join(data_dir, subject_id, 'session-{0}'.format(session),
                  'anatomical', 'anatomical-0', 'anatomical.nii.gz')
        #t1_brain_mask = join(data_dir, subject_id, 'session-1', 'anatomical', 'anatomical-0', 'fsl', 'anatomical-bet.nii.gz')
        epi = join(data_dir, subject_id, 'session-{0}'.format(session), task,
                   '{0}-{1}'.format(task, run), '{0}.nii.gz'.format(task))
        assert exists(t1), "t1 does not exist at {0}".format(t1)
        assert exists(epi), "epi does not exist at {0}".format(epi)
        standard = '/home/applications/fsl/5.0.8/data/standard/MNI152_T1_2mm.nii.gz'
        return t1, epi, standard

    data = Function(
        function=get_niftis,
        input_names=["subject_id", "data_dir", "task", "run", "session"],
        output_names=["t1", "epi", "standard"])
    data.inputs.data_dir = data_dir
    data.inputs.subject_id = subject
    data.inputs.run = run
    data.inputs.session = session
    data.inputs.task = task
    grabber = data.run()

    if session == 0:
        sesh = 'pre'
    if session == 1:
        sesh = 'post'

    #reg_dir = '/home/data/nbc/physics-learning/data/first-level/{0}/session-1/retr/retr-{1}/retr-5mm.feat/reg'.format(subject, run)
    #set output paths for quality assurance pngs
    qa1 = join(
        sink_dir, 'qa',
        '{0}-session-{1}_{2}-{3}_t1_flirt.png'.format(subject, session, task,
                                                      run))
    qa2 = join(
        sink_dir, 'qa',
        '{0}-session-{1}_{2}-{3}_mni_flirt.png'.format(subject, session, task,
                                                       run))
    qa3 = join(
        sink_dir, 'qa',
        '{0}-session-{1}_{2}-{3}_mni_fnirt.png'.format(subject, session, task,
                                                       run))
    confound_file = join(
        sink_dir, sesh, subject,
        '{0}-session-{1}_{2}-{3}_confounds.txt'.format(subject, session, task,
                                                       run))

    #run motion correction if indicated
    if moco == True:
        mcflirt = MCFLIRT(ref_vol=144, save_plots=True, output_type='NIFTI_GZ')
        mcflirt.inputs.in_file = grabber.outputs.epi
        #mcflirt.inputs.in_file = join(data_dir, subject, 'session-1', 'retr', 'retr-{0}'.format(run), 'retr.nii.gz')
        mcflirt.inputs.out_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_mcf.nii.gz'.format(
                subject, session, task, run))
        flirty = mcflirt.run()
        motion = np.genfromtxt(flirty.outputs.par_file)
    else:
        print "no moco needed"
        motion = 0

    #calculate motion outliers
    try:
        mout = MotionOutliers(metric='fd', threshold=motion_thresh)
        mout.inputs.in_file = grabber.outputs.epi
        mout.inputs.out_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_fd-gt-{4}mm'.format(
                subject, session, task, run, motion_thresh))
        mout.inputs.out_metric_plot = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_metrics.png'.format(
                subject, session, task, run))
        mout.inputs.out_metric_values = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_fd.txt'.format(subject, session, task,
                                                    run))
        moutliers = mout.run()
        outliers = np.genfromtxt(moutliers.outputs.out_file)
        e = 'no errors in motion outliers, yay'
    except Exception as e:
        print(e)
        outliers = np.genfromtxt(mout.inputs.out_metric_values)
        #set everything above the threshold to 1 and everything below to 0
        outliers[outliers > motion_thresh] = 1
        outliers[outliers < motion_thresh] = 0

    #concatenate motion parameters and motion outliers to form confounds file

    #outliers = outliers.reshape((outliers.shape[0],1))
    conf = outliers
    np.savetxt(confound_file, conf, delimiter=',')
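    # (the motion-outlier output, one indicator column per flagged volume, is
    # saved as the confounds file for later nuisance regression)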

    #extract an example volume for normalization
    ex_fun = ExtractROI(t_min=144, t_size=1)
    ex_fun.inputs.in_file = flirty.outputs.out_file
    ex_fun.inputs.roi_file = join(
        sink_dir, sesh, subject,
        '{0}-session-{1}_{2}-{3}-example_func.nii.gz'.format(
            subject, session, task, run))
    fun = ex_fun.run()

    warp = ApplyWarp(interp="nn", abswarp=True)

    if not exists(
            '/home/data/nbc/physics-learning/data/first-level/{0}/session-{1}/{2}/{2}-{3}/{2}-5mm.feat/reg/example_func2standard_warp.nii.gz'
            .format(subject, session, task, run)):
        #two-step normalization using flirt and fnirt, outputting qa pix
        flit = FLIRT(cost_func="corratio", dof=12)
        reg_func = flit.run(
            reference=fun.outputs.roi_file,
            in_file=grabber.outputs.t1,
            searchr_x=[-180, 180],
            searchr_y=[-180, 180],
            out_file=join(
                sink_dir, sesh, subject,
                '{0}-session-{1}_{2}-{3}_t1-flirt.nii.gz'.format(
                    subject, session, task, run)),
            out_matrix_file=join(
                sink_dir, sesh, subject,
                '{0}-session-{1}_{2}-{3}_t1-flirt.mat'.format(
                    subject, session, task, run)))
        reg_mni = flit.run(
            reference=grabber.outputs.t1,
            in_file=grabber.outputs.standard,
            searchr_y=[-180, 180],
            searchr_z=[-180, 180],
            out_file=join(
                sink_dir, sesh, subject,
                '{0}-session-{1}_{2}-{3}_mni-flirt-t1.nii.gz'.format(
                    subject, session, task, run)),
            out_matrix_file=join(
                sink_dir, sesh, subject,
                '{0}-session-{1}_{2}-{3}_mni-flirt-t1.mat'.format(
                    subject, session, task, run)))

        #plot_stat_map(aligner.outputs.out_file, bg_img=fun.outputs.roi_file, colorbar=True, draw_cross=False, threshold=1000, output_file=qa1a, dim=-2)
        display = plot_anat(fun.outputs.roi_file, dim=-1)
        display.add_edges(reg_func.outputs.out_file)
        display.savefig(qa1, dpi=300)
        display.close()

        display = plot_anat(grabber.outputs.t1, dim=-1)
        display.add_edges(reg_mni.outputs.out_file)
        display.savefig(qa2, dpi=300)
        display.close()

        perf = FNIRT(output_type='NIFTI_GZ')
        perf.inputs.warped_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_mni-fnirt-t1.nii.gz'.format(
                subject, session, task, run))
        perf.inputs.affine_file = reg_mni.outputs.out_matrix_file
        perf.inputs.in_file = grabber.outputs.standard
        perf.inputs.subsampling_scheme = [8, 4, 2, 2]
        perf.inputs.fieldcoeff_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_mni-fnirt-t1-warpcoeff.nii.gz'.format(
                subject, session, task, run))
        perf.inputs.field_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_mni-fnirt-t1-warp.nii.gz'.format(
                subject, session, task, run))
        perf.inputs.ref_file = grabber.outputs.t1
        reg2 = perf.run()
        warp.inputs.field_file = reg2.outputs.field_file
        #plot fnirted MNI overlaid on example func
        display = plot_anat(grabber.outputs.t1, dim=-1)
        display.add_edges(reg2.outputs.warped_file)
        display.savefig(qa3, dpi=300)
        display.close()
    else:
        warpspeed = InvWarp(output_type='NIFTI_GZ')
        warpspeed.inputs.warp = '/home/data/nbc/physics-learning/data/first-level/{0}/session-{1}/{2}/{2}-{3}/{2}-5mm.feat/reg/example_func2standard_warp.nii.gz'.format(
            subject, session, task, run)
        warpspeed.inputs.reference = fun.outputs.roi_file
        warpspeed.inputs.inverse_warp = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_mni-fnirt-t1-warp.nii.gz'.format(
                subject, session, task, run))
        mni2epiwarp = warpspeed.run()
        warp.inputs.field_file = mni2epiwarp.outputs.inverse_warp

    for key in masks.keys():
        #warp takes us from mni to epi
        warp.inputs.in_file = masks[key]
        warp.inputs.ref_file = fun.outputs.roi_file
        warp.inputs.out_file = join(
            sink_dir, sesh, subject,
            '{0}-session-{1}_{2}-{3}_{4}.nii.gz'.format(
                subject, session, task, run, key))
        net_warp = warp.run()

        qa_file = join(
            sink_dir, 'qa', '{0}-session-{1}_{2}-{3}_qa_{4}.png'.format(
                subject, session, task, run, key))

        display = plotting.plot_roi(net_warp.outputs.out_file,
                                    bg_img=fun.outputs.roi_file,
                                    colorbar=True,
                                    vmin=0,
                                    vmax=18,
                                    draw_cross=False)
        display.savefig(qa_file, dpi=300)
        display.close()

    return flirty.outputs.out_file, confound_file, e
Exemplo n.º 21
0
    def __init__(self, parent, dir_dic, bids):
        super().__init__(parent, dir_dic, bids)

        # Create interfaces ============================================================================================
        # BET
        T1w_BET = Node(BET(), name="T1w_BET")
        T1w_BET.btn_string = 'T1w Brain Extraction'
        self.interfaces.append(T1w_BET)

        T1w_gad_BET = Node(BET(), name="T1w_gad_BET")
        T1w_gad_BET.btn_string = 'T1w Gadolinium Enhanced Brain Extraction'
        self.interfaces.append(T1w_gad_BET)

        T2w_dbs_BET = Node(BET(), name="T2w_dbs_BET")
        T2w_dbs_BET.btn_string = 'T2w DBS Acquisition Brain Extraction'
        self.interfaces.append(T2w_dbs_BET)

        dwi_BET = Node(BET(), name="dwi_BET")
        dwi_BET.btn_string = 'dwi Brain Extraction'
        self.interfaces.append(dwi_BET)

        # BFC
        T1w_BFC = Node(N4BiasFieldCorrection(), name="T1w_BFC")
        T1w_BFC.btn_string = 'T1w Bias Field Correction'
        self.interfaces.append(T1w_BFC)

        # Split
        dwi_ROI_b0 = Node(ExtractROI(), name="dwi_ROI_b0")
        dwi_ROI_b0.btn_string = 'dwi Extract b0'
        self.interfaces.append(dwi_ROI_b0)

        # Eddy current correction
        dwi_Eddy = Node(Eddy(), name="dwi_Eddy")
        dwi_Eddy.btn_string = 'dwi Eddy Current Correction'
        self.interfaces.append(dwi_Eddy)

        # Distortion correction
        # as this section is script/comment heavy it was put into a function
        self.distortion_correction_workflow()

        # Data output (i.e. sink) ======================================================================================
        self.sink = Node(DataSink(), name="sink")
        self.sink.btn_string = 'data sink'
        self.sink.inputs.base_directory = self.dir_dic['data_dir']

        self.jsink = Node(JSONFileSink(), name="jsink")
        self.jsink.btn_string = 'json sink'
        self.jsink.inputs.base_directory = self.dir_dic['data_dir']

        # Initialize workflow ==========================================================================================
        self.wf = Workflow(name='pre_processing')

        # T1w BET to ants N4BiasFieldCorrection
        self.wf.connect([(self.return_interface("T1w_BET"),
                          self.return_interface("T1w_BFC"),
                          [("out_file", "input_image")])])
        self.wf.connect([(self.return_interface("T1w_BET"),
                          self.return_interface("T1w_BFC"), [("mask_file",
                                                              "mask_image")])])

        # Eddy
        self.wf.connect([(self.return_interface("dwi_BET"),
                          self.return_interface("dwi_Eddy"), [("out_file",
                                                               "in_file")])])

        self.wf.connect([(self.return_interface("dwi_BET"),
                          self.return_interface("dwi_Eddy"), [("mask_file",
                                                               "in_mask")])])

        # ROI b0
        self.wf.connect([(self.return_interface("dwi_Eddy"),
                          self.return_interface("dwi_ROI_b0"),
                          [("out_corrected", "in_file")])])

        # Distortion Correction:
        # b0_T1_Reg:
        #   -i: moving image
        #   -r: T1
        #   -x: T1 mask
        self.wf.connect([(self.return_interface("dwi_ROI_b0"),
                          self.return_interface("b0_T1w_Reg"),
                          [("roi_file", "moving_image")])])

        self.wf.connect([(self.return_interface("T1w_BFC"),
                          self.return_interface("b0_T1w_Reg"),
                          [("output_image", "fixed_image")])])

        # test remove as doesn't seem useful (see self.distortion_correction_workflow()) and causes a crash when added
        # self.wf.connect([(self.return_interface("T1w_BET"), self.return_interface("b0_T1w_Reg"),
        #                   [("mask_file", "fixed_image_mask")])])

        # dwi_T1_Tran:
        #   -i: Eddy corrected image
        #   -r: Eddy corrected b0
        #   -t: transforms
        self.wf.connect([(self.return_interface("dwi_Eddy"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("out_corrected", "input_image")])])

        self.wf.connect([(self.return_interface("dwi_ROI_b0"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("roi_file", "reference_image")])])

        self.wf.connect([(self.return_interface("b0_T1w_Reg"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("composite_transform", "transforms")])])

        # BaseInterface generates a dict mapping button strings to the workflow nodes
        # self.map_workflow()
        graph_file = self.wf.write_graph("pre_processing", graph2use='flat')
        self.graph_file = graph_file.replace("pre_processing.png",
                                             "pre_processing_detailed.png")

        self.init_settings()
        self.init_ui()
Exemplo n.º 22
0
        def convert_single_subject(subj_folder):
            import os

            import nibabel as nb
            import numpy as np

            t1_folder = path.join(subj_folder, "PROCESSED", "MPRAGE", "SUBJ_111")
            subj_id = os.path.basename(subj_folder)
            print("Converting ", subj_id)
            numerical_id = (subj_id.split("_"))[1]
            bids_id = "sub-OASIS1" + str(numerical_id)
            bids_subj_folder = path.join(dest_dir, bids_id)
            if not os.path.isdir(bids_subj_folder):
                os.mkdir(bids_subj_folder)

            session_folder = path.join(bids_subj_folder, "ses-M00")
            if not os.path.isdir(session_folder):
                os.mkdir(path.join(session_folder))
                os.mkdir(path.join(session_folder, "anat"))

            # In order to convert the Analyze format to Nifti, the path to the .img file is required
            img_file_path = glob(path.join(t1_folder, "*.img"))[0]
            output_path = path.join(
                session_folder, "anat", bids_id + "_ses-M00_T1w.nii.gz"
            )

            # First, convert to Nifti so that we can extract the s_form with NiBabel
            # (NiBabel creates an 'Spm2AnalyzeImage' object that does not contain a 'get_sform' method)
            img_with_wrong_orientation_analyze = nb.load(img_file_path)

            # OASIS-1 images have the same header but sform is incorrect
            # To solve this issue, we use header from images converted with FreeSurfer
            # to generate a 'clean hard-coded' header
            # affine:
            # [[   0.    0.   -1.   80.]
            #  [   1.    0.    0. -128.]
            #  [   0.    1.    0. -128.]
            #  [   0.    0.    0.    1.]]
            # fmt: off
            affine = np.array(
                [
                    0, 0, -1, 80,
                    1, 0, 0, -128,
                    0, 1, 0, -128,
                    0, 0, 0, 1
                ]
            ).reshape(4, 4)
            # fmt: on
            s_form = affine.astype(np.int16)

            hdr = nb.Nifti1Header()
            hdr.set_data_shape((256, 256, 160))
            hdr.set_data_dtype(np.int16)
            hdr["bitpix"] = 16
            hdr.set_sform(s_form, code="scanner")
            hdr.set_qform(s_form, code="scanner")
            hdr["extents"] = 16384
            hdr["xyzt_units"] = 10

            img_with_good_orientation_nifti = nb.Nifti1Image(
                np.round(img_with_wrong_orientation_analyze.get_data()).astype(
                    np.int16
                ),
                s_form,
                header=hdr,
            )
            nb.save(img_with_good_orientation_nifti, output_path)

            # Header correction to obtain dim0 = 3
            fslroi = ExtractROI(
                in_file=output_path, roi_file=output_path, t_min=0, t_size=1
            )
            fslroi.run()
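            # (re-running fslroi over the single frame rewrites the NIfTI
            # header so dim[0] becomes 3, dropping the singleton time axis)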
Exemplo n.º 23
0
import os
from os.path import abspath
from datetime import datetime
from IPython.display import Image
import pydot
from nipype import Workflow, Node, MapNode, Function, config
from nipype.interfaces.fsl import TOPUP, ApplyTOPUP, BET, ExtractROI,  Eddy, FLIRT, FUGUE
from nipype.interfaces.fsl.maths import MathsCommand
import nipype.interfaces.utility as util 
import nipype.interfaces.mrtrix3 as mrt
#Requirements for the workflow to run smoothly: all files are in NIfTI format and named according to the following standard: 
#Images are from the tonotopy DKI sequences on the 7T Philips Achieva scanner in Lund. It should work with any DKI sequence and possibly also a standard DTI but the setting for B0-corrections, epi-distortion corrections and eddy current corrections will be wrong. 
#DKI file has a base name shared with bvec and bval in FSL format. E.g. "DKI.nii.gz" "DKI.bvec" and "DKI.bval". 
#There is one b0-volume with reversed (P->A) phase encoding called DKIbase+_revenc. E.g. "DKI_revenc.nii.gz". 
#Philips B0-map magnitude and phase offset (in Hz) images. 
#One input file for topup describing the images as specified by topup. 
#Set nbrOfThreads to number of available CPU threads to run the analyses. 
### Need to make better revenc for the 15 version if we choose to use it (i.e. same TE and TR)
#Set to relevant directory/parameters
datadir=os.path.abspath("/Users/ling-men/Documents/MRData/testDKI")
rawDKI_base='DKI_15' 
B0map_base = 'B0map'
nbrOfThreads=6
print_graph = True 
acqparam_file = os.path.join(datadir,'acqparams.txt')
index_file = os.path.join(datadir,'index.txt')
####
#config.enable_debug_mode()
DKI_nii=os.path.join(datadir, rawDKI_base+'.nii.gz')
DKI_bval=os.path.join(datadir, rawDKI_base+'.bval')
Exemplo n.º 24
0
	
#####################################################
### If Specified Create FMAP From DWI NIFTI Files ###
#####################################################
	
	DWI_FMAP=list(filter(lambda x: "_dwi.nii.gz" in x, FMAP_SCANS))
	if OPT_GEN_FMAP_DWI == "TRUE" and DWI_SCANS and DWI_FMAP:
		JSON=list(filter(lambda x: "_dwi.json" in x, DWI_SCANS))[0]
		NIFTI=list(filter(lambda x: "_dwi.nii.gz" in x, DWI_SCANS))[0]
		CONTENT=json.load(open(JSON), object_pairs_hook=OrderedDict)
		PHASE_NEW=DICT_DIR.get(CONTENT.get("PhaseEncodingDirection"))
		TEMP_OUTPUT=list(filter(lambda x: "_dwi.nii.gz" in x, FMAP_SCANS))[0].replace("","")
		OUTPUT=TEMP_OUTPUT.replace([x for x in TEMP_OUTPUT.split("_") if "acq-" in x][0],"acq-{}".format(PHASE_NEW))
		Extract = ExtractROI(
			in_file=NIFTI,
			roi_file=OUTPUT,
			output_type="NIFTI_GZ",
			t_min=0,
			t_size=1)
		Extract.run()
		json.dump(CONTENT, open(OUTPUT.replace("nii.gz","json"), "w"), indent=12)
		FMAP_SCANS=list(filter(lambda x:'/fmap/sub-' in x, directory_structure(DIR_SUB)))
		
#############################################################
### Create Directionary Keys of Task Names for FUNC Scans ###
#############################################################
	
	if FUNC_SCANS:
		FUNC_JSONS=list(filter(lambda x:'bold.json' in x, FUNC_SCANS))
		for FUNC_JSON in FUNC_JSONS:
			if not any('task-' in elem for elem in os.path.basename(FUNC_JSON).split("_")):
				Error = open(os.path.join('.', 'Fatal_BIDS_Error.txt'), 'w')
Exemplo n.º 25
0
def make_workflow(n_fmap=10):
    n_in = Node(IdentityInterface(fields=[
        'func',
        'fmap',
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'func1',
        'func2',
        'mean',
        ]), name='output')

    w = Workflow('preproc')

    w_mc_func = make_w_mcmean('func')
    if n_fmap == 1:  # nipype cannot handle conditional nodes
        w_mc_fmap = identify_workflow()
    else:
        w_mc_fmap = make_w_mcmean('fmap')
    w_masking = make_w_masking()
    w_warp = make_w_warp()

    n_apply = Node(interface=NwarpApply(), name='warpapply')
    n_apply.inputs.out_file = 'preprocessed.nii'

    n_mean = Node(interface=TStat(), name='mean')
    n_mean.inputs.args = '-mean'
    n_mean.inputs.outputtype = 'NIFTI_GZ'

    n_roi1 = Node(ExtractROI(), 'split1')
    n_roi1.inputs.t_min = 0
    n_roi1.inputs.roi_file = 'preprocessed_1.nii.gz'
    n_roi2 = Node(ExtractROI(), 'split2')
    n_roi2.inputs.roi_file = 'preprocessed_2.nii.gz'

    w.connect(n_in, 'fmap', w_mc_fmap, 'input.epi')

    w.connect(w_mc_fmap, 'output.mean', w_masking, 'input.fmap')
    w.connect(n_in, 'func', w_masking, 'input.func')
    w.connect(w_masking, 'output.func', w_mc_func, 'input.epi')

    w.connect(w_masking, 'output.fmap', w_warp, 'input.fmap')
    w.connect(w_mc_func, 'output.mean', w_warp, 'input.func')
    w.connect(w_mc_func, 'output.motion_parameters', w_warp, 'input.motion_parameters')

    w.connect(w_warp, 'output.warping', n_apply, 'warp')
    w.connect(w_masking, 'output.func', n_apply, 'in_file')
    w.connect(w_mc_fmap, 'output.mean', n_apply, 'master')
    w.connect(n_apply, 'out_file', n_mean, 'in_file')

    w.connect(n_apply, 'out_file', n_roi1, 'in_file')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi1, 't_size')
    w.connect(n_apply, 'out_file', n_roi2, 'in_file')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi2, 't_min')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi2, 't_size')

    w.connect(n_mean, 'out_file', n_out, 'mean')
    w.connect(n_roi1, 'roi_file', n_out, 'func1')
    w.connect(n_roi2, 'roi_file', n_out, 'func2')

    return w
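
# `_half_dynamics` is not shown in this snippet; a plausible sketch (an
# assumption, not the author's code): return half the number of volumes so the
# two ExtractROI nodes split the preprocessed run into equal halves.
def _half_dynamics(in_file):
    import nibabel as nib
    return nib.load(in_file).shape[3] // 2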
Exemplo n.º 26
0
def run(base_dir):
    template = '/home/brainlab/Desktop/Rudas/Data/Parcellation/TPM.nii'
    matlab_cmd = '/home/brainlab/Desktop/Rudas/Tools/spm12_r7487/spm12/run_spm12.sh /home/brainlab/Desktop/Rudas/Tools/MCR/v713/ script'
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    print('SPM version: ' + str(spm.SPMCommand().version))

    structural_dir = '/home/brainlab/Desktop/Rudas/Data/Propofol/Structurals/'
    experiment_dir = opj(base_dir, 'output/')
    output_dir = 'datasink'
    working_dir = 'workingdir'
    '''

    subject_list = ['2014_05_02_02CB',
                    '2014_05_16_16RA',
                    '2014_05_30_30AQ',
                    '2014_07_04_04HD']
    '''
    subject_list = [
        '2014_05_02_02CB', '2014_05_16_16RA', '2014_05_30_30AQ',
        '2014_07_04_04HD', '2014_07_04_04SG', '2014_08_13_13CA',
        '2014_10_08_08BC', '2014_10_08_08VR', '2014_10_22_22CY',
        '2014_10_22_22TK', '2014_11_17_17EK', '2014_11_17_17NA',
        '2014_11_19_19SA', '2014_11_19_AK', '2014_11_25.25JK',
        '2014_11_27_27HF', '2014_12_10_10JR'
    ]

    # list of subject identifiers

    fwhm = 8  # Smoothing widths to apply (Gaussian kernel size)
    TR = 2  # Repetition time
    init_volume = 0  # First volume index to use in the pipeline
    iso_size = 2  # Isometric resample of functional images to voxel size (in mm)

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=init_volume,
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="motion_correction")

    # SliceTimer - correct for slice wise acquisition
    slicetimer = Node(SliceTimer(index_dir=False,
                                 interleaved=True,
                                 output_type='NIFTI',
                                 time_repetition=TR),
                      name="slice_timing_correction")

    # Smooth - image smoothing
    smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

    n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'), name='n4bias')

    descomposition = Node(Descomposition(n_components=20,
                                         low_pass=0.1,
                                         high_pass=0.01,
                                         tr=TR),
                          name='descomposition')

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="artifact_detection")

    extract_confounds_ws_csf = Node(
        ExtractConfounds(out_file='ev_without_gs.csv'),
        name='extract_confounds_ws_csf')

    extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                 delimiter=','),
                                name='extract_confounds_global_signal')

    signal_extraction = Node(SignalExtraction(
        time_series_out_file='time_series.csv',
        correlation_matrix_out_file='correlation_matrix.png',
        atlas_identifier='cort-maxprob-thr25-2mm',
        tr=TR,
        plot=True),
                             name='signal_extraction')

    art_remotion = Node(ArtifacRemotion(out_file='fmri_art_removed.nii'),
                        name='artifact_remotion')

    # BET - Skullstrip anatomical and functional images
    bet_t1 = Node(BET(frac=0.5, robust=True, mask=True,
                      output_type='NIFTI_GZ'),
                  name="bet_t1")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                      tpm=template,
                                      write_voxel_sizes=[2, 2, 2],
                                      write_bounding_box=[[-90, -126, -72],
                                                          [90, 90, 108]]),
                          name="normalize_fmri")

    gunzip = Node(Gunzip(), name="gunzip")

    normalize_t1 = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                        name="normalize_t1")

    normalize_masks = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                           name="normalize_masks")

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="wm_mask_threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                     name="linear_warp_estimation")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="nonlinear_warp_estimation")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="registration_fmri")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="registration_mean_fmri")

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list)]

    # SelectFiles - to grab the data (alternativ to DataGrabber)
    anat_file = opj(structural_dir, '{subject_id}', 't1.nii')
    func_file = opj('{subject_id}', 'fmri.nii')

    templates = {'anat': anat_file, 'func': func_file}

    selectfiles = Node(SelectFiles(templates, base_directory=base_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    # Create a coregistration workflow
    coregwf = Workflow(name='coreg_fmri_to_t1')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow

    coregwf.connect([
        (bet_t1, n4bias, [('out_file', 'in_file')]),
        (n4bias, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_latest),
                                    'in_file')]),
        (n4bias, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp_mean, [('out_file', 'reference')]),
    ])

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-')]
    #                 ('_fwhm_', 'fwhm-'),
    #                 ('_roi', ''),
    #                 ('_mcf', ''),
    #                 ('_st', ''),
    #                 ('_flirt', ''),
    #                 ('.nii_mean_reg', '_mean'),
    #                 ('.nii.par', '.par'),
    #                 ]
    #subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

    #substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, mcflirt, [('roi_file', 'in_file')]),
        (mcflirt, slicetimer, [('out_file', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_t1.in_file'),
                                ('anat', 'nonlinear_warp_estimation.reference')
                                ]),
        (mcflirt, coregwf, [('mean_img', 'linear_warp_estimation.in_file'),
                            ('mean_img', 'nonlinear_warp_estimation.in_file'),
                            ('mean_img', 'registration_mean_fmri.in_file')]),
        (slicetimer, coregwf, [('slice_time_corrected_file',
                                'registration_fmri.in_file')]),
        (coregwf, art, [('registration_fmri.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (art, art_remotion, [('outlier_files', 'outlier_files')]),
        (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')]),
        (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
        (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
        (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
        (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
        (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                      get_wm_csf), 'apply_to_files')]),
        (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
        (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')]),
        (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                      'list_mask')]),
        (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),

        #(smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
        #(normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
        #(extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
        (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
        #(extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
        (extract_confounds_ws_csf, signal_extraction, [('out_file',
                                                        'confounds_file')]),

        #(smooth, descomposition, [('smoothed_files', 'in_file')]),
        #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

        #(extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
        (extract_confounds_ws_csf, datasink,
         [('out_file', 'preprocessing.@confounds_without_gs')]),
        (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')]),
        (normalize_fmri, datasink, [('normalized_files',
                                     'preprocessing.@fmri_normalized')]),
        (normalize_t1, datasink, [('normalized_files',
                                   'preprocessing.@t1_normalized')]),
        (normalize_masks, datasink, [('normalized_files',
                                      'preprocessing.@masks_normalized')]),
        (signal_extraction, datasink, [('time_series_out_file',
                                        'preprocessing.@time_series')]),
        (signal_extraction, datasink, [('correlation_matrix_out_file',
                                        'preprocessing.@correlation_matrix')]),
        (signal_extraction, datasink,
         [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')]),
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
    ])

    preproc.write_graph(graph2use='colored', format='png', simple_form=True)
    preproc.run()
Exemplo n.º 27
0
def main(paths, options_binary_string, ANAT, num_proc=7):

    json_path = paths[0]
    base_directory = paths[1]
    motion_correction_bet_directory = paths[2]
    parent_wf_directory = paths[3]
    # functional_connectivity_directory=paths[4]
    coreg_reg_directory = paths[5]
    atlas_resize_reg_directory = paths[6]
    subject_list = paths[7]
    datasink_name = paths[8]
    # fc_datasink_name=paths[9]
    atlasPath = paths[10]
    # brain_path=paths[11]
    # mask_path=paths[12]
    # atlas_path=paths[13]
    # tr_path=paths[14]
    # motion_params_path=paths[15]
    # func2std_mat_path=paths[16]
    # MNI3mm_path=paths[17]
    # demographics_file_path = paths[18]
    # phenotype_file_path = paths[19]
    data_directory = paths[20]

    number_of_subjects = len(subject_list)
    print("Working with ", number_of_subjects, " subjects.")

    # Create our own custom function - BIDSDataGrabber using a Function Interface.

    # In[858]:

    def get_nifti_filenames(subject_id, data_dir):
        #     Remember that all the necessary imports need to be INSIDE the function for the Function Interface to work!
        from bids.grabbids import BIDSLayout

        layout = BIDSLayout(data_dir)
        run = 1

        anat_file_path = [
            f.filename for f in layout.get(
                subject=subject_id, type='T1w', extensions=['nii', 'nii.gz'])
        ]
        func_file_path = [
            f.filename for f in layout.get(subject=subject_id,
                                           type='bold',
                                           run=run,
                                           extensions=['nii', 'nii.gz'])
        ]

        if len(anat_file_path) == 0:
            return None, func_file_path[0]  # No Anatomical files present
        return anat_file_path[0], func_file_path[0]

    BIDSDataGrabber = Node(Function(
        function=get_nifti_filenames,
        input_names=['subject_id', 'data_dir'],
        output_names=['anat_file_path', 'func_file_path']),
                           name='BIDSDataGrabber')
    # BIDSDataGrabber.iterables = [('subject_id',subject_list)]
    BIDSDataGrabber.inputs.data_dir = data_directory
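
    # A minimal standalone check of this node (hypothetical subject ID):
    #   BIDSDataGrabber.inputs.subject_id = '0050002'
    #   res = BIDSDataGrabber.run()
    #   print(res.outputs.anat_file_path, res.outputs.func_file_path)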

    # ## Return TR

    def get_TR(in_file):
        from bids.grabbids import BIDSLayout

        data_directory = '/home1/varunk/data/ABIDE1/RawDataBIDs'  # NOTE: hardcoded dataset root
        layout = BIDSLayout(data_directory)
        metadata = layout.get_metadata(path=in_file)
        TR = metadata['RepetitionTime']
        return TR

    # ---------------- Added new Node to return TR and other slice timing correction params-------------------------------
    def _getMetadata(in_file):
        from bids.grabbids import BIDSLayout
        import logging

        logger = logging.getLogger(__name__)
        logger.setLevel(logging.DEBUG)

        # create a file handler
        handler = logging.FileHandler('progress.log')

        # add the handlers to the logger
        logger.addHandler(handler)

        interleaved = True
        index_dir = False
        data_directory = '/home1/varunk/data/ABIDE1/RawDataBIDs'  # NOTE: hardcoded dataset root
        layout = BIDSLayout(data_directory)
        metadata = layout.get_metadata(path=in_file)
        print(metadata)

        logger.info('Extracting Meta Data of file: %s', in_file)
        try:
            tr = metadata['RepetitionTime']
        except KeyError:
            print(
                'Key RepetitionTime not found in task-rest_bold.json so using a default of 2.0 '
            )
            tr = 2
            logger.error(
                'Key RepetitionTime not found in task-rest_bold.json for file %s so using a default of 2.0 ',
                in_file)

        try:
            slice_order = metadata['SliceAcquisitionOrder']
        except KeyError:
            print(
                'Key SliceAcquisitionOrder not found in task-rest_bold.json so using a default of interleaved ascending '
            )
            logger.error(
                'Key SliceAcquisitionOrder not found in task-rest_bold.json for file %s so using a default of interleaved ascending',
                in_file)
            return tr, index_dir, interleaved

        if slice_order.split(' ')[0] == 'Sequential':
            interleaved = False
        if slice_order.split(' ')[1] == 'Descending':
            index_dir = True

        return tr, index_dir, interleaved

    getMetadata = Node(Function(
        function=_getMetadata,
        input_names=['in_file'],
        output_names=['tr', 'index_dir', 'interleaved']),
                       name='getMetadata')
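
    # A minimal standalone check (hypothetical file path):
    #   getMetadata.inputs.in_file = '/path/to/sub-0050002_task-rest_run-1_bold.nii.gz'
    #   res = getMetadata.run()
    #   print(res.outputs.tr, res.outputs.index_dir, res.outputs.interleaved)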

    # ### Skipping 4 starting scans
    # Extract ROI for skipping first 4 scans of the functional data
    # > **Arguments:**
    # t_min: (corresponds to time dimension) Denotes the starting time of the inclusion
    # t_size: Denotes the number of scans to include
    #
    # The logic behind skipping the 4 initial scans is to keep only volumes acquired after the subject has stabilized in the scanner.

    # In[863]:

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
                   name="extract")

    # ### Slice time correction
    # Created a Node that does slice time correction
    # > **Arguments**:
    # index_dir=False -> slices were acquired bottom to top, i.e. in ascending order
    # interleaved=True -> odd slices were acquired first, then even slices (or the reverse; check the acquisition protocol)

    slicetimer = Node(SliceTimer(output_type='NIFTI'), name="slicetimer")

    # ### Motion Correction
    # Motion correction is done using FSL's MCFLIRT. It aligns all the volumes of a functional scan to a common reference (here, the mean volume)

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="mcflirt")

    # A pass-through node that forwards the output of MCFLIRT to the next workflow, and keeps the downstream connections intact when MCFLIRT is skipped
    from_mcflirt = Node(IdentityInterface(fields=['in_file']),
                        name="from_mcflirt")

    # ### Skull stripping
    # I used FSL's BET

    # In[868]:

    skullStrip = Node(BET(mask=False, frac=0.3, robust=True),
                      name='skullStrip')  #

    # *Note*: Do not include special characters in the ```name``` field above, because wf.write_graph will fail otherwise

    # ## Resample
    # I needed to resample the anatomical file from 1mm to 3mm, because registering a 1mm file took a very long time.
    #

    # In[872]:

    # Resample - resample anatomy to 3x3x3 voxel resolution
    resample_mni = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='Cu',  # cubic interpolation
            outputtype='NIFTI'),
        name="resample_mni")

    resample_anat = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='Cu',  # cubic interpolation
            outputtype='NIFTI'),
        name="resample_anat")

    # In[873]:

    resample_atlas = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='NN',  # nearest-neighbour interpolation, preserves atlas labels
            outputtype='NIFTI'),
        name="resample_atlas")

    resample_atlas.inputs.in_file = atlasPath
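
    # Resample wraps AFNI's 3dresample; a rough command-line equivalent
    # (with hypothetical file names) is:
    #   3dresample -dxyz 3 3 3 -rmode NN -prefix atlas_3mm.nii -inset atlas.nii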

    # # Matrix operations
    # ### For concatenating the transformation matrices

    concat_xform = Node(ConvertXFM(concat_xfm=True), name='concat_xform')

    # Node to calculate the inverse of func2std matrix
    inv_mat = Node(ConvertXFM(invert_xfm=True), name='inv_mat')
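
    # Both ConvertXFM nodes map onto FSL's convert_xfm; roughly:
    #   convert_xfm -omat func2std.mat -concat anat2std.mat func2anat.mat
    #   convert_xfm -omat std2func.mat -inverse func2std.mat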

    # ## Extracting the mean brain

    meanfunc = Node(interface=ImageMaths(op_string='-Tmean', suffix='_mean'),
                    name='meanfunc')

    meanfuncmask = Node(interface=BET(mask=True, no_output=True, frac=0.3),
                        name='meanfuncmask')

    # ## Apply Mask

    # Does BET (masking) on the whole func scan [not used; it breaks the JoinNode]
    maskfunc = Node(interface=ImageMaths(suffix='_bet', op_string='-mas'),
                    name='maskfunc')

    # Does BET (masking) on the mean func scan
    maskfunc4mean = Node(interface=ImageMaths(suffix='_bet', op_string='-mas'),
                         name='maskfunc4mean')
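
    # Both masking nodes boil down to fslmaths' -mas operation, e.g.:
    #   fslmaths mean_func.nii -mas mask.nii mean_func_bet.nii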

    # ## Datasink
    # I needed to define the structure of what files are saved and where.

    # Create DataSink object
    dataSink = Node(DataSink(), name='datasink')

    # Name of the output folder
    dataSink.inputs.base_directory = opj(base_directory, datasink_name)

    # Define substitution strings so that the data is similar to BIDS
    substitutions = [
        ('_subject_id_', 'sub-'), ('_resample_brain_flirt.nii_brain', ''),
        ('_roi_st_mcf_flirt.nii_brain_flirt', ''),
        ('task-rest_run-1_bold_roi_st_mcf.nii', 'motion_params'),
        ('T1w_resample_brain_flirt_sub-0050002_task-rest_run-1_bold_roi_st_mcf_mean_bet_flirt',
         'fun2std')
    ]

    # Feed the substitution strings to the DataSink node
    dataSink.inputs.substitutions = substitutions

    # ### Apply Mask to functional data
    # The mean file of the motion-corrected functional scan is sent to
    # skullStrip to get just the brain and the mask_image.
    # mask_image is a binary file (1 where brain is present, 0 where it isn't).
    # After getting the mask_image from skullStrip, apply that mask to the
    # aligned functional image to extract its brain and remove the skull

    # In[889]:

    # Function
    # in_file: The file on which you want to apply the mask
    # in_file2 = mask_file: The mask to use. Make sure mask_file has the same dimensions as in_file
    # out_file: Result of applying the mask to in_file -> returns the path of the output file

    def applyMask_func(in_file, in_file2):
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj

        # in_file2 is the mask file; nipype may pass paths as unicode strings, e.g. u'/tmp/tmp8daO2Q/..'
        mask_file = in_file2

        brain_data = nib.load(in_file)
        mask_data = nib.load(mask_file)

        brain = brain_data.get_data().astype('float32')
        mask = mask_data.get_data()

        # applying mask by multiplying elementwise to the binary mask

        if len(brain.shape) == 3:  # Anat file
            brain = np.multiply(brain, mask)
        elif len(brain.shape) > 3:  # Functional File
            for t in range(brain.shape[-1]):
                brain[:, :, :, t] = np.multiply(brain[:, :, :, t], mask)
        else:
            pass

        # Saving the brain file

        path = os.getcwd()

        in_file_split_list = in_file.split('/')
        in_file_name = in_file_split_list[-1]

        out_file = in_file_name + '_brain.nii.gz'  # changing name
        brain_with_header = nib.Nifti1Image(brain,
                                            affine=brain_data.affine,
                                            header=brain_data.header)
        nib.save(brain_with_header, out_file)

        out_file = opj(path, out_file)
        out_file2 = in_file2

        return out_file, out_file2
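
    # Note: nibabel's get_data() (used above) is deprecated in recent
    # releases; get_fdata() is the drop-in replacement if this is updated.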

    # #### Things learnt:
    # 1. Whenever a node executes, its working directory becomes the current directory, so any file created there is stored in that directory.
    # 2. from IPython.core.debugger import Tracer; Tracer()()  # The IPython debugger does not work inside nipype

    # Wrap the above function inside a Node

    # In[890]:

    applyMask = Node(Function(function=applyMask_func,
                              input_names=['in_file', 'in_file2'],
                              output_names=['out_file', 'out_file2']),
                     name='applyMask')

    # ### Some nodes needed for Co-registration and Normalization

    # Node for getting the xformation matrix
    func2anat_reg = Node(FLIRT(output_type='NIFTI'), name="func2anat_reg")

    # Node for applying xformation matrix to functional data
    func2std_xform = Node(FLIRT(output_type='NIFTI', apply_xfm=True),
                          name="func2std_xform")

    # Node for applying xformation matrix to functional data
    std2func_xform = Node(FLIRT(output_type='NIFTI',
                                apply_xfm=True,
                                interp='nearestneighbour'),
                          name="std2func_xform")

    # Node for Normalizing/Standardizing the anatomical and getting the xformation matrix
    anat2std_reg = Node(FLIRT(output_type='NIFTI'), name="anat2std_reg")

    # I wanted to use the MNI file as input to the workflow so I created an Identity
    # Node that reads the MNI file path and outputs the same MNI file path.
    # Then I connected this node to wherever it was needed.

    MNI152_2mm = Node(IdentityInterface(fields=['standard_file', 'mask_file']),
                      name="MNI152_2mm")
    # Set the mask_file and standard_file input in the Node. This setting sets the input mask_file permanently.
    MNI152_2mm.inputs.mask_file = os.path.expandvars(
        '$FSLDIR/data/standard/MNI152_T1_2mm_brain_mask.nii.gz')

    MNI152_2mm.inputs.standard_file = os.path.expandvars(
        '$FSLDIR/data/standard/MNI152_T1_2mm_brain.nii.gz')
    # MNI152_2mm.inputs.mask_file = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain_mask.nii.gz'
    # MNI152_2mm.inputs.standard_file = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz'

    # ## Band Pass Filtering
    # Let's band-pass filter the data using the code from https://neurostars.org/t/bandpass-filtering-different-outputs-from-fsl-and-nipype-custom-function/824/2

    ### AFNI

    bandpass = Node(afni.Bandpass(highpass=0.008,
                                  lowpass=0.08,
                                  despike=False,
                                  no_detrend=True,
                                  notrans=True,
                                  outputtype='NIFTI_GZ'),
                    name='bandpass')
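
    # afni.Bandpass wraps 3dBandpass; a rough equivalent of the node above is:
    #   3dBandpass -nodetrend -notrans -prefix out.nii.gz 0.008 0.08 in.nii.gz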

    # ### Following is a Join Node that collects the preprocessed file paths and saves them in a file

    # In[902]:

    def save_file_list_function_in_brain(in_brain):
        import numpy as np
        import os
        from os.path import join as opj

        file_list = np.asarray(in_brain)
        print('######################## File List ######################: \n',
              file_list)

        np.save('brain_file_list', file_list)
        file_name = 'brain_file_list.npy'
        out_brain = opj(os.getcwd(), file_name)  # path
        return out_brain

    def save_file_list_function_in_mask(in_mask):
        import numpy as np
        import os
        from os.path import join as opj

        file_list2 = np.asarray(in_mask)
        print('######################## File List ######################: \n',
              file_list2)

        np.save('mask_file_list', file_list2)
        file_name2 = 'mask_file_list.npy'
        out_mask = opj(os.getcwd(), file_name2)  # path
        return out_mask

    def save_file_list_function_in_motion_params(in_motion_params):
        import numpy as np
        import os
        from os.path import join as opj

        file_list3 = np.asarray(in_motion_params)
        print('######################## File List ######################: \n',
              file_list3)

        np.save('motion_params_file_list', file_list3)
        file_name3 = 'motion_params_file_list.npy'
        out_motion_params = opj(os.getcwd(), file_name3)  # path
        return out_motion_params

    def save_file_list_function_in_motion_outliers(in_motion_outliers):
        import numpy as np
        import os
        from os.path import join as opj

        file_list4 = np.asarray(in_motion_outliers)
        print('######################## File List ######################: \n',
              file_list4)

        np.save('motion_outliers_file_list', file_list4)
        file_name4 = 'motion_outliers_file_list.npy'
        out_motion_outliers = opj(os.getcwd(), file_name4)  # path
        return out_motion_outliers

    def save_file_list_function_in_joint_xformation_matrix(
            in_joint_xformation_matrix):
        import numpy as np
        import os
        from os.path import join as opj

        file_list5 = np.asarray(in_joint_xformation_matrix)
        print('######################## File List ######################: \n',
              file_list5)

        np.save('joint_xformation_matrix_file_list', file_list5)
        file_name5 = 'joint_xformation_matrix_file_list.npy'
        out_joint_xformation_matrix = opj(os.getcwd(), file_name5)  # path
        return out_joint_xformation_matrix

    def save_file_list_function_in_tr(in_tr):
        import numpy as np
        import os
        from os.path import join as opj

        tr_list = np.asarray(in_tr)
        print('######################## TR List ######################: \n',
              tr_list)

        np.save('tr_list', tr_list)
        file_name6 = 'tr_list.npy'
        out_tr = opj(os.getcwd(), file_name6)  # path
        return out_tr

    def save_file_list_function_in_atlas(in_atlas):
        import numpy as np
        import os
        from os.path import join as opj

        file_list7 = np.asarray(in_atlas)
        print('######################## File List ######################: \n',
              file_list7)

        np.save('atlas_file_list', file_list7)
        file_name7 = 'atlas_file_list.npy'
        out_atlas = opj(os.getcwd(), file_name7)  # path
        return out_atlas
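
    # The seven near-identical helpers above cannot easily be collapsed into
    # one parameterized function: the Function interface used by the JoinNodes
    # below serializes each function's source in isolation, so every helper
    # must be fully self-contained, imports and output filename included.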

    save_file_list_in_brain = JoinNode(Function(
        function=save_file_list_function_in_brain,
        input_names=['in_brain'],
        output_names=['out_brain']),
                                       joinsource="infosource",
                                       joinfield=['in_brain'],
                                       name="save_file_list_in_brain")

    save_file_list_in_mask = JoinNode(Function(
        function=save_file_list_function_in_mask,
        input_names=['in_mask'],
        output_names=['out_mask']),
                                      joinsource="infosource",
                                      joinfield=['in_mask'],
                                      name="save_file_list_in_mask")

    save_file_list_in_motion_outliers = JoinNode(
        Function(function=save_file_list_function_in_motion_outliers,
                 input_names=['in_motion_outliers'],
                 output_names=['out_motion_outliers']),
        joinsource="infosource",
        joinfield=['in_motion_outliers'],
        name="save_file_list_in_motion_outliers")

    save_file_list_in_motion_params = JoinNode(
        Function(function=save_file_list_function_in_motion_params,
                 input_names=['in_motion_params'],
                 output_names=['out_motion_params']),
        joinsource="infosource",
        joinfield=['in_motion_params'],
        name="save_file_list_in_motion_params")

    save_file_list_in_joint_xformation_matrix = JoinNode(
        Function(function=save_file_list_function_in_joint_xformation_matrix,
                 input_names=['in_joint_xformation_matrix'],
                 output_names=['out_joint_xformation_matrix']),
        joinsource="infosource",
        joinfield=['in_joint_xformation_matrix'],
        name="save_file_list_in_joint_xformation_matrix")

    save_file_list_in_tr = JoinNode(Function(
        function=save_file_list_function_in_tr,
        input_names=['in_tr'],
        output_names=['out_tr']),
                                    joinsource="infosource",
                                    joinfield=['in_tr'],
                                    name="save_file_list_in_tr")

    save_file_list_in_atlas = JoinNode(Function(
        function=save_file_list_function_in_atlas,
        input_names=['in_atlas'],
        output_names=['out_atlas']),
                                       joinsource="infosource",
                                       joinfield=['in_atlas'],
                                       name="save_file_list_in_atlas")


    # def save_file_list_function(in_brain, in_mask, in_motion_params, in_motion_outliers, in_joint_xformation_matrix, in_tr, in_atlas):
    #     # Imports
    #     import numpy as np
    #     import os
    #     from os.path import join as opj
    #
    #
    #     file_list = np.asarray(in_brain)
    #     print('######################## File List ######################: \n',file_list)
    #
    #     np.save('brain_file_list',file_list)
    #     file_name = 'brain_file_list.npy'
    #     out_brain = opj(os.getcwd(),file_name) # path
    #
    #
    #     file_list2 = np.asarray(in_mask)
    #     print('######################## File List ######################: \n',file_list2)
    #
    #     np.save('mask_file_list',file_list2)
    #     file_name2 = 'mask_file_list.npy'
    #     out_mask = opj(os.getcwd(),file_name2) # path
    #
    #
    #     file_list3 = np.asarray(in_motion_params)
    #     print('######################## File List ######################: \n',file_list3)
    #
    #     np.save('motion_params_file_list',file_list3)
    #     file_name3 = 'motion_params_file_list.npy'
    #     out_motion_params = opj(os.getcwd(),file_name3) # path
    #
    #
    #     file_list4 = np.asarray(in_motion_outliers)
    #     print('######################## File List ######################: \n',file_list4)
    #
    #     np.save('motion_outliers_file_list',file_list4)
    #     file_name4 = 'motion_outliers_file_list.npy'
    #     out_motion_outliers = opj(os.getcwd(),file_name4) # path
    #
    #
    #     file_list5 = np.asarray(in_joint_xformation_matrix)
    #     print('######################## File List ######################: \n',file_list5)
    #
    #     np.save('joint_xformation_matrix_file_list',file_list5)
    #     file_name5 = 'joint_xformation_matrix_file_list.npy'
    #     out_joint_xformation_matrix = opj(os.getcwd(),file_name5) # path
    #
    #     tr_list = np.asarray(in_tr)
    #     print('######################## TR List ######################: \n',tr_list)
    #
    #     np.save('tr_list',tr_list)
    #     file_name6 = 'tr_list.npy'
    #     out_tr = opj(os.getcwd(),file_name6) # path
    #
    #
    #     file_list7 = np.asarray(in_atlas)
    #     print('######################## File List ######################: \n',file_list7)
    #
    #     np.save('atlas_file_list',file_list7)
    #     file_name7 = 'atlas_file_list.npy'
    #     out_atlas = opj(os.getcwd(),file_name7) # path
    #
    #
    #
    #
    #     return out_brain, out_mask, out_motion_params, out_motion_outliers, out_joint_xformation_matrix, out_tr , out_atlas
    #
    #
    #
    # save_file_list = JoinNode(Function(function=save_file_list_function, input_names=['in_brain', 'in_mask', 'in_motion_params','in_motion_outliers','in_joint_xformation_matrix', 'in_tr', 'in_atlas'],
    #                  output_names=['out_brain','out_mask','out_motion_params','out_motion_outliers','out_joint_xformation_matrix','out_tr', 'out_atlas']),
    #                  joinsource="infosource",
    #                  joinfield=['in_brain', 'in_mask', 'in_motion_params','in_motion_outliers','in_joint_xformation_matrix','in_tr', 'in_atlas'],
    #                  name="save_file_list")

    # ### Motion outliers

    motionOutliers = Node(MotionOutliers(no_motion_correction=False,
                                         metric='fd',
                                         out_metric_plot='fd_plot.png',
                                         out_metric_values='fd_raw.txt'),
                          name='motionOutliers')
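
    # MotionOutliers wraps FSL's fsl_motion_outliers; roughly:
    #   fsl_motion_outliers -i func.nii -o confounds.txt --fd -s fd_raw.txt -p fd_plot.png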

    # ## Workflow for atlas registration  from std to functional

    wf_atlas_resize_reg = Workflow(name=atlas_resize_reg_directory)

    wf_atlas_resize_reg.connect([

        # Apply the inverse matrix to the 3mm Atlas to transform it to func space
        (maskfunc4mean, std2func_xform, [('out_file', 'reference')]),
        (resample_atlas, std2func_xform, [('out_file', 'in_file')]),

        # Now, applying the inverse matrix
        (inv_mat, std2func_xform, [('out_file', 'in_matrix_file')]
         ),  # output: Atlas in func space
        (std2func_xform, save_file_list_in_atlas, [('out_file', 'in_atlas')]),

        # ---------------------------Save the required files --------------------------------------------
        (save_file_list_in_motion_params, dataSink,
         [('out_motion_params', 'motion_params_paths.@out_motion_params')]),
        (save_file_list_in_motion_outliers, dataSink,
         [('out_motion_outliers', 'motion_outliers_paths.@out_motion_outliers')
          ]),
        (save_file_list_in_brain, dataSink,
         [('out_brain', 'preprocessed_brain_paths.@out_brain')]),
        (save_file_list_in_mask, dataSink,
         [('out_mask', 'preprocessed_mask_paths.@out_mask')]),
        (save_file_list_in_joint_xformation_matrix, dataSink,
         [('out_joint_xformation_matrix',
           'joint_xformation_matrix_paths.@out_joint_xformation_matrix')]),
        (save_file_list_in_tr, dataSink, [('out_tr', 'tr_paths.@out_tr')]),
        (save_file_list_in_atlas, dataSink, [('out_atlas',
                                              'atlas_paths.@out_atlas')])
    ])

    # In[909]:

    wf_coreg_reg = Workflow(name=coreg_reg_directory)
    # wf_coreg_reg.base_dir = base_directory
    # Dir where all the outputs will be stored (inside the coregistrationPipeline folder).

    if ANAT == 1:
        wf_coreg_reg.connect(BIDSDataGrabber, 'anat_file_path', skullStrip,
                             'in_file')  # Skull-strip the anat file before resampling it to 3mm

        wf_coreg_reg.connect(skullStrip, 'out_file', resample_anat, 'in_file')

        wf_coreg_reg.connect(
            resample_anat, 'out_file', func2anat_reg, 'reference'
        )  # Make the resampled file as reference in func2anat_reg

        # Sec 1. The above 3 steps register the mean image to the resampled anat
        # image and calculate the transformation matrix (saved as out_matrix_file)

        wf_coreg_reg.connect(MNI152_2mm, 'standard_file', resample_mni,
                             'in_file')

        wf_coreg_reg.connect(resample_mni, 'out_file', anat2std_reg,
                             'reference')

        wf_coreg_reg.connect(resample_anat, 'out_file', anat2std_reg,
                             'in_file')

        # Calculates the transformation matrix from 3mm anat to 3mm MNI

        # We can get those matrices by referring to func2anat_reg.outputs.out_matrix_file and similarly for anat2std_reg

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file', concat_xform,
                             'in_file')

        wf_coreg_reg.connect(anat2std_reg, 'out_matrix_file', concat_xform,
                             'in_file2')

        wf_coreg_reg.connect(concat_xform, 'out_file', dataSink,
                             'transformation_matrix_fun2std.@out_file')

        wf_coreg_reg.connect(concat_xform, 'out_file',
                             save_file_list_in_joint_xformation_matrix,
                             'in_joint_xformation_matrix')

        # Now inverse the func2std MAT to std2func
        wf_coreg_reg.connect(concat_xform, 'out_file', wf_atlas_resize_reg,
                             'inv_mat.in_file')
    # ------------------------------------------------------------------------------------------------------------------------------

    # Registration of Functional to MNI 3mm space w/o using anatomical
    if ANAT == 0:
        print('Not using anatomical high-resolution files')
        wf_coreg_reg.connect(MNI152_2mm, 'standard_file', resample_mni,
                             'in_file')
        wf_coreg_reg.connect(
            resample_mni, 'out_file', func2anat_reg, 'reference'
        )  # Make the resampled file as reference in func2anat_reg

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file', dataSink,
                             'transformation_matrix_fun2std.@out_file')

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file',
                             save_file_list_in_joint_xformation_matrix,
                             'in_joint_xformation_matrix')

        # Now inverse the func2std MAT to std2func
        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file',
                             wf_atlas_resize_reg, 'inv_mat.in_file')

    # ## Co-Registration, Normalization and Bandpass Workflow
    # 1. Co-registration means aligning the func to the anat
    # 2. Normalization means aligning the func/anat to the standard space
    # 3. Band-pass filtering is applied in the range highpass=0.008 to lowpass=0.08

    # In[910]:

    wf_motion_correction_bet = Workflow(name=motion_correction_bet_directory)
    # wf_motion_correction_bet.base_dir = base_directory

    wf_motion_correction_bet.connect([
        (from_mcflirt, meanfunc, [('in_file', 'in_file')]),
        (meanfunc, meanfuncmask, [('out_file', 'in_file')]),
        (from_mcflirt, applyMask, [('in_file', 'in_file')]),  # 1
        (meanfuncmask, applyMask, [
            ('mask_file', 'in_file2')
        ]),  # 2 output: 1&2,  BET on coregistered fmri scan
        (meanfunc, maskfunc4mean, [('out_file', 'in_file')]),  # 3
        (meanfuncmask, maskfunc4mean,
         [('mask_file', 'in_file2')]),  # 4 output: 3&4, BET on mean func scan
        (applyMask, save_file_list_in_brain, [('out_file', 'in_brain')]),
        (applyMask, save_file_list_in_mask, [('out_file2', 'in_mask')]),
        (maskfunc4mean, wf_coreg_reg, [('out_file', 'func2anat_reg.in_file')])
    ])

    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")

    infosource.iterables = [('subject_id', subject_list)]
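
    # Note: the JoinNodes above refer to this node via joinsource="infosource",
    # so it must keep this exact name.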

    # Create the workflow

    wf = Workflow(name=parent_wf_directory)
    # base_dir = opj(s,'result')
    wf.base_dir = base_directory  # Dir where all the outputs will be stored (inside the BETFlow folder).

    # wf.connect([      (infosource, BIDSDataGrabber, [('subject_id','subject_id')]),
    #                   (BIDSDataGrabber, extract, [('func_file_path','in_file')]),
    #
    #                   (BIDSDataGrabber,getMetadata, [('func_file_path','in_file')]),
    #
    #                   (getMetadata,slicetimer, [('tr','time_repetition')]),
    #
    #
    #                   (getMetadata,slicetimer, [('index_dir','index_dir')]),
    #
    #                   (getMetadata,slicetimer, [('interleaved','interleaved')]),
    #
    #                   (getMetadata,save_file_list_in_tr, [('tr','in_tr')]),
    #
    #                   (extract,slicetimer,[('roi_file','in_file')]),
    #
    #                   (slicetimer, mcflirt,[('slice_time_corrected_file','in_file')])
    #                   (mcflirt,dataSink,[('par_file','motion_params.@par_file')]), # saves the motion parameters calculated before
    #
    #                   (mcflirt,save_file_list_in_motion_params,[('par_file','in_motion_params')]),
    #
    #                   (mcflirt,wf_motion_correction_bet,[('out_file','from_mcflirt.in_file')])
    #            ])
    # # Run it in parallel
    # wf.run('MultiProc', plugin_args={'n_procs': num_proc})
    #
    #
    #
    # # Visualize the detailed graph
    # # from IPython.display import Image
    # wf.write_graph(graph2use='flat', format='png', simple_form=True)

    # Options:
    # discard 4 volumes (extract), slicetimer, motionOutliers, mcflirt
    print('Preprocessing Options:')
    print('Skipping 4 dummy volumes - ', options_binary_string[0])
    print('Slicetiming correction - ', options_binary_string[1])
    print('Finding Motion Outliers - ', options_binary_string[2])
    print('Doing Motion Correction - ', options_binary_string[3])

    # ANAT = 0
    nodes = [extract, slicetimer, motionOutliers, mcflirt]
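
    # Example: options_binary_string = '1111' enables all four steps; the loop
    # below wires extract -> slicetimer -> mcflirt in sequence, while
    # motionOutliers hangs off as a side branch (note its `continue`).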
    wf.connect(infosource, 'subject_id', BIDSDataGrabber, 'subject_id')
    wf.connect(BIDSDataGrabber, 'func_file_path', getMetadata, 'in_file')
    wf.connect(getMetadata, 'tr', save_file_list_in_tr, 'in_tr')

    old_node = BIDSDataGrabber
    old_node_output = 'func_file_path'

    for idx, include in enumerate(options_binary_string):

        if old_node == extract:
            old_node_output = 'roi_file'
        elif old_node == slicetimer:
            old_node_output = 'slice_time_corrected_file'
        # elif old_node == mcflirt:

        # old_node_output = 'out_file'

        if int(include):
            new_node = nodes[idx]

            if new_node == slicetimer:
                wf.connect(getMetadata, 'tr', slicetimer, 'time_repetition')
                wf.connect(getMetadata, 'index_dir', slicetimer, 'index_dir')
                wf.connect(getMetadata, 'interleaved', slicetimer,
                           'interleaved')
                new_node_input = 'in_file'
            elif new_node == extract:
                new_node_input = 'in_file'
            elif new_node == mcflirt:
                new_node_input = 'in_file'
                wf.connect(mcflirt, 'par_file', dataSink,
                           'motion_params.@par_file'
                           )  # saves the motion parameters calculated before

                wf.connect(mcflirt, 'par_file',
                           save_file_list_in_motion_params, 'in_motion_params')

                wf.connect(mcflirt, 'out_file', wf_motion_correction_bet,
                           'from_mcflirt.in_file')

            elif new_node == motionOutliers:

                wf.connect(meanfuncmask, 'mask_file', motionOutliers, 'mask')

                wf.connect(motionOutliers, 'out_file', dataSink,
                           'motionOutliers.@out_file')

                wf.connect(motionOutliers, 'out_metric_plot', dataSink,
                           'motionOutliers.@out_metric_plot')

                wf.connect(motionOutliers, 'out_metric_values', dataSink,
                           'motionOutliers.@out_metric_values')

                wf.connect(motionOutliers, 'out_file',
                           save_file_list_in_motion_outliers,
                           'in_motion_outliers')

                new_node_input = 'in_file'

                wf.connect(old_node, old_node_output, new_node, new_node_input)

                continue

            wf.connect(old_node, old_node_output, new_node, new_node_input)

            old_node = new_node

        else:
            if idx == 3:
                # new_node = from_mcflirt
                # new_node_input = 'from_mcflirt.in_file'

                wf.connect(old_node, old_node_output, wf_motion_correction_bet,
                           'from_mcflirt.in_file')

                # old_node = new_node

    TEMP_DIR_FOR_STORAGE = opj(base_directory, 'crash_files')
    wf.config = {"execution": {"crashdump_dir": TEMP_DIR_FOR_STORAGE}}

    # Visualize the detailed graph
    # from IPython.display import Image

    wf.write_graph(graph2use='flat', format='png', simple_form=True)

    # Run it in parallel
    wf.run('MultiProc', plugin_args={'n_procs': num_proc})
Exemplo n.º 28
0
    def run(self):
        matlab_cmd = self.paths['spm_path'] + ' ' + self.paths[
            'mcr_path'] + '/ script'
        spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
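
        # set_mlab_paths with use_mcr=True points nipype at a compiled SPM
        # standalone plus the MATLAB Compiler Runtime, so no MATLAB license
        # is needed at run time.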
        print(matlab_cmd)
        print('SPM version: ' + str(spm.SPMCommand().version))

        experiment_dir = opj(self.paths['input_path'], 'output/')
        output_dir = 'datasink'
        working_dir = 'workingdir'

        subject_list = self.subject_list

        # list of subject identifiers
        fwhm = self.parameters[
            'fwhm']  # Smoothing widths to apply (Gaussian kernel size)
        tr = self.parameters['tr']  # Repetition time
        init_volume = self.parameters[
            'init_volume']  # Index of the first volume to keep; earlier volumes are discarded
        iso_size = self.parameters[
            'iso_size']  # Isometric resample of functional images to voxel size (in mm)
        low_pass = self.parameters['low_pass']
        high_pass = self.parameters['high_pass']
        t1_relative_path = self.paths['t1_relative_path']
        fmri_relative_path = self.paths['fmri_relative_path']

        # ExtractROI - skip dummy scans
        extract = Node(ExtractROI(t_min=init_volume,
                                  t_size=-1,
                                  output_type='NIFTI'),
                       name="extract")  #FSL

        # MCFLIRT - motion correction
        mcflirt = Node(MCFLIRT(mean_vol=True,
                               save_plots=True,
                               output_type='NIFTI'),
                       name="motion_correction")  #FSL

        # SliceTimer - correct for slice wise acquisition
        slicetimer = Node(SliceTimer(index_dir=False,
                                     interleaved=True,
                                     output_type='NIFTI',
                                     time_repetition=tr),
                          name="slice_timing_correction")  #FSL

        # Denoise - anatomical image denoising
        denoise = Node(Denoise(), name="denoising")  # Interface with dipy

        # Smooth - image smoothing
        smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")  # SPM

        n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'),
                      name='n4bias')  #Interface with SimpleITK

        descomposition = Node(Descomposition(n_components=20,
                                             low_pass=0.1,
                                             high_pass=0.01,
                                             tr=tr),
                              name='descomposition')  #Interface with nilearn

        # Artifact Detection - determines outliers in functional images
        art = Node(ArtifactDetect(norm_threshold=2,
                                  zintensity_threshold=3,
                                  mask_type='spm_global',
                                  parameter_source='FSL',
                                  use_differences=[True, False],
                                  plot_type='svg'),
                   name="artifact_detection")  #Rapidart

        extract_confounds_ws_csf = Node(
            ExtractConfounds(out_file='ev_without_gs.csv'),
            name='extract_confounds_ws_csf')  # Interface

        extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                     delimiter=','),
                                    name='extract_confounds_global_signal')

        signal_extraction = Node(SignalExtraction(
            time_series_out_file='time_series.csv',
            correlation_matrix_out_file='correlation_matrix.png',
            labels_parcellation_path=self.paths['labels_parcellation_path'],
            mask_mni_path=self.paths['mask_mni_path'],
            tr=tr,
            low_pass=low_pass,
            high_pass=high_pass,
            plot=False),
                                 name='signal_extraction')
        signal_extraction.iterables = [('image_parcellation_path',
                                        self.paths['image_parcellation_path'])]

        art_remotion = Node(
            ArtifacRemotion(out_file='fmri_art_removed.nii'),
            name='artifact_remotion')  #This interface requires implementation

        # BET - Skullstrip anatomical anf funtional images
        bet_t1 = Node(BET(frac=0.5,
                          robust=True,
                          mask=True,
                          output_type='NIFTI_GZ'),
                      name="bet_t1")  #FSL

        # FAST - Image Segmentation
        segmentation = Node(FAST(output_type='NIFTI'),
                            name="segmentation")  #FSL

        # Normalize - normalizes functional and structural images to the MNI template
        normalize_fmri = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                              name="normalize_fmri")  #SPM

        gunzip = Node(Gunzip(), name="gunzip")

        normalize_t1 = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                            name="normalize_t1")

        normalize_masks = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                               name="normalize_masks")

        # Threshold - Threshold WM probability image
        threshold = Node(Threshold(thresh=0.5,
                                   args='-bin',
                                   output_type='NIFTI_GZ'),
                         name="wm_mask_threshold")

        # FLIRT - pre-alignment of functional images to anatomical images
        coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                         name="linear_warp_estimation")

        # FLIRT - coregistration of functional images to anatomical images with BBR
        coreg_bbr = Node(FLIRT(dof=6,
                               cost='bbr',
                               schedule=opj(os.getenv('FSLDIR'),
                                            'etc/flirtsch/bbr.sch'),
                               output_type='NIFTI_GZ'),
                         name="nonlinear_warp_estimation")

        # Apply coregistration warp to functional images
        applywarp = Node(FLIRT(interp='spline',
                               apply_isoxfm=iso_size,
                               output_type='NIFTI'),
                         name="registration_fmri")

        # Apply coregistration warp to mean file
        applywarp_mean = Node(FLIRT(interp='spline',
                                    apply_isoxfm=iso_size,
                                    output_type='NIFTI_GZ'),
                              name="registration_mean_fmri")

        # Infosource - a function free node to iterate over the list of subject names
        infosource = Node(IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = [('subject_id', subject_list)]

        # SelectFiles - to grab the data (alternative to DataGrabber)
        anat_file = opj('{subject_id}', t1_relative_path)
        func_file = opj('{subject_id}', fmri_relative_path)

        #anat_file = opj('{subject_id}/anat/', 'data.nii')
        #func_file = opj('{subject_id}/func/', 'data.nii')

        templates = {'anat': anat_file, 'func': func_file}

        selectfiles = Node(SelectFiles(
            templates, base_directory=self.paths['input_path']),
                           name="selectfiles")

        # Datasink - creates output folder for important outputs
        datasink = Node(DataSink(base_directory=experiment_dir,
                                 container=output_dir),
                        name="datasink")

        # Create a coregistration workflow
        coregwf = Workflow(name='coreg_fmri_to_t1')
        coregwf.base_dir = opj(experiment_dir, working_dir)

        # Create a preprocessing workflow
        preproc = Workflow(name='preproc')
        preproc.base_dir = opj(experiment_dir, working_dir)

        # Connect all components of the coregistration workflow

        coregwf.connect([
            (bet_t1, n4bias, [('out_file', 'in_file')]),
            (n4bias, segmentation, [('out_file', 'in_files')]),
            (segmentation, threshold, [(('partial_volume_files', get_latest),
                                        'in_file')]),
            (n4bias, coreg_pre, [('out_file', 'reference')]),
            (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
            (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
            (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
            (n4bias, applywarp, [('out_file', 'reference')]),
            (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')
                                         ]),
            (n4bias, applywarp_mean, [('out_file', 'reference')]),
        ])

        ## Use the following DataSink output substitutions
        substitutions = [('_subject_id_', 'sub-')]
        #                 ('_fwhm_', 'fwhm-'),
        #                 ('_roi', ''),
        #                 ('_mcf', ''),
        #                 ('_st', ''),
        #                 ('_flirt', ''),
        #                 ('.nii_mean_reg', '_mean'),
        #                 ('.nii.par', '.par'),
        #                 ]
        # subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

        # substitutions.extend(subjFolders)
        datasink.inputs.substitutions = substitutions

        # Connect all components of the preprocessing workflow
        preproc.connect([
            (infosource, selectfiles, [('subject_id', 'subject_id')]),
            (selectfiles, extract, [('func', 'in_file')]),
            (extract, mcflirt, [('roi_file', 'in_file')]),
            (mcflirt, slicetimer, [('out_file', 'in_file')]),
            (selectfiles, denoise, [('anat', 'in_file')]),
            (denoise, coregwf, [('out_file', 'bet_t1.in_file'),
                                ('out_file',
                                 'nonlinear_warp_estimation.reference')]),
            (mcflirt, coregwf,
             [('mean_img', 'linear_warp_estimation.in_file'),
              ('mean_img', 'nonlinear_warp_estimation.in_file'),
              ('mean_img', 'registration_mean_fmri.in_file')]),
            (slicetimer, coregwf, [('slice_time_corrected_file',
                                    'registration_fmri.in_file')]),
            (coregwf, art, [('registration_fmri.out_file', 'realigned_files')
                            ]),
            (mcflirt, art, [('par_file', 'realignment_parameters')]),
            (art, art_remotion, [('outlier_files', 'outlier_files')]),
            (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')
                                     ]),
            (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
            (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
            (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
            (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
            (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                          get_wm_csf), 'apply_to_files')]),
            (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
            (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')
                                                ]),
            (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                          'list_mask')]),
            (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),
            (art, extract_confounds_ws_csf, [('outlier_files', 'outlier_files')
                                             ]),

            # (smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
            # (normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
            # (extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
            (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
            # (extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
            (extract_confounds_ws_csf, signal_extraction,
             [('out_file', 'confounds_file')]),

            #(smooth, descomposition, [('smoothed_files', 'in_file')]),
            #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

            # (extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
            (denoise, datasink, [('out_file', 'preprocessing.@t1_denoised')]),
            (extract_confounds_ws_csf, datasink,
             [('out_file', 'preprocessing.@confounds_without_gs')]),
            (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')
                                ]),
            (normalize_fmri, datasink, [('normalized_files',
                                         'preprocessing.@fmri_normalized')]),
            (normalize_t1, datasink, [('normalized_files',
                                       'preprocessing.@t1_normalized')]),
            (normalize_masks, datasink, [('normalized_files',
                                          'preprocessing.@masks_normalized')]),
            (signal_extraction, datasink, [('time_series_out_file',
                                            'preprocessing.@time_series')]),
            (signal_extraction, datasink,
             [('correlation_matrix_out_file',
               'preprocessing.@correlation_matrix')])
        ])
        #(signal_extraction, datasink,
        # [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')])])
        #,
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
        #])

        preproc.write_graph(graph2use='colored',
                            format='png',
                            simple_form=True)
        preproc.run()