def create_workflow(subject_id, outdir, file_url):
    """Create a workflow for a single participant"""

    sink_directory = os.path.join(outdir, subject_id)

    wf = Workflow(name=subject_id)

    getter = Node(Function(input_names=['url'],
                           output_names=['localfile'],
                           function=download_file),
                  name="download_url")
    getter.inputs.url = file_url

    orienter = Node(Reorient2Std(), name='reorient_brain')
    wf.connect(getter, 'localfile', orienter, 'in_file')

    better = Node(BET(), name='extract_brain')
    wf.connect(orienter, 'out_file', better, 'in_file')

    faster = Node(FAST(), name='segment_brain')
    wf.connect(better, 'out_file', faster, 'in_files')

    firster = Node(FIRST(), name='parcellate_brain')
    structures = [
        'L_Hipp', 'R_Hipp', 'L_Accu', 'R_Accu', 'L_Amyg', 'R_Amyg', 'L_Caud',
        'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', 'L_Thal', 'R_Thal'
    ]
    firster.inputs.list_of_specific_structures = structures
    wf.connect(orienter, 'out_file', firster, 'in_file')

    fslstatser = MapNode(ImageStats(),
                         iterfield=['op_string'],
                         name="compute_segment_stats")
    fslstatser.inputs.op_string = [
        '-l {thr1} -u {thr2} -v'.format(thr1=val + 0.5, thr2=val + 1.5)
        for val in range(3)
    ]
    wf.connect(faster, 'partial_volume_map', fslstatser, 'in_file')

    jsonfiler = Node(Function(
        input_names=['stats', 'seg_file', 'structure_map', 'struct_file'],
        output_names=['out_file'],
        function=toJSON),
                     name='save_json')
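    # FIRST encodes structures with CMA/FreeSurfer-style integer labels;
    # map them to names for the JSON summary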
    structure_map = [('Background', 0), ('Left-Thalamus-Proper', 10),
                     ('Left-Caudate', 11), ('Left-Putamen', 12),
                     ('Left-Pallidum', 13), ('Left-Hippocampus', 17),
                     ('Left-Amygdala', 18), ('Left-Accumbens-area', 26),
                     ('Right-Thalamus-Proper', 49), ('Right-Caudate', 50),
                     ('Right-Putamen', 51), ('Right-Pallidum', 52),
                     ('Right-Hippocampus', 53), ('Right-Amygdala', 54),
                     ('Right-Accumbens-area', 58)]
    jsonfiler.inputs.structure_map = structure_map
    wf.connect(fslstatser, 'out_stat', jsonfiler, 'stats')
    wf.connect(firster, 'segmentation_file', jsonfiler, 'seg_file')

    sinker = Node(DataSink(), name='store_results')
    sinker.inputs.base_directory = sink_directory
    wf.connect(better, 'out_file', sinker, 'brain')
    wf.connect(faster, 'bias_field', sinker, 'segs.@bias_field')
    wf.connect(faster, 'partial_volume_files', sinker, 'segs.@partial_files')
    wf.connect(faster, 'partial_volume_map', sinker, 'segs.@partial_map')
    wf.connect(faster, 'probability_maps', sinker, 'segs.@prob_maps')
    wf.connect(faster, 'restored_image', sinker, 'segs.@restored')
    wf.connect(faster, 'tissue_class_files', sinker, 'segs.@tissue_files')
    wf.connect(faster, 'tissue_class_map', sinker, 'segs.@tissue_map')
    wf.connect(firster, 'bvars', sinker, 'parcels.@bvars')
    wf.connect(firster, 'original_segmentations', sinker, 'parcels.@origsegs')
    wf.connect(firster, 'segmentation_file', sinker, 'parcels.@segfile')
    wf.connect(firster, 'vtk_surfaces', sinker, 'parcels.@vtk')
    wf.connect(jsonfiler, 'out_file', sinker, '@stats')

    return wf
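# A minimal usage sketch (hypothetical URL and paths; the helpers download_file
# and toJSON are assumed to be defined elsewhere in the source module):
#
# wf = create_workflow('sub-01', '/data/out',
#                      'https://example.org/sub-01_T1w.nii.gz')
# wf.base_dir = '/tmp/work'
# wf.run()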
Example #2
def preproc_workflow(input_dir,
                     output_dir,
                     subject_list,
                     ses_list,
                     anat_file,
                     func_file,
                     scan_size=477,
                     bet_frac=0.37):
    """
    The preprocessing workflow used in the preparation of the psilocybin vs escitalopram rsFMRI scans.
    Workflows and notes are defined throughout. Inputs are designed to be general and masks/default MNI space is provided

    :param input_dir: The input file directory containing all scans in BIDS format
    :param output_dir: The output file directory
    :param subject_list: a list of subject numbers
    :param ses_list: a list of scan numbers (session numbers)
    :param anat_file: The format of the anatomical scan within the input directory
    :param func_file: The format of the functional scan within the input directory
    :param scan_size: The length of the scan by number of images, most 10 minutes scans are around 400-500 depending
    upon scanner defaults and parameters - confirm by looking at your data
    :param bet_frac: brain extraction fractional intensity threshold
    :return: the preprocessing workflow
    """
    preproc = Workflow(name='preproc')
    preproc.base_dir = output_dir

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'ses']),
                      name="infosource")

    infosource.iterables = [('subject_id', subject_list), ('ses', ses_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    templates = {
        'anat': anat_file,
        'func': func_file
    }  # define the template of each file input

    selectfiles = Node(SelectFiles(templates, base_directory=input_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=output_dir, container=output_dir),
                    name="datasink")

    preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                                ('ses', 'ses')])])
    ''' 
    This is your functional processing workflow, used to trim scans, despike the signal, slice-time correct, 
    and motion correct your data 
    '''

    fproc = Workflow(name='fproc')  # the functional processing workflow

    # ExtractROI - skip dummy scans at the beginning of the recording by removing the first three
    trim = Node(ExtractROI(t_min=3, t_size=scan_size, output_type='NIFTI_GZ'),
                name="trim")

    # 3dDespike - despike
    despike = Node(Despike(outputtype='NIFTI_GZ', args='-NEW'), name="despike")
    fproc.connect([(trim, despike, [('roi_file', 'in_file')])])
    preproc.connect([(selectfiles, fproc, [('func', 'trim.in_file')])])

    # 3dTshift - slice time correction
    slicetime = Node(TShift(outputtype='NIFTI_GZ', tpattern='alt+z2'),
                     name="slicetime")
    fproc.connect([(despike, slicetime, [('out_file', 'in_file')])])

    # 3dVolreg - correct motion and output 1d matrix
    moco = Node(Volreg(outputtype='NIFTI_GZ',
                       interp='Fourier',
                       zpad=4,
                       args='-twopass'),
                name="moco")
    fproc.connect([(slicetime, moco, [('out_file', 'in_file')])])

    moco_bpfdt = Node(
        MOCObpfdt(), name='moco_bpfdt'
    )  # use the matlab function to correct the motion regressor
    fproc.connect([(moco, moco_bpfdt, [('oned_file', 'in_file')])])
    '''
    This is the co-registration workflow using FSL and ANTs
    '''

    coreg = Workflow(name='coreg')

    # BET - structural data brain extraction
    bet_anat = Node(BET(output_type='NIFTI_GZ', frac=bet_frac, robust=True),
                    name="bet_anat")

    # FSL segmentation process to get WM map
    seg = Node(FAST(bias_iters=6,
                    img_type=1,
                    output_biascorrected=True,
                    output_type='NIFTI_GZ'),
               name="seg")
    coreg.connect([(bet_anat, seg, [('out_file', 'in_files')])])

    # functional to structural registration
    mean = Node(MCFLIRT(mean_vol=True, output_type='NIFTI_GZ'), name="mean")

    # BBR using linear methods for initial transform fit
    func2struc = Node(FLIRT(cost='bbr', dof=6, output_type='NIFTI_GZ'),
                      name='func2struc')
    coreg.connect([(seg, func2struc, [('restored_image', 'reference')])])
    coreg.connect([(mean, func2struc, [('mean_img', 'in_file')])])
    coreg.connect([(seg, func2struc, [(('tissue_class_files', pickindex, 2),
                                       'wm_seg')])])

    # convert the FSL linear transform into a C3d format for AFNI
    f2s_c3d = Node(C3dAffineTool(itk_transform=True, fsl2ras=True),
                   name='f2s_c3d')
    coreg.connect([(func2struc, f2s_c3d, [('out_matrix_file', 'transform_file')
                                          ])])
    coreg.connect([(mean, f2s_c3d, [('mean_img', 'source_file')])])
    coreg.connect([(seg, f2s_c3d, [('restored_image', 'reference_file')])])

    # Functional to structural registration via ANTs non-linear registration
    reg = Node(Registration(
        fixed_image='default_images/MNI152_T1_2mm_brain.nii.gz',
        transforms=['Affine', 'SyN'],
        transform_parameters=[(0.1, ), (0.1, 3.0, 0.0)],
        number_of_iterations=[[1500, 1000, 1000], [100, 70, 50, 20]],
        dimension=3,
        write_composite_transform=True,
        collapse_output_transforms=True,
        metric=['MI'] + ['CC'],
        metric_weight=[1] * 2,
        radius_or_number_of_bins=[32] + [4],
        convergence_threshold=[1.e-8, 1.e-9],
        convergence_window_size=[20] + [10],
        smoothing_sigmas=[[2, 1, 0], [4, 2, 1, 0]],
        sigma_units=['vox'] * 2,
        shrink_factors=[[4, 2, 1], [6, 4, 2, 1]],
        use_histogram_matching=[False] + [True],
        use_estimate_learning_rate_once=[True, True],
        output_warped_image=True),
               name='reg')

    coreg.connect([(seg, reg, [('restored_image', 'moving_image')])
                   ])  # connect segmentation node to registration node

    merge1 = Node(niu.Merge(2),
                  name='merge1')  # merge the linear and nonlinear transforms
    coreg.connect([(f2s_c3d, merge1, [('itk_transform', 'in2')])])
    coreg.connect([(reg, merge1, [('composite_transform', 'in1')])])

    # warp the functional images into MNI space using the transforms from FLIRT and SYN
    warp = Node(ApplyTransforms(
        reference_image='default_images/MNI152_T1_2mm_brain.nii.gz',
        input_image_type=3),
                name='warp')
    coreg.connect([(moco, warp, [('out_file', 'input_image')])])
    coreg.connect([(merge1, warp, [('out', 'transforms')])])

    preproc.connect([(selectfiles, coreg, [('anat', 'bet_anat.in_file')])])
    preproc.connect([(fproc, coreg, [('moco.out_file', 'mean.in_file')])])
    '''
    Scrubbing workflow - find the motion outliers, bandpass filter, re-mean the data after bpf
    '''

    scrub = Workflow(name='scrub')

    # Generate the Scrubbing Regressor
    scrub_metrics = Node(MotionOutliers(dummy=4,
                                        out_file='FD_outliers.1D',
                                        metric='fd',
                                        threshold=0.4),
                         name="scrub_metrics")

    # Regress out flagged timepoints: with highpass=0 and lowpass=99999 the
    # band-pass is a pass-through, so 3dBandpass only removes the outlier
    # regressors supplied via orthogonalize_file
    scrub_frames = Node(Bandpass(highpass=0,
                                 lowpass=99999,
                                 outputtype='NIFTI_GZ'),
                        name='scrub_frames')
    scrub.connect([(scrub_metrics, scrub_frames, [('out_file',
                                                   'orthogonalize_file')])])
    preproc.connect([(coreg, scrub, [('warp.output_image',
                                      'scrub_frames.in_file')])])
    preproc.connect([(selectfiles, scrub, [('func', 'scrub_metrics.in_file')])
                     ])

    # mean image for remeaning after bandpass
    premean = Node(TStat(args='-mean', outputtype='NIFTI_GZ'), name='premean')
    # remean the image
    remean2 = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean2')
    scrub.connect([(scrub_frames, remean2, [('out_file', 'in_file_a')])])
    scrub.connect([(premean, remean2, [('out_file', 'in_file_b')])])
    preproc.connect([(coreg, scrub, [('warp.output_image', 'premean.in_file')])
                     ])
    '''
    Regressors for final cleaning steps
    '''

    regressors = Workflow(name='regressors')

    # Using registered structural image to create the masks for both WM and CSF
    regbet = Node(BET(robust=True, frac=0.37, output_type='NIFTI_GZ'),
                  name='regbet')

    regseg = Node(FAST(img_type=1,
                       output_type='NIFTI_GZ',
                       no_pve=True,
                       no_bias=True,
                       segments=True),
                  name='regseg')
    regressors.connect([(regbet, regseg, [('out_file', 'in_files')])])
    preproc.connect([(coreg, regressors, [('reg.warped_image',
                                           'regbet.in_file')])])
    '''
    Create a cerebrospinal fluid (CSF) regressor 
    '''

    # subtract subcortical GM from the CSF mask
    subcortgm = Node(BinaryMaths(
        operation='sub',
        operand_file='default_images/subcortical_gm_mask_bin.nii.gz',
        output_type='NIFTI_GZ',
        args='-bin'),
                     name='subcortgm')
    regressors.connect([(regseg, subcortgm, [(('tissue_class_files', pickindex,
                                               0), 'in_file')])])

    # Fill the mask holes

    fillcsf = Node(MaskTool(fill_holes=True, outputtype='NIFTI_GZ'),
                   name='fillcsf')
    regressors.connect([(subcortgm, fillcsf, [('out_file', 'in_file')])])

    # Erode the mask

    erocsf = Node(MaskTool(outputtype='NIFTI_GZ', dilate_inputs='-1'),
                  name='erocsf')
    regressors.connect([(fillcsf, erocsf, [('out_file', 'in_file')])])

    # Take mean csf signal from functional image
    meancsf = Node(ImageMeants(output_type='NIFTI_GZ'), name='meancsf')
    regressors.connect([(erocsf, meancsf, [('out_file', 'mask')])])
    preproc.connect([(coreg, regressors, [('warp.output_image',
                                           'meancsf.in_file')])])

    bpf_dt_csf = Node(CSFbpfdt(), name='bpf_dt_csf')
    regressors.connect([(meancsf, bpf_dt_csf, [('out_file', 'in_file')])])
    '''
    Creates a local white matter regressor
    '''

    # subtract subcortical gm
    subcortgm2 = Node(BinaryMaths(
        operation='sub',
        operand_file='default_images/subcortical_gm_mask_bin.nii.gz',
        output_type='NIFTI_GZ',
        args='-bin'),
                      name='subcortgm2')
    regressors.connect([(regseg, subcortgm2, [(('tissue_class_files',
                                                pickindex, 2), 'in_file')])])

    # fill mask
    fillwm = Node(MaskTool(fill_holes=True, outputtype='NIFTI_GZ'),
                  name='fillwm')
    regressors.connect([(subcortgm2, fillwm, [('out_file', 'in_file')])])

    # Erode the mask
    erowm = Node(MaskTool(outputtype='NIFTI_GZ', dilate_inputs='-1'),
                 name='erowm')
    regressors.connect([(fillwm, erowm, [('out_file', 'in_file')])])

    # generate local wm
    localwm = Node(Localstat(neighborhood=('SPHERE', 25),
                             stat='mean',
                             nonmask=True,
                             outputtype='NIFTI_GZ'),
                   name='localwm')
    regressors.connect([(erowm, localwm, [('out_file', 'mask_file')])])
    preproc.connect([(coreg, regressors, [('warp.output_image',
                                           'localwm.in_file')])])

    # bandpass filter the local wm regressor
    localwm_bpf = Node(Fourier(highpass=0.01,
                               lowpass=0.08,
                               args='-retrend',
                               outputtype='NIFTI_GZ'),
                       name='localwm_bpf')
    regressors.connect([(localwm, localwm_bpf, [('out_file', 'in_file')])])

    # detrend the local wm regressor

    localwm_bpf_dt = Node(Detrend(args='-polort 2', outputtype='NIFTI_GZ'),
                          name='localwm_bpf_dt')
    regressors.connect([(localwm_bpf, localwm_bpf_dt, [('out_file', 'in_file')
                                                       ])])
    '''
    Clean up your functional image with the regressors you have created above
    '''

    # create a mask for blurring filtering, and detrending

    clean = Workflow(name='clean')

    mask = Node(BET(mask=True, functional=True), name='mask')

    mean_mask = Node(MCFLIRT(mean_vol=True, output_type='NIFTI_GZ'),
                     name="mean_mask")

    dilf = Node(DilateImage(operation='max', output_type='NIFTI_GZ'),
                name='dilf')
    clean.connect([(mask, dilf, [('mask_file', 'in_file')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'mask.in_file')])])

    fill = Node(MaskTool(in_file='default_images/MNI152_T1_2mm_brain.nii.gz',
                         fill_holes=True,
                         outputtype='NIFTI_GZ'),
                name='fill')

    axb = Node(Calc(expr='a*b', outputtype='NIFTI_GZ'), name='axb')
    clean.connect([(dilf, axb, [('out_file', 'in_file_a')])])
    clean.connect([(fill, axb, [('out_file', 'in_file_b')])])

    bxc = Node(Calc(expr='ispositive(a)*b', outputtype='NIFTI_GZ'), name='bxc')
    clean.connect([(mean_mask, bxc, [('mean_img', 'in_file_a')])])
    clean.connect([(axb, bxc, [('out_file', 'in_file_b')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'mean_mask.in_file')
                                     ])])

    #### BLUR, FOURIER BPF, and DETREND

    blurinmask = Node(BlurInMask(fwhm=6, outputtype='NIFTI_GZ'),
                      name='blurinmask')
    clean.connect([(bxc, blurinmask, [('out_file', 'mask')])])
    preproc.connect([(scrub, clean, [('remean2.out_file', 'blurinmask.in_file')
                                     ])])

    fourier = Node(Fourier(highpass=0.01,
                           lowpass=0.08,
                           retrend=True,
                           outputtype='NIFTI_GZ'),
                   name='fourier')
    clean.connect([(blurinmask, fourier, [('out_file', 'in_file')])])

    tstat = Node(TStat(args='-mean', outputtype='NIFTI_GZ'), name='tstat')
    clean.connect([(fourier, tstat, [('out_file', 'in_file')])])

    detrend = Node(Detrend(args='-polort 2', outputtype='NIFTI_GZ'),
                   name='detrend')
    clean.connect([(fourier, detrend, [('out_file', 'in_file')])])

    remean = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean')
    clean.connect([(detrend, remean, [('out_file', 'in_file_a')])])
    clean.connect([(tstat, remean, [('out_file', 'in_file_b')])])

    concat = Node(ConcatModel(), name='concat')

    # Remove the nuisance regressors via regression (another pass-through band-pass)
    clean_rs = Node(Bandpass(highpass=0, lowpass=99999, outputtype='NIFTI_GZ'),
                    name='clean_rs')

    clean.connect([(concat, clean_rs, [('out_file', 'orthogonalize_file')])])

    remean1 = Node(Calc(expr='a+b', outputtype='NIFTI_GZ'), name='remean1')
    clean.connect([(clean_rs, remean1, [('out_file', 'in_file_a')])])
    clean.connect([(tstat, remean1, [('out_file', 'in_file_b')])])

    preproc.connect([(regressors, clean, [('bpf_dt_csf.out_file',
                                           'concat.in_file_a')])])
    preproc.connect([(fproc, clean, [('moco_bpfdt.out_file',
                                      'concat.in_file_b')])])

    preproc.connect([(regressors, clean, [('localwm_bpf_dt.out_file',
                                           'clean_rs.orthogonalize_dset')])])
    clean.connect([(remean, clean_rs, [('out_file', 'in_file')])])
    '''
    Write graphical output detailing the workflows and nodes 
    '''

    fproc.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./fproc.dot')
    fproc.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./fproc_color.dot')

    coreg.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./coreg.dot')
    coreg.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./coreg_color.dot')

    scrub.write_graph(graph2use='flat',
                      format='png',
                      simple_form=True,
                      dotfilename='./scrub.dot')
    scrub.write_graph(graph2use='colored',
                      format='png',
                      simple_form=True,
                      dotfilename='./scrub_color.dot')

    regressors.write_graph(graph2use='flat',
                           format='png',
                           simple_form=True,
                           dotfilename='./reg.dot')
    regressors.write_graph(graph2use='colored',
                           format='png',
                           simple_form=True,
                           dotfilename='./reg_color.dot')

    preproc.write_graph(graph2use='flat',
                        format='png',
                        simple_form=True,
                        dotfilename='./preproc.dot')
    preproc.write_graph(graph2use='colored',
                        format='png',
                        simple_form=True,
                        dotfilename='./preproc_color.dot')

    return preproc
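# A minimal usage sketch (hypothetical paths and SelectFiles templates; adjust
# to your data):
#
# wf = preproc_workflow(input_dir='/data/bids', output_dir='/data/derivatives',
#                       subject_list=['01', '02'], ses_list=['1', '2'],
#                       anat_file='sub-{subject_id}/ses-{ses}/anat/sub-{subject_id}_T1w.nii.gz',
#                       func_file='sub-{subject_id}/ses-{ses}/func/sub-{subject_id}_bold.nii.gz')
# wf.run('MultiProc', plugin_args={'n_procs': 4})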
Example #3
signal_extraction = Node(SignalExtraction(
    time_series_out_file='time_series.csv',
    correlation_matrix_out_file='correlation_matrix.png',
    atlas_identifier='cort-maxprob-thr25-2mm',
    tr=TR,
    plot=True),
                         name='signal_extraction')

art_remotion = Node(ArtifacRemotion(out_file='fmri_art_removed.nii'),
                    name='artifact_remotion')

# BET - Skullstrip anatomical and functional images
bet_t1 = Node(BET(frac=0.55, robust=True, mask=True, output_type='NIFTI_GZ'),
              name="bet_t1")

bet_fmri = Node(BET(frac=0.6, functional=True, output_type='NIFTI_GZ'),
                name="bet_fmri")

# FAST - Image Segmentation
segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

# Normalize - normalizes functional and structural images to the MNI template
normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                  tpm=template,
                                  write_voxel_sizes=[2, 2, 2],
                                  write_bounding_box=[[-90, -126, -72],
                                                      [90, 90, 108]]),
                      name="normalize_fmri")

gunzip = Node(Gunzip(), name="gunzip")

normalize_t1 = Node(Normalize12(
    jobtype='estwrite',
    tpm=template,
    write_voxel_sizes=[iso_size, iso_size, iso_size],
    write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                    name="normalize_t1")
Example #4
for kx in range(25):
    subject_list = [subject_list0[kx]]
    
    #------------------------------------6.1 Subject selection nodes-------------------
    
    infosource = Node(IdentityInterface(fields=['asubject_id', 'session_num']), name="infosource")
    infosource.iterables = [('asubject_id', subject_list), ('session_num', session)]        
    selectfiles = Node(SelectFiles(templates, base_directory=Subjects_dir), name="selectfiles")
    
    #------------------------------------6.2 Coregistration nodes--------------------------------
    
    bet_anat = Node(BET(frac=0.5, robust=True, mask=True, output_type='NIFTI_GZ'),
                    name="bet_anat")
    
    segmentation = Node(FAST(output_type='NIFTI_GZ'),
                        name="segmentation")
    
    threshold = Node(Threshold(thresh=0.5, args='-bin', output_type='NIFTI_GZ'),
                     name="threshold")
    
    coreg_pre = Node(FLIRT(dof=dofx, output_type='NIFTI_GZ'),
                     name="coreg_pre")
    
    coreg_bbr = Node(FLIRT(dof=dofx, cost='bbr', schedule=opj(os.getenv('FSLDIR'),'etc/flirtsch/bbr.sch'), output_type='NIFTI_GZ'),
                     name="coreg_bbr")
                                         
    MNI = Node(Function(input_names=[], output_names=['out_file'], function=pif.template_MNI),
               name='Tamplated_MNI')
                           
    Normalization = Node(FLIRT(dof=dofx, output_type='NIFTI_GZ'),
                         name="Normalization")  # name assumed; the original example is truncated here
Example #5
def preprocessing(*argu):

    argu = argu[0]
    json_file = argu[1]

    with open(json_file, 'r') as jsonfile:
        info = json.load(jsonfile, object_pairs_hook=OrderedDict)

    subject_list = info["subject_list"]
    experiment_dir = info["experiment_dir"]
    output_dir = 'datasink'
    working_dir = 'workingdir'

    task_list = info["task_list"]

    fwhm = [*map(int, info["fwhm"])]
    TR = float(info["TR"])
    iso_size = 4
    slice_list = [*map(int, info["slice order"])]

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=int(info["dummy scans"]),
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    slicetime = Node(SliceTiming(num_slices=len(slice_list),
                                 ref_slice=int(median(slice_list)),
                                 slice_order=slice_list,
                                 time_repetition=TR,
                                 time_acquisition=TR - (TR / len(slice_list))),
                     name="slicetime")

    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="mcflirt")

    # Smooth - image smoothing
    smooth = Node(Smooth(), name="smooth")
    smooth.iterables = ("fwhm", fwhm)

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="art")

    # BET - Skullstrip anatomical Image
    bet_anat = Node(BET(frac=0.5, robust=True, output_type='NIFTI_GZ'),
                    name="bet_anat")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI_GZ'),
                        name="segmentation",
                        mem_gb=4)

    # Select WM segmentation file from segmentation output
    def get_wm(files):
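        # FAST orders partial-volume files as [CSF, GM, WM]; the last is WM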
        return files[-1]

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'), name="coreg_pre")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="coreg_bbr")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="applywarp")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="applywarp_mean")

    # Create a coregistration workflow
    coregwf = Workflow(name='coregwf')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow
    coregwf.connect([
        (bet_anat, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_wm),
                                    'in_file')]),
        (bet_anat, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp_mean, [('out_file', 'reference')]),
    ])

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'task_name']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('task_name', task_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj('sub-{subject_id}', 'anat', 'sub-{subject_id}_T1w.nii.gz')
    func_file = opj('sub-{subject_id}', 'func',
                    'sub-{subject_id}_task-{task_name}_bold.nii.gz')

    templates = {'anat': anat_file, 'func': func_file}
    selectfiles = Node(SelectFiles(templates,
                                   base_directory=info["base directory"]),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [
        ('_subject_id_', 'sub-'),
        ('_task_name_', '/task-'),
        ('_fwhm_', 'fwhm-'),
        ('_roi', ''),
        ('_mcf', ''),
        ('_st', ''),
        ('_flirt', ''),
        ('.nii_mean_reg', '_mean'),
        ('.nii.par', '.par'),
    ]
    subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]
    substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('task_name', 'task_name')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, slicetime, [('roi_file', 'in_files')]),
        (slicetime, mcflirt, [('timecorrected_files', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_anat.in_file'),
                                ('anat', 'coreg_bbr.reference')]),
        (mcflirt, coregwf, [('mean_img', 'coreg_pre.in_file'),
                            ('mean_img', 'coreg_bbr.in_file'),
                            ('mean_img', 'applywarp_mean.in_file')]),
        (mcflirt, coregwf, [('out_file', 'applywarp.in_file')]),
        (coregwf, smooth, [('applywarp.out_file', 'in_files')]),
        (mcflirt, datasink, [('par_file', 'preproc.@par')]),
        (smooth, datasink, [('smoothed_files', 'preproc.@smooth')]),
        (coregwf, datasink, [('applywarp_mean.out_file', 'preproc.@mean')]),
        (coregwf, art, [('applywarp.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (coregwf, datasink, [('coreg_bbr.out_matrix_file',
                              'preproc.@mat_file'),
                             ('bet_anat.out_file', 'preproc.@brain')]),
        (art, datasink, [('outlier_files', 'preproc.@outlier_files'),
                         ('plot_files', 'preproc.@plot_files')]),
    ])
    # Create the preproc output graph
    preproc.write_graph(graph2use='colored', format='png', simple_form=True)

    # Visualize the graph
    img1 = imread(opj(preproc.base_dir, 'preproc', 'graph.png'))
    plt.imshow(img1)
    plt.xticks([]), plt.yticks([])
    plt.show()

    # Visualize the detailed graph
    preproc.write_graph(graph2use='flat', format='png', simple_form=True)
    img2 = imread(opj(preproc.base_dir, 'preproc', 'graph_detailed.png'))
    plt.imshow(img2)
    plt.xticks([]), plt.yticks([])
    plt.show()

    print("Workflow all set. Check the workflow image :)")

    response = input('Should run the workflow? Enter yes or no :')

    if response == 'yes':
        preproc.run('MultiProc', plugin_args={'n_procs': 10})
    elif response == 'no':
        print('Exiting the program since you entered no')
    else:
        raise RuntimeError('Should enter either yes or no')
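# A sketch of the JSON config this function expects (keys inferred from the
# lookups above; values are illustrative):
#
# {
#     "subject_list": ["01", "02"],
#     "experiment_dir": "/data/experiment",
#     "task_list": ["rest"],
#     "fwhm": ["4", "8"],
#     "TR": "2.0",
#     "slice order": ["1", "3", "5", "2", "4", "6"],
#     "dummy scans": "4",
#     "base directory": "/data/bids"
# }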
Example #6
def run(base_dir):
    template = '/home/brainlab/Desktop/Rudas/Data/Parcellation/TPM.nii'
    matlab_cmd = '/home/brainlab/Desktop/Rudas/Tools/spm12_r7487/spm12/run_spm12.sh /home/brainlab/Desktop/Rudas/Tools/MCR/v713/ script'
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    print('SPM version: ' + str(spm.SPMCommand().version))

    structural_dir = '/home/brainlab/Desktop/Rudas/Data/Propofol/Structurals/'
    experiment_dir = opj(base_dir, 'output/')
    output_dir = 'datasink'
    working_dir = 'workingdir'
    '''

    subject_list = ['2014_05_02_02CB',
                    '2014_05_16_16RA',
                    '2014_05_30_30AQ',
                    '2014_07_04_04HD']
    '''
    subject_list = [
        '2014_05_02_02CB', '2014_05_16_16RA', '2014_05_30_30AQ',
        '2014_07_04_04HD', '2014_07_04_04SG', '2014_08_13_13CA',
        '2014_10_08_08BC', '2014_10_08_08VR', '2014_10_22_22CY',
        '2014_10_22_22TK', '2014_11_17_17EK', '2014_11_17_17NA',
        '2014_11_19_19SA', '2014_11_19_AK', '2014_11_25.25JK',
        '2014_11_27_27HF', '2014_12_10_10JR'
    ]

    # list of subject identifiers

    fwhm = 8  # Smoothing widths to apply (Gaussian kernel size)
    TR = 2  # Repetition time
    init_volume = 0  # First volume index to use in the pipeline
    iso_size = 2  # Isometric resample of functional images to voxel size (in mm)

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=init_volume,
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="motion_correction")

    # SliceTimer - correct for slice wise acquisition
    slicetimer = Node(SliceTimer(index_dir=False,
                                 interleaved=True,
                                 output_type='NIFTI',
                                 time_repetition=TR),
                      name="slice_timing_correction")

    # Smooth - image smoothing
    smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

    n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'), name='n4bias')

    descomposition = Node(Descomposition(n_components=20,
                                         low_pass=0.1,
                                         high_pass=0.01,
                                         tr=TR),
                          name='descomposition')

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="artifact_detection")

    extract_confounds_ws_csf = Node(
        ExtractConfounds(out_file='ev_without_gs.csv'),
        name='extract_confounds_ws_csf')

    extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                 delimiter=','),
                                name='extract_confounds_global_signal')

    signal_extraction = Node(SignalExtraction(
        time_series_out_file='time_series.csv',
        correlation_matrix_out_file='correlation_matrix.png',
        atlas_identifier='cort-maxprob-thr25-2mm',
        tr=TR,
        plot=True),
                             name='signal_extraction')

    art_remotion = Node(ArtifacRemotion(out_file='fmri_art_removed.nii'),
                        name='artifact_remotion')

    # BET - Skullstrip anatomical and functional images
    bet_t1 = Node(BET(frac=0.5, robust=True, mask=True,
                      output_type='NIFTI_GZ'),
                  name="bet_t1")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                      tpm=template,
                                      write_voxel_sizes=[2, 2, 2],
                                      write_bounding_box=[[-90, -126, -72],
                                                          [90, 90, 108]]),
                          name="normalize_fmri")

    gunzip = Node(Gunzip(), name="gunzip")

    normalize_t1 = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                        name="normalize_t1")

    normalize_masks = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                           name="normalize_masks")

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="wm_mask_threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                     name="linear_warp_estimation")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="nonlinear_warp_estimation")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="registration_fmri")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="registration_mean_fmri")

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj(structural_dir, '{subject_id}', 't1.nii')
    func_file = opj('{subject_id}', 'fmri.nii')

    templates = {'anat': anat_file, 'func': func_file}

    selectfiles = Node(SelectFiles(templates, base_directory=base_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    # Create a coregistration workflow
    coregwf = Workflow(name='coreg_fmri_to_t1')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow

    coregwf.connect([
        (bet_t1, n4bias, [('out_file', 'in_file')]),
        (n4bias, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_latest),
                                    'in_file')]),
        (n4bias, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp_mean, [('out_file', 'reference')]),
    ])

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-')]
    #                 ('_fwhm_', 'fwhm-'),
    #                 ('_roi', ''),
    #                 ('_mcf', ''),
    #                 ('_st', ''),
    #                 ('_flirt', ''),
    #                 ('.nii_mean_reg', '_mean'),
    #                 ('.nii.par', '.par'),
    #                 ]
    #subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

    #substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, mcflirt, [('roi_file', 'in_file')]),
        (mcflirt, slicetimer, [('out_file', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_t1.in_file'),
                                ('anat', 'nonlinear_warp_estimation.reference')
                                ]),
        (mcflirt, coregwf, [('mean_img', 'linear_warp_estimation.in_file'),
                            ('mean_img', 'nonlinear_warp_estimation.in_file'),
                            ('mean_img', 'registration_mean_fmri.in_file')]),
        (slicetimer, coregwf, [('slice_time_corrected_file',
                                'registration_fmri.in_file')]),
        (coregwf, art, [('registration_fmri.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (art, art_remotion, [('outlier_files', 'outlier_files')]),
        (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')]),
        (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
        (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
        (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
        (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
        (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                      get_wm_csf), 'apply_to_files')]),
        (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
        (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')]),
        (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                      'list_mask')]),
        (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),

        #(smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
        #(normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
        #(extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
        (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
        #(extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
        (extract_confounds_ws_csf, signal_extraction, [('out_file',
                                                        'confounds_file')]),

        #(smooth, descomposition, [('smoothed_files', 'in_file')]),
        #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

        #(extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
        (extract_confounds_ws_csf, datasink,
         [('out_file', 'preprocessing.@confounds_without_gs')]),
        (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')]),
        (normalize_fmri, datasink, [('normalized_files',
                                     'preprocessing.@fmri_normalized')]),
        (normalize_t1, datasink, [('normalized_files',
                                   'preprocessing.@t1_normalized')]),
        (normalize_masks, datasink, [('normalized_files',
                                      'preprocessing.@masks_normalized')]),
        (signal_extraction, datasink, [('time_series_out_file',
                                        'preprocessing.@time_serie')]),
        (signal_extraction, datasink, [('correlation_matrix_out_file',
                                        'preprocessing.@correlation_matrix')]),
        (signal_extraction, datasink,
         [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')]),
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
    ])

    preproc.write_graph(graph2use='colored', format='png', simple_form=True)
    preproc.run()
Example #7
    def transform(self, X, y=None):

        initcwd = os.getcwd()
        in_files_search_param = self.gather_steps[1]

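        # Case 1: X holds subject IDs and the input files come from a previous
        # pipeline step under derivatives/<pipeline_name>/steps/<step>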
        if isinstance(X[0], str) and \
           self.backend == 'fsl' and \
           self.gather_steps[0] != 'source':

            in_files_dir = os.path.join(self.project_path, 'derivatives',
                                        self.pipeline_name, 'steps',
                                        self.gather_steps[0])

            layout = BIDSLayout(in_files_dir)

            X = X.copy()

            for subject in X:

                in_files = []
                for path in layout.get(subject=subject,
                                       **in_files_search_param):
                    path = path.filename
                    in_files.append(path)

                for in_file in in_files:

                    dirname = os.path.dirname(in_file)
                    prev_files = os.listdir(dirname)

                    os.chdir(dirname)
                    fastfsl = FAST(in_files=in_file, **self.backend_param)
                    fastfsl.run()

                    step_dir = os.path.join(self.project_path, 'derivatives',
                                            self.pipeline_name, 'steps',
                                            self.transformer_name)

                    curr_files = os.listdir(dirname)
                    for dir_file in curr_files:
                        if dir_file not in prev_files:
                            filepath_source = os.path.join(dirname, dir_file)
                            filepath_destination = get_destination_path_for_fast(
                                step_dir, filepath_source)
                            os.rename(filepath_source, filepath_destination)


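        # Case 2: the input files are raw source data taken from the project
        # root; anything already under derivatives/ is skipped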
        elif isinstance(X[0], str) and \
             self.backend == 'fsl' and \
             self.gather_steps[0] == 'source':

            in_files_dir = self.project_path

            layout = BIDSLayout(in_files_dir)

            X = X.copy()

            for subject in X:
                print('\n\nSUBJECT: {}'.format(subject))

                in_files = []
                for path in layout.get(subject=subject,
                                       **in_files_search_param):
                    path = path.filename
                    if 'derivatives' not in path.split(os.sep):
                        in_files.append(path)

                for in_file in in_files:
                    print('in_file: {}'.format(in_file))

                    dirname = os.path.dirname(in_file)
                    prev_files = os.listdir(dirname)

                    print('prev_files: {}'.format(prev_files))
                    print('Processing...')

                    os.chdir(dirname)
                    fastfsl = FAST(in_files=in_file, **self.backend_param)
                    fastfsl.run()

                    step_dir = os.path.join(self.project_path, 'derivatives',
                                            self.pipeline_name, 'steps',
                                            self.transformer_name)

                    curr_files = os.listdir(dirname)

                    print('step_dir: {}'.format(step_dir))
                    print('curr_files: {}'.format(curr_files))

                    for dir_file in curr_files:
                        if dir_file not in prev_files:
                            filepath_source = os.path.join(dirname, dir_file)
                            filepath_destination = get_destination_path_for_fast(
                                step_dir, filepath_source)

                            print(
                                'filepath_source: {}'.format(filepath_source))
                            print('filepath_destination: {}'.format(
                                filepath_destination))

                            os.rename(filepath_source, filepath_destination)
        os.chdir(initcwd)
        return X
Example #8
def get_wf_tissue_masks(name='wf_tissue_masks'):
    '''
    Returns a workflow that segments the resampled T1 brain, extracts the
    tissue types, and thresholds the tissue maps at 0.5.
    It then registers the tissue maps to T2* space and performs a
    bitwise AND between the two maps.
    '''
    # csf_tissue_prior_path, gm_tissue_prior_path, wm_tissue_prior_path,
    # threshold = 0.5

    wf_tissue_masks = Workflow(name=name)

    inputspec = Node(IdentityInterface(fields=[
        'resampled_anat_file_path', 'func2anat_mat_path', 'std2func_mat_path',
        'reference_func_file_path', 'brain_mask_eroded', 'threshold'
    ]),
                     name="inputspec")

    # FSL FAST node to segment the T1 brain
    fast = Node(FAST(out_basename='fast_'), name='fast')
    # probability_maps=True,segments=True,
    wf_tissue_masks.connect(inputspec, 'resampled_anat_file_path', fast,
                            'in_files')

    #  Invert the func2anat matrix to get anat2func
    inv_mat = Node(ConvertXFM(invert_xfm=True), name='inv_mat')
    wf_tissue_masks.connect(inputspec, 'func2anat_mat_path', inv_mat,
                            'in_file')

    # Transform the above segmented tissue masks to the functional space using the inverse matrix
    anat2func_xform_csf = Node(FLIRT(output_type='NIFTI',
                                     apply_xfm=True,
                                     interp='sinc'),
                               name='anat2func_xform_csf')

    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            anat2func_xform_csf, 'reference')
    wf_tissue_masks.connect(inv_mat, 'out_file', anat2func_xform_csf,
                            'in_matrix_file')

    anat2func_xform_wm = Node(FLIRT(output_type='NIFTI',
                                    apply_xfm=True,
                                    interp='sinc'),
                              name='anat2func_xform_wm')
    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            anat2func_xform_wm, 'reference')
    wf_tissue_masks.connect(inv_mat, 'out_file', anat2func_xform_wm,
                            'in_matrix_file')

    std2func_xform_eroded_brain = Node(FLIRT(output_type='NIFTI',
                                             apply_xfm=True,
                                             interp='nearestneighbour'),
                                       name='std2func_xform_eroded_brain')
    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            std2func_xform_eroded_brain, 'reference')
    wf_tissue_masks.connect(inputspec, 'std2func_mat_path',
                            std2func_xform_eroded_brain, 'in_matrix_file')

    def select_item_from_array(arr, index=0):
        import numpy as np
        arr = np.array(arr)
        return arr[index]

    wf_tissue_masks.connect(
        fast, ('partial_volume_files', select_item_from_array, 0),
        anat2func_xform_csf, 'in_file')
    wf_tissue_masks.connect(
        fast, ('partial_volume_files', select_item_from_array, 2),
        anat2func_xform_wm, 'in_file')
    wf_tissue_masks.connect(inputspec, 'brain_mask_eroded',
                            std2func_xform_eroded_brain, 'in_file')

    # Threshold

    def get_opstring(threshold):
        op = '-thr ' + str(threshold) + ' -bin'
        return op

    # print(inputspec.outputs)
    # ----- CSF ------

    threshold_csf = Node(interface=ImageMaths(suffix='_thresh'),
                         name='threshold_csf')
    # threshold_csf.inputs.op_string = '-thresh '+str(inputspec.outputs.threshold)+' -bin'
    wf_tissue_masks.connect(inputspec, ('threshold', get_opstring),
                            threshold_csf, 'op_string')
    wf_tissue_masks.connect(anat2func_xform_csf, 'out_file', threshold_csf,
                            'in_file')

    # ------- GM --------

    # threshold_gm = Node(interface=ImageMaths(op_string='-thresh',
    #                                             suffix='_thresh'),
    #                    name='threshold_gm')
    #
    #
    # wf_tissue_priors.connect(inputspec, ('threshold', get_opstring), threshold_gm, 'op_string' )
    # wf_tissue_priors.connect(fast, partial_volume_map[1], threshold_gm, 'in_file')
    #
    # -------- WM --------

    threshold_wm = Node(interface=ImageMaths(suffix='_thresh'),
                        name='threshold_wm')
    wf_tissue_masks.connect(inputspec, ('threshold', get_opstring),
                            threshold_wm, 'op_string')
    wf_tissue_masks.connect(anat2func_xform_wm, 'out_file', threshold_wm,
                            'in_file')

    #  -------------------

    #
    # wf_tissue_masks.connect(threshold_csf, 'out_file', std2func_xform_csf, 'in_file')
    # wf_tissue_masks.connect(threshold_wm, 'out_file', std2func_xform_wm, 'in_file')

    # Masking the outer brain CSF

    csf_mask = Node(interface=ApplyMask(), name='csf_mask')
    wf_tissue_masks.connect(threshold_csf, 'out_file', csf_mask, 'in_file')
    wf_tissue_masks.connect(std2func_xform_eroded_brain, 'out_file', csf_mask,
                            'mask_file')

    # Masking the outer brain WM that might be present due to poor BET

    wm_mask = Node(interface=ApplyMask(), name='wm_mask')
    wf_tissue_masks.connect(threshold_wm, 'out_file', wm_mask, 'in_file')
    wf_tissue_masks.connect(std2func_xform_eroded_brain, 'out_file', wm_mask,
                            'mask_file')

    # wm_mask = Node(interface=ApplyMask(),
    #                    name='wm_mask')
    # wf_tissue_masks.connect(std2func_xform_wm, 'out_file', wm_mask, 'in_file')
    # wf_tissue_masks.connect(std2func_xform_wm_prior, 'out_file', wm_mask, 'mask_file')

    outputspec = Node(IdentityInterface(fields=['csf_mask', 'wm_mask']),
                      name="outputspec")

    wf_tissue_masks.connect(csf_mask, 'out_file', outputspec, 'csf_mask')
    # wf_tissue_priors.connect(threshold_gm, 'out_file', outputspec, 'gm_tissue_prior_path')
    wf_tissue_masks.connect(wm_mask, 'out_file', outputspec, 'wm_mask')

    return wf_tissue_masks
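# A minimal usage sketch (hypothetical inputs; paths are placeholders):
#
# wf = get_wf_tissue_masks()
# wf.inputs.inputspec.resampled_anat_file_path = 'anat_resampled.nii.gz'
# wf.inputs.inputspec.func2anat_mat_path = 'func2anat.mat'
# wf.inputs.inputspec.std2func_mat_path = 'std2func.mat'
# wf.inputs.inputspec.reference_func_file_path = 'mean_func.nii.gz'
# wf.inputs.inputspec.brain_mask_eroded = 'brain_mask_eroded.nii.gz'
# wf.inputs.inputspec.threshold = 0.5
# wf.run()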
     "versions": [{
         "title": BET().version or "1.0",
         "description":
         f"Default BET version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": BET_INPUT_SPECIFICATION,
         "output": BET_OUTPUT_SPECIFICATION,
         "nested_results_attribute": "outputs.get_traitsfree",
     }],
 },
 {
     "title":
     "FAST",
     "description":
     "FAST (FMRIB's Automated Segmentation Tool) segments a 3D image of the brain into different tissue types (Grey Matter, White Matter, CSF, etc.), whilst also correcting for spatial intensity variations (also known as bias field or RF inhomogeneities).",  # noqa: E501
     "versions": [{
         "title": FAST().version or "1.0",
         "description":
         f"Default FAST version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": FAST_INPUT_SPECIFICATION,
         "output": FAST_OUTPUT_SPECIFICATION,
     }],
 },
 {
     "title":
     "FLIRT",
     "description":
     "FLIRT (FMRIB's Linear Image Registration Tool) is a fully automated robust and accurate tool for linear (affine) intra- and inter-modal brain image registration.",  # noqa: E501
     "versions": [{
         "title": FLIRT().version or "1.0",
         "description":
         f"Default FLIRT version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
Example #10
def pals(config: dict):
    # Get config file defining workflow
    # configs = json.load(open(config_file, 'r'))
    print('Starting: initializing workflow.')
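    # A sketch of the config keys this function reads (inferred from the
    # lookups below; values are illustrative):
    #
    # config = {'BIDSRoot': '/data/bids', 'Subject': '01', 'Session': '1',
    #           'T1Entities': {'suffix': 'T1w'},
    #           'Analysis': {'Reorient': True, 'Orientation': 'RAS'},
    #           'BrainExtraction': {}, 'Registration': {},
    #           'Outputs': {'Reorient': '/out', 'BrainExtraction': '/out',
    #                       'StartRegistrationSpace': 'MNI152',
    #                       'RegistrationTransform': '/out'}}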
    # Build pipeline
    wf = Workflow(name='PALS')

    # bidsLayout = bids.BIDSLayout(config['BIDSRoot'])
    # Get data
    loader = BIDSDataGrabber(index_derivatives=False)
    loader.inputs.base_dir = config['BIDSRoot']
    loader.inputs.subject = config['Subject']
    if (config['Session'] is not None):
        loader.inputs.session = config['Session']
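    # Query only T1w images; invalid_filters='allow' lets unrecognized BIDS
    # entities from the config pass through pybids without raising.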
    loader.inputs.output_query = {
        't1w': dict(**config['T1Entities'], invalid_filters='allow')
    }
    loader.inputs.extra_derivatives = [config['BIDSRoot']]
    loader = Node(loader, name='BIDSgrabber')

    entities = {
        'subject': config['Subject'],
        'session': config['Session'],
        'suffix': 'T1w',
        'extension': '.nii.gz'
    }

    # Reorient to radiological
    if (config['Analysis']['Reorient']):
        radio = MapNode(
            Reorient(orientation=config['Analysis']['Orientation']),
            name="reorientation",
            iterfield='in_file')
        if ('Reorient' in config['Outputs'].keys()):
            reorient_sink = MapNode(Function(function=copyfile,
                                             input_names=['src', 'dst']),
                                    name='reorient_copy',
                                    iterfield='src')
            path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_desc-' + config[
                'Analysis']['Orientation'] + '_{suffix}{extension}'
            reorient_filename = join(config['Outputs']['Reorient'],
                                     path_pattern.format(**entities))
            pathlib.Path(os.path.dirname(reorient_filename)).mkdir(
                parents=True, exist_ok=True)
            reorient_sink.inputs.dst = reorient_filename
            wf.connect([(radio, reorient_sink, [('out_file', 'src')])])

    else:
        radio = MapNode(Function(function=infile_to_outfile,
                                 input_names='in_file',
                                 output_names='out_file'),
                        name='identity',
                        iterfield='in_file')

    # Brain extraction
    bet = node_fetch.extraction_node(config, **config['BrainExtraction'])
    if ('BrainExtraction' in config['Outputs'].keys()):
        path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_space-' + \
                       config['Outputs']['StartRegistrationSpace'] + '_desc-brain_mask{extension}'
        brain_mask_sink = MapNode(Function(function=copyfile,
                                           input_names=['src', 'dst']),
                                  name='brain_mask_sink',
                                  iterfield='src')
        brain_mask_out = join(config['Outputs']['BrainExtraction'],
                              path_pattern.format(**entities))
        pathlib.Path(os.path.dirname(brain_mask_out)).mkdir(parents=True,
                                                            exist_ok=True)
        brain_mask_sink.inputs.dst = brain_mask_out

    ## Lesion load calculation
    # Registration
    reg = node_fetch.registration_node(config, **config['Registration'])
    if ('RegistrationTransform' in config['Outputs'].keys()):

        path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_space-' + \
                       config['Outputs']['StartRegistrationSpace'] + '_desc-transform.mat'

        registration_transform_filename = join(
            config['Outputs']['RegistrationTransform'],
            path_pattern.format(**entities))
        registration_transform_sink = MapNode(Function(
            function=copyfile, input_names=['src', 'dst']),
                                              name='registration_transf_sink',
                                              iterfield='src')
        pathlib.Path(os.path.dirname(registration_transform_filename)).mkdir(
            parents=True, exist_ok=True)
        registration_transform_sink.inputs.dst = registration_transform_filename
        wf.connect([(reg, registration_transform_sink, [('out_matrix_file',
                                                         'src')])])

    # Get mask
    mask_path_fetcher = Node(BIDSDataGrabber(
        base_dir=config['LesionRoot'],
        subject=config['Subject'],
        index_derivatives=False,
        output_query={
            'mask': dict(**config['LesionEntities'], invalid_filters='allow')
        },
        extra_derivatives=[config['LesionRoot']]),
                             name='mask_grabber')
    if (config['Session'] is not None):
        mask_path_fetcher.inputs.session = config['Session']

    # Apply reg file to lesion mask
    apply_xfm = node_fetch.apply_xfm_node(config)

    # Lesion load calculation
    if (config['Analysis']['LesionLoadCalculation']):
        lesion_load = MapNode(Function(function=overlap,
                                       input_names=['ref_mask', 'roi_list'],
                                       output_names='out_list'),
                              name='overlap_calc',
                              iterfield=['ref_mask'])
        roi_list = []
        if (os.path.exists(config['ROIDir'])):
            buf = os.listdir(config['ROIDir'])
            roi_list = [
                os.path.abspath(os.path.join(config['ROIDir'], b)) for b in buf
            ]
        else:
            warnings.warn(f"ROIDir ({config['ROIDir']}) doesn't exist.")
        buf = config['ROIList']
        roi_list += [os.path.abspath(b) for b in buf]
        lesion_load.inputs.roi_list = roi_list

        # CSV output
        csv_output = MapNode(Function(
            function=csv_writer,
            input_names=['filename', 'data_dict', 'subject', 'session']),
                             name='csv_output',
                             iterfield=['data_dict'])
        csv_output.inputs.subject = config['Subject']
        csv_output.inputs.session = config['Session']
        path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_desc-LesionLoad.csv'
        csv_out_filename = join(config['Outputs']['RegistrationTransform'],
                                path_pattern.format(**entities))
        csv_output.inputs.filename = csv_out_filename

        wf.connect([(apply_xfm, lesion_load, [('out_file', 'ref_mask')]),
                    (lesion_load, csv_output, [('out_list', 'data_dict')])])

    ## Lesion correction
    if (config['Analysis']['LesionCorrection']):
        ## White matter removal node; performs the white matter correction and takes several inputs that are supplied below.
        wm_removal = MapNode(Function(
            function=white_matter_correction,
            input_names=[
                'image', 'wm_mask', 'lesion_mask', 'max_difference_fraction'
            ],
            output_names=['out_data', 'corrected_volume']),
                             name='wm_removal',
                             iterfield=['image', 'wm_mask', 'lesion_mask'])
        wm_removal.inputs.max_difference_fraction = config['LesionCorrection'][
            'WhiteMatterSpread']

        ## File loaders
        # Loads the subject image, passes it to wm_removal node
        subject_image_loader = MapNode(Function(function=image_load,
                                                input_names=['in_filename'],
                                                output_names='out_image'),
                                       name='file_load0',
                                       iterfield='in_filename')
        wf.connect([
            (radio, subject_image_loader, [('out_file', 'in_filename')]),
            (subject_image_loader, wm_removal, [('out_image', 'image')])
        ])

        # Loads the mask image, passes it to wm_removal node
        mask_image_loader = MapNode(Function(function=image_load,
                                             input_names=['in_filename'],
                                             output_names='out_image'),
                                    name='file_load2',
                                    iterfield='in_filename')
        wf.connect([
            (mask_path_fetcher, mask_image_loader, [('mask', 'in_filename')]),
            (mask_image_loader, wm_removal, [('out_image', 'lesion_mask')])
        ])

        # Save lesion mask with white matter voxels removed
        output_image = MapNode(Function(
            function=image_write,
            input_names=['image', 'reference', 'file_name']),
                               name='image_writer0',
                               iterfield=['image', 'reference'])
        path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_space-' + \
                       config['Outputs']['StartRegistrationSpace'] + '_desc-CorrectedLesion_mask{extension}'
        lesion_corrected_filename = join(config['Outputs']['LesionCorrected'],
                                         path_pattern.format(**entities))
        output_image.inputs.file_name = lesion_corrected_filename
        wf.connect([(wm_removal, output_image, [('out_data', 'image')]),
                    (mask_path_fetcher, output_image, [('mask', 'reference')])
                    ])

        ## CSV output
        csv_output_corr = MapNode(Function(function=csv_writer,
                                           input_names=[
                                               'filename', 'subject',
                                               'session', 'data', 'data_name'
                                           ]),
                                  name='csv_output_corr',
                                  iterfield=['data'])
        csv_output_corr.inputs.subject = config['Subject']
        csv_output_corr.inputs.session = config['Session']
        csv_output_corr.inputs.data_name = 'CorrectedVolume'

        path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_desc-LesionLoad.csv'
        csv_out_filename = join(config['Outputs']['RegistrationTransform'],
                                path_pattern.format(**entities))
        csv_output_corr.inputs.filename = csv_out_filename

        wf.connect([(wm_removal, csv_output_corr, [('corrected_volume', 'data')
                                                   ])])

        ## White matter segmentation; either do segmentation or load the file
        if (config['Analysis']['WhiteMatterSegmentation']):
            # Config is set to do white matter segmentation
            # T1 intensity normalization
            t1_norm = MapNode(Function(
                function=rescale_image,
                input_names=['image', 'range_min', 'range_max', 'save_image'],
                output_names='out_file'),
                              name='normalization',
                              iterfield=['image'])
            t1_norm.inputs.range_min = config['LesionCorrection'][
                'ImageNormMin']
            t1_norm.inputs.range_max = config['LesionCorrection'][
                'ImageNormMax']
            t1_norm.inputs.save_image = True
            wf.connect([(bet, t1_norm, [('out_file', 'image')])])

            # White matter segmentation
            wm_seg = MapNode(FAST(), name="wm_seg", iterfield='in_files')
            wm_seg.inputs.out_basename = "segmentation"
            wm_seg.inputs.img_type = 1
            wm_seg.inputs.number_classes = 3
            wm_seg.inputs.hyper = 0.1
            wm_seg.inputs.iters_afterbias = 4
            wm_seg.inputs.bias_lowpass = 20
            wm_seg.inputs.segments = True
            wm_seg.inputs.no_pve = True
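            # With segments=True and no_pve=True, FAST writes one binary mask per
            # tissue class (CSF, GM, WM in that order); extract_last below keeps WM.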
            ex_last = MapNode(Function(function=extract_last,
                                       input_names=['in_list'],
                                       output_names='out_entry'),
                              name='ex_last',
                              iterfield='in_list')

            file_load1 = MapNode(Function(function=image_load,
                                          input_names=['in_filename'],
                                          output_names='out_image'),
                                 name='file_load1',
                                 iterfield='in_filename')
            # White matter output; only necessary if white matter is segmented
            wm_map = MapNode(Function(
                function=image_write,
                input_names=['image', 'reference', 'file_name']),
                             name='image_writer1',
                             iterfield=['image', 'reference'])
            path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_space-' + \
                           config['Outputs']['StartRegistrationSpace'] + '_desc-WhiteMatter_mask{extension}'
            wm_map_filename = join(config['Outputs']['LesionCorrected'],
                                   path_pattern.format(**entities))
            wm_map.inputs.file_name = wm_map_filename
            wf.connect([(file_load1, wm_map, [('out_image', 'image')]),
                        (mask_path_fetcher, wm_map, [('mask', 'reference')])])
            # Connect nodes in workflow
            wf.connect([
                (wm_seg, ex_last, [('tissue_class_files', 'in_list')]),
                (t1_norm, wm_seg, [('out_file', 'in_files')]),
                # (ex_last, wm_map, [('out_entry', 'image')]),
                (ex_last, file_load1, [('out_entry', 'in_filename')]),
                (file_load1, wm_removal, [('out_image', 'wm_mask')])
            ])

        elif (config['Analysis']['LesionCorrection']):
            # No white matter segmentation should be done, but lesion correction is expected,
            # so a white matter segmentation file must be supplied.
            wm_seg_path = config['WhiteMatterSegmentationFile']
            if (len(wm_seg_path) == 0 or not os.path.exists(wm_seg_path)):
                # Check whether the file exists at the expected output location
                path_pattern = 'sub-{subject}/ses-{session}/anat/sub-{subject}_ses-{session}_space-' + \
                               config['Outputs']['StartRegistrationSpace'] + '_desc-WhiteMatter_mask{extension}'
                wm_map_filename = join(config['Outputs']['LesionCorrected'],
                                       path_pattern.format(**entities))
                if (os.path.exists(wm_map_filename)):
                    wm_seg_path = wm_map_filename
                else:
                    raise ValueError(
                        'Config file is inconsistent; if WhiteMatterSegmentation is false but LesionCorrection'
                        ' is true, then WhiteMatterSegmentationFile must be defined and must exist.'
                    )
            file_load1 = MapNode(Function(function=image_load,
                                          input_names=['in_filename'],
                                          output_names='out_image'),
                                 name='file_load1',
                                 iterfield='in_filename')
            file_load1.inputs.in_filename = wm_seg_path

            # Connect nodes in workflow
            wf.connect([(file_load1, wm_removal, [('out_image', 'wm_mask')])])

    # Connecting workflow.
    wf.connect([
        # Starter
        (loader, radio, [('t1w', 'in_file')]),
        (radio, bet, [('out_file', 'in_file')]),
        (bet, reg, [('out_file', 'in_file')]),
        (reg, apply_xfm, [('out_matrix_file', 'in_matrix_file')]),
        (mask_path_fetcher, apply_xfm, [('mask', 'in_file')]),
    ])

    try:
        graph_out = config['Outputs'][
            'LesionCorrected'] + '/sub-{subject}/ses-{session}/anat/'.format(
                **entities)
        wf.write_graph(graph2use='orig',
                       dotfilename=join(graph_out, 'graph.dot'),
                       format='png')
        os.remove(graph_out + 'graph.dot')
        os.remove(graph_out + 'graph_detailed.dot')
    except OSError:
        warnings.warn(
            "graphviz not installed; can't produce graph. See http://www.graphviz.org/download/ for "
            "installation instructions.")
    wf.run()
    return wf
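
A sketch of the config dict pals() expects, inferred from the keys the function reads above; every value is an illustrative placeholder, not a tested configuration:

config = {
    'BIDSRoot': '/data/bids',                # placeholder paths throughout
    'Subject': '001',
    'Session': '01',                         # or None to skip session filtering
    'T1Entities': {'suffix': 'T1w'},
    'LesionRoot': '/data/lesions',
    'LesionEntities': {'suffix': 'mask'},    # placeholder entity filter
    'ROIDir': '/data/rois',
    'ROIList': [],
    'Analysis': {
        'Reorient': True,
        'Orientation': 'RAS',
        'LesionLoadCalculation': True,
        'LesionCorrection': True,
        'WhiteMatterSegmentation': True,
    },
    'BrainExtraction': {},                   # kwargs forwarded to node_fetch.extraction_node
    'Registration': {},                      # kwargs forwarded to node_fetch.registration_node
    'LesionCorrection': {
        'WhiteMatterSpread': 0.05,
        'ImageNormMin': 0,
        'ImageNormMax': 255,
    },
    'WhiteMatterSegmentationFile': '',
    'Outputs': {
        'StartRegistrationSpace': 'MNI152',
        'Reorient': '/data/derivatives/pals',
        'BrainExtraction': '/data/derivatives/pals',
        'RegistrationTransform': '/data/derivatives/pals',
        'LesionCorrected': '/data/derivatives/pals',
    },
}
wf = pals(config)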
Example #11
0
        del d["partial_volume_files"]
        return d


interfaces = {
    "BET": {
        BET().version: BET
    },
    "CAT12 Segmentation": {
        "12.6": Cat12Segmentation
    },
    "fslreorient2std": {
        Reorient2Std().version: Reorient2Std
    },
    "FAST": {
        FAST().version: FastWrapper
    },
    "FLIRT": {
        FLIRT().version: FLIRT
    },
    "FNIRT": {
        FNIRT().version: FNIRT
    },
    "FSL Anatomical Processing Script": {
        FslAnat.__version__: FslAnat
    },
    "SUSAN": {
        SUSAN().version: SUSAN
    },
    "ReconAll": {
        ReconAll().version: ReconAll
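
A sketch of how a version-keyed registry like the (truncated) interfaces dict above can be used: resolve the wrapper class registered for the locally installed tool version, then instantiate it:

from nipype.interfaces.fsl import BET

# Pick the wrapper registered for the local BET version (assumes the registry
# layout shown above and that FSL is installed, i.e. version is not None).
bet_cls = interfaces["BET"][BET().version]
bet_interface = bet_cls(frac=0.5)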
Example #12
0
    def run(self):
        matlab_cmd = self.paths['spm_path'] + ' ' + self.paths[
            'mcr_path'] + '/ script'
        spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
        print(matlab_cmd)
        print('SPM version: ' + str(spm.SPMCommand().version))

        experiment_dir = opj(self.paths['input_path'], 'output/')
        output_dir = 'datasink'
        working_dir = 'workingdir'

        subject_list = self.subject_list  # list of subject identifiers

        fwhm = self.parameters[
            'fwhm']  # Smoothing kernel width(s) to apply (Gaussian FWHM)
        tr = self.parameters['tr']  # Repetition time
        init_volume = self.parameters[
            'init_volume']  # Index of the first volume to keep (earlier dummy scans are dropped)
        iso_size = self.parameters[
            'iso_size']  # Isotropic resampling of functional images to this voxel size (in mm)
        low_pass = self.parameters['low_pass']
        high_pass = self.parameters['high_pass']
        t1_relative_path = self.paths['t1_relative_path']
        fmri_relative_path = self.paths['fmri_relative_path']

        # ExtractROI - skip dummy scans
        extract = Node(ExtractROI(t_min=init_volume,
                                  t_size=-1,
                                  output_type='NIFTI'),
                       name="extract")  #FSL

        # MCFLIRT - motion correction
        mcflirt = Node(MCFLIRT(mean_vol=True,
                               save_plots=True,
                               output_type='NIFTI'),
                       name="motion_correction")  #FSL

        # SliceTimer - correct for slice wise acquisition
        slicetimer = Node(SliceTimer(index_dir=False,
                                     interleaved=True,
                                     output_type='NIFTI',
                                     time_repetition=tr),
                          name="slice_timing_correction")  #FSL

        # Denoise - non-local means denoising
        denoise = Node(Denoise(), name="denoising")  #Interface with dipy

        # Smooth - image smoothing
        smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")  #SPM

        n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'),
                      name='n4bias')  #Interface with SimpleITK

        descomposition = Node(Descomposition(n_components=20,
                                             low_pass=0.1,
                                             high_pass=0.01,
                                             tr=tr),
                              name='descomposition')  #Interface with nilearn

        # Artifact Detection - determines outliers in functional images
        art = Node(ArtifactDetect(norm_threshold=2,
                                  zintensity_threshold=3,
                                  mask_type='spm_global',
                                  parameter_source='FSL',
                                  use_differences=[True, False],
                                  plot_type='svg'),
                   name="artifact_detection")  #Rapidart

        extract_confounds_ws_csf = Node(
            ExtractConfounds(out_file='ev_without_gs.csv'),
            name='extract_confounds_ws_csf')  #Interface

        extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                     delimiter=','),
                                    name='extract_confounds_global_signal')

        signal_extraction = Node(SignalExtraction(
            time_series_out_file='time_series.csv',
            correlation_matrix_out_file='correlation_matrix.png',
            labels_parcellation_path=self.paths['labels_parcellation_path'],
            mask_mni_path=self.paths['mask_mni_path'],
            tr=tr,
            low_pass=low_pass,
            high_pass=high_pass,
            plot=False),
                                 name='signal_extraction')
        signal_extraction.iterables = [('image_parcellation_path',
                                        self.paths['image_parcellation_path'])]

        art_remotion = Node(
            ArtifacRemotion(out_file='fmri_art_removed.nii'),
            name='artifact_remotion')  #This interface requires implementation

        # BET - Skull-strip anatomical and functional images
        bet_t1 = Node(BET(frac=0.5,
                          robust=True,
                          mask=True,
                          output_type='NIFTI_GZ'),
                      name="bet_t1")  #FSL

        # FAST - Image Segmentation
        segmentation = Node(FAST(output_type='NIFTI'),
                            name="segmentation")  #FSL

        # Normalize - normalizes functional and structural images to the MNI template
        normalize_fmri = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                              name="normalize_fmri")  #SPM

        gunzip = Node(Gunzip(), name="gunzip")

        normalize_t1 = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                            name="normalize_t1")

        normalize_masks = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                               name="normalize_masks")

        # Threshold - Threshold WM probability image
        threshold = Node(Threshold(thresh=0.5,
                                   args='-bin',
                                   output_type='NIFTI_GZ'),
                         name="wm_mask_threshold")

        # FLIRT - pre-alignment of functional images to anatomical images
        coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                         name="linear_warp_estimation")

        # FLIRT - coregistration of functional images to anatomical images with BBR
        coreg_bbr = Node(FLIRT(dof=6,
                               cost='bbr',
                               schedule=opj(os.getenv('FSLDIR'),
                                            'etc/flirtsch/bbr.sch'),
                               output_type='NIFTI_GZ'),
                         name="nonlinear_warp_estimation")

        # Apply coregistration warp to functional images
        applywarp = Node(FLIRT(interp='spline',
                               apply_isoxfm=iso_size,
                               output_type='NIFTI'),
                         name="registration_fmri")

        # Apply coregistration warp to mean file
        applywarp_mean = Node(FLIRT(interp='spline',
                                    apply_isoxfm=iso_size,
                                    output_type='NIFTI_GZ'),
                              name="registration_mean_fmri")

        # Infosource - a function-free node to iterate over the list of subject names
        infosource = Node(IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = [('subject_id', subject_list)]

        # SelectFiles - to grab the data (alternative to DataGrabber)
        anat_file = opj('{subject_id}', t1_relative_path)
        func_file = opj('{subject_id}', fmri_relative_path)

        #anat_file = opj('{subject_id}/anat/', 'data.nii')
        #func_file = opj('{subject_id}/func/', 'data.nii')

        templates = {'anat': anat_file, 'func': func_file}

        selectfiles = Node(SelectFiles(
            templates, base_directory=self.paths['input_path']),
                           name="selectfiles")

        # Datasink - creates output folder for important outputs
        datasink = Node(DataSink(base_directory=experiment_dir,
                                 container=output_dir),
                        name="datasink")

        # Create a coregistration workflow
        coregwf = Workflow(name='coreg_fmri_to_t1')
        coregwf.base_dir = opj(experiment_dir, working_dir)

        # Create a preprocessing workflow
        preproc = Workflow(name='preproc')
        preproc.base_dir = opj(experiment_dir, working_dir)

        # Connect all components of the coregistration workflow

        coregwf.connect([
            (bet_t1, n4bias, [('out_file', 'in_file')]),
            (n4bias, segmentation, [('out_file', 'in_files')]),
            (segmentation, threshold, [(('partial_volume_files', get_latest),
                                        'in_file')]),
            (n4bias, coreg_pre, [('out_file', 'reference')]),
            (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
            (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
            (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
            (n4bias, applywarp, [('out_file', 'reference')]),
            (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')
                                         ]),
            (n4bias, applywarp_mean, [('out_file', 'reference')]),
        ])

        ## Use the following DataSink output substitutions
        substitutions = [('_subject_id_', 'sub-')]
        #                 ('_fwhm_', 'fwhm-'),
        #                 ('_roi', ''),
        #                 ('_mcf', ''),
        #                 ('_st', ''),
        #                 ('_flirt', ''),
        #                 ('.nii_mean_reg', '_mean'),
        #                 ('.nii.par', '.par'),
        #                 ]
        # subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

        # substitutions.extend(subjFolders)
        datasink.inputs.substitutions = substitutions

        # Connect all components of the preprocessing workflow
        preproc.connect([
            (infosource, selectfiles, [('subject_id', 'subject_id')]),
            (selectfiles, extract, [('func', 'in_file')]),
            (extract, mcflirt, [('roi_file', 'in_file')]),
            (mcflirt, slicetimer, [('out_file', 'in_file')]),
            (selectfiles, denoise, [('anat', 'in_file')]),
            (denoise, coregwf, [('out_file', 'bet_t1.in_file'),
                                ('out_file',
                                 'nonlinear_warp_estimation.reference')]),
            (mcflirt, coregwf,
             [('mean_img', 'linear_warp_estimation.in_file'),
              ('mean_img', 'nonlinear_warp_estimation.in_file'),
              ('mean_img', 'registration_mean_fmri.in_file')]),
            (slicetimer, coregwf, [('slice_time_corrected_file',
                                    'registration_fmri.in_file')]),
            (coregwf, art, [('registration_fmri.out_file', 'realigned_files')
                            ]),
            (mcflirt, art, [('par_file', 'realignment_parameters')]),
            (art, art_remotion, [('outlier_files', 'outlier_files')]),
            (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')
                                     ]),
            (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
            (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
            (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
            (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
            (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                          get_wm_csf), 'apply_to_files')]),
            (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
            (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')
                                                ]),
            (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                          'list_mask')]),
            (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),
            (art, extract_confounds_ws_csf, [('outlier_files', 'outlier_files')
                                             ]),

            # (smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
            # (normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
            # (extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
            (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
            # (extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
            (extract_confounds_ws_csf, signal_extraction,
             [('out_file', 'confounds_file')]),

            #(smooth, descomposition, [('smoothed_files', 'in_file')]),
            #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

            # (extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
            (denoise, datasink, [('out_file', 'preprocessing.@t1_denoised')]),
            (extract_confounds_ws_csf, datasink,
             [('out_file', 'preprocessing.@confounds_without_gs')]),
            (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')
                                ]),
            (normalize_fmri, datasink, [('normalized_files',
                                         'preprocessing.@fmri_normalized')]),
            (normalize_t1, datasink, [('normalized_files',
                                       'preprocessing.@t1_normalized')]),
            (normalize_masks, datasink, [('normalized_files',
                                          'preprocessing.@masks_normalized')]),
            (signal_extraction, datasink, [('time_series_out_file',
                                            'preprocessing.@time_serie')]),
            (signal_extraction, datasink,
             [('correlation_matrix_out_file',
               'preprocessing.@correlation_matrix')])
        ])
        #(signal_extraction, datasink,
        # [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')])])
        #,
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
        #])

        preproc.write_graph(graph2use='colored',
                            format='png',
                            simple_form=True)
        preproc.run()
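
A usage sketch for the method above, assuming run() lives on a pipeline class (called Preprocessing here, a hypothetical name) whose constructor stores these arguments as self.paths, self.parameters and self.subject_list; all paths are placeholders:

pipeline = Preprocessing(  # hypothetical class name; the def is outside this excerpt
    paths={
        'input_path': '/data/study',            # contains one folder per subject
        'spm_path': '/opt/spm12/run_spm12.sh',  # SPM standalone launcher
        'mcr_path': '/opt/mcr/v95',             # MATLAB Compiler Runtime
        't1_relative_path': 'anat/T1w.nii',
        'fmri_relative_path': 'func/bold.nii',
        'labels_parcellation_path': '/data/atlas/labels.csv',
        'mask_mni_path': '/data/atlas/mni_mask.nii.gz',
        'image_parcellation_path': ['/data/atlas/parcellation.nii.gz'],
        'template_spm_path': '/opt/spm12/tpm/TPM.nii',
    },
    parameters={
        'fwhm': [8, 8, 8],   # smoothing kernel (mm)
        'tr': 2.0,           # repetition time (s)
        'init_volume': 4,    # dummy scans to drop
        'iso_size': 2,       # resampling voxel size (mm)
        'low_pass': 0.1,
        'high_pass': 0.01,
    },
    subject_list=['sub-01', 'sub-02'],
)
pipeline.run()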