Example #1
def create_normalize_pipeline(name='normalize'):
    # workflow
    normalize = Workflow(name=name)
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'normalized_file']),
        name='outputnode')

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    normalize.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                       (inputnode, normalize_time, [('epi_coreg', 'in_file')]),
                       (normalize_time, outputnode, [('out_file', 'normalized_file')])
                       ])

    return normalize
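These snippets are excerpted from nipype-based preprocessing projects and omit their module headers. A minimal sketch of the imports they appear to rely on, inferred from usage rather than taken from the original files:

# Imports assumed throughout these examples (inferred from usage).
import os
from nipype.pipeline.engine import Node, Workflow
import nipype.interfaces.utility as util
import nipype.interfaces.io as nio
import nipype.interfaces.fsl as fsl
import nipype.interfaces.freesurfer as fs
import nipype.interfaces.ants as ants
import nipype.interfaces.afni as afni
from nipype.interfaces.dcmstack import DcmStack
from nipype.interfaces.dcm2nii import Dcm2nii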
Example #2
def create_normalize_pipeline(name='normalize'):
    # workflow
    normalize = Workflow(name=name)
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'normalized_file']),
        name='outputnode')

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    normalize.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                       (inputnode, normalize_time, [('epi_coreg', 'in_file')]),
                       (normalize_time, outputnode, [('out_file', 'normalized_file')])
                       ])

    return normalize
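Both versions above hand a project-specific `time_normalizer` function to a util.Function node but never show it. A hypothetical stand-in with a matching signature, normalizing each voxel's time series to percent of its temporal mean (the original implementation may differ):

# Hypothetical stand-in for the undefined time_normalizer helper; the real
# project function may do something different.
def time_normalizer(in_file, tr):
    # util.Function nodes need their imports inside the function body
    import os
    import nibabel as nib
    img = nib.load(in_file)
    data = img.get_fdata()
    mean = data.mean(axis=-1, keepdims=True)
    mean[mean == 0] = 1.0  # avoid division by zero outside the brain
    normed = data / mean * 100.0  # signal as percent of the voxel mean
    # tr is accepted only to match the node's input_names; unused here
    out_file = os.path.join(os.getcwd(), 'rest_tNorm.nii.gz')
    nib.Nifti1Image(normed, img.affine, img.header).to_filename(out_file)
    return out_file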
Example #3
def create_converter_structural_pipeline(working_dir, ds_dir, name="converter_struct"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")

    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=["t1w_dicom"]), name="inputnode")

    outputnode = Node(util.IdentityInterface(fields=["t1w"]), name="outputnode")

    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")

    # convert to nifti
    # todo check if geometry bugs attack. use dcm2nii?
    converter_t1w = Node(DcmStack(embed_meta=True), name="converter_t1w")
    converter_t1w.plugin_args = {"submit_specs": "request_memory = 2000"}
    converter_t1w.inputs.out_format = "t1w"

    converter_wf.connect(inputnode, "t1w_dicom", converter_t1w, "dicom_files")

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(converter_t1w, "out_file", reor_2_std, "in_file")

    converter_wf.connect(reor_2_std, "out_file", outputnode, "t1w")

    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "sMRI")

    converter_wf.write_graph(dotfilename="converter_struct", graph2use="flat", format="pdf")
    return converter_wf
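A hypothetical driver for this converter; the working directory, datasink directory, and DICOM list below are placeholders, not paths from the original project:

# Hypothetical usage; all paths are placeholders.
wf = create_converter_structural_pipeline(working_dir='/tmp/work',
                                          ds_dir='/tmp/ds')
wf.inputs.inputnode.t1w_dicom = ['/data/subj01/t1w/0001.dcm']  # DICOM list
wf.run(plugin='MultiProc', plugin_args={'n_procs': 2})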
Example #4
def create_reconall_pipeline(name='reconall'):
    reconall = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id'
                                                    ]),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')
    # run reconall
    recon_all = Node(fs.ReconAll(args='-autorecon2 -nuiterations 7 -no-isrunning -hippo-subfields'),
                     name="recon_all")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                              ('anat', 'T1_files'),
                                              (('fs_subject_id', sub_id), 'subject_id')]),
                      (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                               ('subjects_dir', 'fs_subjects_dir')])
                      ])
    return reconall
Example #5
def create_reconall_pipeline(name='reconall'):
    reconall = Workflow(name=name)
    #inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
                     name='inputnode')
    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')
    # run reconall
    recon_all = Node(
        fs.ReconAll(
            args='-all -hippo-subfields -no-isrunning'
        ),  #for RSV152 took out s because of preprocessing with version 6.0
        name="recon_all")
    #recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([
        (inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                ('anat', 'T1_files'),
                                (('fs_subject_id', sub_id), 'subject_id')]),
        (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                 ('subjects_dir', 'fs_subjects_dir')])
    ])
    return reconall
Example #6
def create_reconall_pipeline(name='reconall'):
    reconall = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')

    def rename_subject_for_fu(input_id):
        output_id = input_id + "_fu"
        return output_id

    # modify subject name so it can be saved in the same folder as other LIFE freesurfer data
    rename = Node(util.Function(input_names=['input_id'],
                                output_names=['output_id'],
                                function=rename_subject_for_fu),
                  name="rename")

    # run reconall
    recon_all = Node(fs.ReconAll(args='-all -hippo-subfields -no-isrunning',
                                 openmp=24),  # FS version 6.0: -hippocampal-subfields-T1; version 5.3: -hippo-subfields
                     name="recon_all")
    # recon_all.inputs.directive = 'autorecon2-wm'  # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    reconall.connect([
        (inputnode, rename, [('fs_subject_id', 'input_id')]),
        (rename, recon_all, [('output_id', 'subject_id')]),
        (inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                ('anat', 'T1_files')]),
        (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                 ('subjects_dir', 'fs_subjects_dir')])
    ])
    return reconall
Example #7
def create_reconall_pipeline(name='reconall'):

    reconall = Workflow(name=name)

    #inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['anat', 'fs_subjects_dir', 'fs_subject_id']),
                     name='inputnode')

    outputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='outputnode')

    # run reconall
    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([
        (inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                ('anat', 'T1_files'),
                                (('fs_subject_id', sub_id), 'subject_id')]),
        (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                 ('subjects_dir', 'fs_subjects_dir')])
    ])

    return reconall
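The recon-all variants above differ only in the argument string handed to fs.ReconAll and in how the subject id is derived. A hypothetical invocation (paths and subject id are placeholders):

# Hypothetical usage; paths and subject id are placeholders.
reconall = create_reconall_pipeline()
reconall.base_dir = '/tmp/work'
reconall.inputs.inputnode.anat = '/data/subj01/t1w.nii.gz'
reconall.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'
reconall.inputs.inputnode.fs_subject_id = 'group01/subj01'  # sub_id() turns '/' into '_'
reconall.run(plugin='MultiProc', plugin_args={'n_procs': 1})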
Example #8
def create_converter_diffusion_pipeline(working_dir,
                                        ds_dir,
                                        name='converter_diffusion'):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['dMRI_dicom']),
                     name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=['dMRI']),
                      name='outputnode')

    niftisink = Node(nio.DataSink(), name='niftisink')
    niftisink.inputs.base_directory = os.path.join(ds_dir, 'raw_niftis')

    #######

    converter_dMRI = Node(Dcm2nii(), name="converter_dMRI")
    converter_dMRI.inputs.gzip_output = True
    converter_dMRI.inputs.nii_output = True
    converter_dMRI.inputs.anonymize = False
    converter_dMRI.plugin_args = {'submit_specs': 'request_memory = 2000'}
    converter_wf.connect(inputnode, 'dMRI_dicom', converter_dMRI,
                         'source_names')

    dMRI_rename = Node(util.Rename(format_string='DTI_mx_137.nii.gz'),
                       name='dMRI_rename')
    converter_wf.connect(converter_dMRI, 'converted_files', dMRI_rename,
                         'in_file')

    bvecs_rename = Node(util.Rename(format_string='DTI_mx_137.bvecs'),
                        name='bvecs_rename')
    converter_wf.connect(converter_dMRI, 'bvecs', bvecs_rename, 'in_file')

    bvals_rename = Node(util.Rename(format_string='DTI_mx_137.bvals'),
                        name='bvals_rename')
    converter_wf.connect(converter_dMRI, "bvals", bvals_rename, 'in_file')

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name='reor_2_std')
    converter_wf.connect(dMRI_rename, 'out_file', reor_2_std, 'in_file')
    converter_wf.connect(reor_2_std, 'out_file', outputnode, 'dMRI')

    # save original niftis
    converter_wf.connect(reor_2_std, 'out_file', niftisink, 'dMRI.@dwi')
    converter_wf.connect(bvals_rename, 'out_file', niftisink, 'dMRI.@bvals')
    converter_wf.connect(bvecs_rename, 'out_file', niftisink, 'dMRI.@bvecs')

    converter_wf.write_graph(dotfilename='converter_diffusion',
                             graph2use='flat',
                             format='pdf')
    return converter_wf
Example #9
def create_denoise_pipeline(name='denoise'):
    # workflow
    denoise = Workflow(name=name)
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'anat_brain', 'brain_mask', 'epi_denoised', 'highpass_sigma',
        'lowpass_sigma', 'tr'
    ]),
                     name='inputnode')
    outputnode = Node(
        interface=util.IdentityInterface(fields=[  # FL added fullspectrum
            'normalized_file'
        ]),
        name='outputnode')

    # bandpass filter denoised file
    bandpass_filter = Node(
        fsl.TemporalFilter(out_file='rest_denoised_bandpassed.nii.gz'),
        name='bandpass_filter')
    bandpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, bandpass_filter, [('highpass_sigma', 'highpass_sigma'),
                                      ('lowpass_sigma', 'lowpass_sigma')]),
        # (filter2, bandpass_filter, [('out_res', 'in_file')]),
        # (filter2, outputnode, [('out_res', 'ts_fullspectrum')]),
        (inputnode, bandpass_filter, [('epi_denoised', 'in_file')])
    ])
    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, normalize_time, [('tr', 'tr')]),
        (bandpass_filter, normalize_time, [('out_file', 'in_file')]),
        (normalize_time, outputnode, [('out_file', 'normalized_file')])
    ])
    return denoise
Example #10
def create_converter_diffusion_pipeline(working_dir, ds_dir, name="converter_diffusion"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")

    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=["dMRI_dicom"]), name="inputnode")

    outputnode = Node(util.IdentityInterface(fields=["dMRI"]), name="outputnode")

    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")

    #######

    converter_dMRI = Node(Dcm2nii(), name="converter_dMRI")
    converter_dMRI.inputs.gzip_output = True
    converter_dMRI.inputs.nii_output = True
    converter_dMRI.inputs.anonymize = False
    converter_dMRI.plugin_args = {"submit_specs": "request_memory = 2000"}
    converter_wf.connect(inputnode, "dMRI_dicom", converter_dMRI, "source_names")

    dMRI_rename = Node(util.Rename(format_string="DTI_mx_137.nii.gz"), name="dMRI_rename")
    converter_wf.connect(converter_dMRI, "converted_files", dMRI_rename, "in_file")

    bvecs_rename = Node(util.Rename(format_string="DTI_mx_137.bvecs"), name="bvecs_rename")
    converter_wf.connect(converter_dMRI, "bvecs", bvecs_rename, "in_file")

    bvals_rename = Node(util.Rename(format_string="DTI_mx_137.bvals"), name="bvals_rename")
    converter_wf.connect(converter_dMRI, "bvals", bvals_rename, "in_file")

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(dMRI_rename, "out_file", reor_2_std, "in_file")
    converter_wf.connect(reor_2_std, "out_file", outputnode, "dMRI")

    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "dMRI.@dwi")
    converter_wf.connect(bvals_rename, "out_file", niftisink, "dMRI.@bvals")
    converter_wf.connect(bvecs_rename, "out_file", niftisink, "dMRI.@bvecs")

    converter_wf.write_graph(dotfilename="converter_struct", graph2use="flat", format="pdf")
    return converter_wf
Example #11
def create_converter_structural_pipeline(working_dir,
                                         ds_dir,
                                         name='converter_struct'):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['t1w_dicom']),
                     name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=['t1w']),
                      name='outputnode')

    niftisink = Node(nio.DataSink(), name='niftisink')
    niftisink.inputs.base_directory = os.path.join(ds_dir, 'raw_niftis')

    # convert to nifti
    # todo check if geometry bugs attack. use dcm2nii?
    converter_t1w = Node(DcmStack(embed_meta=True), name='converter_t1w')
    converter_t1w.plugin_args = {'submit_specs': 'request_memory = 2000'}
    converter_t1w.inputs.out_format = 't1w'

    converter_wf.connect(inputnode, 't1w_dicom', converter_t1w, 'dicom_files')

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name='reor_2_std')
    converter_wf.connect(converter_t1w, 'out_file', reor_2_std, 'in_file')

    converter_wf.connect(reor_2_std, 'out_file', outputnode, 't1w')

    # save original niftis
    converter_wf.connect(reor_2_std, 'out_file', niftisink, 'sMRI')

    converter_wf.write_graph(dotfilename='converter_struct',
                             graph2use='flat',
                             format='pdf')
    return converter_wf
Example #12
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir):

    '''
    Workflow to run background masking and then freesurfer recon-all
    on "lowres" MP2RAGE data
    '''
    
    # main workflow
    struct_preproc = Workflow(name='mp2rage_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"
    
    # select files
    templates = {'inv2': 'raw/mp2rage/inv2.nii.gz',
                 't1map': 'raw/mp2rage/t1map.nii.gz',
                 'uni': 'raw/mp2rage/uni.nii.gz'}
    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    
    # mp2rage background masking
    background = Node(JistIntensityMp2rageMasking(outMasked=True,
                                            outMasked2=True,
                                            outSignal2=True), 
                      name='background')
    
    
    
    # workflow to run freesurfer reconall
    
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/','_')
    
    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    recon_all.inputs.subjects_dir = freesurfer_dir
    recon_all.inputs.subject_id = sub_id(subject)
    
    
    #sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('outStripped', 'uni_stripped'),
                                            ('outMasked2', 'uni_masked'),
                                            ('outSignal2', 'background_mask'),
                                            ('outOriginal', 'uni_reoriented'),
                                            ('outMask', 'skullstrip_mask'),
                                            ('transform_Warped', 'T1_brain2std')]),
                 name='sink')
    
    
    # connections
    struct_preproc.connect([(selectfiles, background, [('inv2', 'inSecond'),
                                                       ('t1map', 'inQuantitative'),
                                                       ('uni', 'inT1weighted')]),
                            (background, recon_all, [('outMasked2', 'T1_files')]),
                            (background, sink, [('outMasked2', 'preprocessed.mp2rage.@uni_masked'),
                                                ('outSignal2', 'preprocessed.mp2rage.@background_mask')]),
                            ])
    
    #struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    return struct_preproc
Example #13
grp_merge_copes.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_merge_copes.inputs.ignore_exception = False
grp_merge_copes.inputs.output_type = 'NIFTI_GZ'
grp_merge_copes.inputs.terminal_output = 'stream'
group_wf.connect(inputspec, 'copes', grp_merge_copes, 'in_files')

#node for Randomise
grp_randomise = Node(fsl.Randomise(), name='grp_randomise')
grp_randomise.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_randomise.inputs.ignore_exception = False
grp_randomise.inputs.tfce = True
grp_randomise.inputs.base_name = 'oneSampT'
grp_randomise.inputs.num_perm = 5000
grp_randomise.inputs.output_type = 'NIFTI_GZ'
grp_randomise.inputs.terminal_output = 'stream'
grp_randomise.plugin_args = {'bsub_args': '-m IB_40C_1.5T_1'}
group_wf.connect(grp_l2model, 'design_mat', grp_randomise, 'design_mat')
group_wf.connect(grp_l2model, 'design_con', grp_randomise, 'tcon')
group_wf.connect(grp_merge_copes, 'merged_file', grp_randomise, 'in_file')
group_wf.connect(inputspec, 'brain_mask', grp_randomise, 'mask')

#node for group_randomise.sinker
group_randomise_sinker = Node(DataSink(infields=None),
                              name='group_randomise_sinker')
group_randomise_sinker.inputs.base_directory = '/home/data/madlab/data/mri/wmaze/grplvl/model_GLM1.2_randomise'
group_randomise_sinker.inputs.ignore_exception = False
group_randomise_sinker.inputs.parameterization = True
group_wf.connect(grp_randomise, 't_corrected_p_files', group_randomise_sinker,
                 'output.corrected.@tcorr_p_files')
group_wf.connect(grp_randomise, 'tstat_files', group_randomise_sinker,
                 'output.@tstat_files')
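This excerpt begins after its nodes were declared. A hypothetical reconstruction of the missing scaffolding, inferred from how the names are used above (a one-sample t-test group analysis; the real definitions and the cope count are unknown):

# Hypothetical scaffolding assumed by the excerpt above.
from nipype.interfaces.io import DataSink
group_wf = Workflow(name='group_wf')
inputspec = Node(util.IdentityInterface(fields=['copes', 'brain_mask']),
                 name='inputspec')
# one-sample t-test design; the cope count is a placeholder
grp_l2model = Node(fsl.L2Model(num_copes=20), name='grp_l2model')
# merge per-subject copes into one 4D file for randomise
grp_merge_copes = Node(fsl.Merge(dimension='t'), name='grp_merge_copes')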
Example #14
# make filelist
translist = Node(util.Merge(2), name='translist')
mni.connect([(selectfiles, translist, [('affine', 'in2'), ('warp', 'in1')])])

# apply all transforms
applytransform = Node(
    ants.ApplyTransforms(
        input_image_type=3,
        #output_image='rest_preprocessed2mni.nii.gz',
        interpolation='BSpline',
        invert_transform_flags=[False, False]),
    name='applytransform')

applytransform.inputs.reference_image = template
applytransform.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(selectfiles, applytransform, [('rest', 'input_image')]),
             (translist, applytransform, [('out', 'transforms')])])

# tune down image to float
changedt = Node(fsl.ChangeDataType(output_datatype='float',
                                   out_file='rest_preprocessed2mni.nii.gz'),
                name='changedt')
changedt.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(applytransform, changedt, [('output_image', 'in_file')])])


# make base directory
def makebase(subject_id, out_dir):
    return out_dir % subject_id
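This fragment also assumes surrounding setup: an `mni` workflow, a `selectfiles` node providing 'affine', 'warp', and 'rest', and a `template` path. A hypothetical sketch (the template keys and base directory are placeholders):

# Hypothetical setup assumed by the fragment above; paths are placeholders.
mni = Workflow(name='mni')
template = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
templates = {'affine': '{subject_id}/transforms/affine.txt',
             'warp': '{subject_id}/transforms/warp.nii.gz',
             'rest': '{subject_id}/func/rest_preprocessed.nii.gz'}
selectfiles = Node(nio.SelectFiles(templates, base_directory='/data'),
                   name='selectfiles')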
Example #15
def create_registration_pipeline(working_dir,
                                 freesurfer_dir,
                                 ds_dir,
                                 name='registration'):
    """
    find transformations between struct, funct, and MNI
    """

    # initiate workflow
    reg_wf = Workflow(name=name)
    reg_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting',
                                   'rsfMRI_preprocessing')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=[
        'initial_mean_epi_moco', 't1w', 't1w_brain', 'subject_id',
        'wm_mask_4_bbr', 'struct_brain_mask'
    ]),
                     name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=[
        'struct_2_MNI_warp', 'epi_2_struct_mat', 'struct_2_epi_mat',
        'epi_2_MNI_warp', 'MNI_2_epi_warp', 'fs_2_struct_mat',
        'mean_epi_structSpace', 'mean_epi_MNIspace', 'struct_MNIspace',
        'struct_2_MNI_mat_flirt'  # target of the FLIRT connection below
    ]),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    ##########################################
    # TOC REGISTRATION MATS AND WARPS
    ##########################################
    # I. STRUCT -> MNI
    ## 1. STRUCT -> MNI with FLIRT
    ## 2. CALC. WARP STRUCT -> MNI with FNIRT

    # II.EPI -> STRUCT
    ## 3. calc EPI->STRUCT initial registration
    ## 4. run EPI->STRUCT via bbr
    ## 5. INVERT to get: STRUCT -> EPI

    # III. COMBINE I. & II.: EPI -> MNI
    ## 6. COMBINE MATS: EPI -> MNI
    ## 7. MNI -> EPI

    ##########################################
    # CREATE REGISTRATION MATS AND WARPS
    ##########################################

    # I. STRUCT -> MNI
    ##########################################
    # 1. REGISTER STRUCT -> MNI with FLIRT
    struct_2_MNI_mat = Node(fsl.FLIRT(dof=12), name='struct_2_MNI_mat')
    struct_2_MNI_mat.inputs.reference = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')

    reg_wf.connect(inputnode, 't1w_brain', struct_2_MNI_mat, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', outputnode,
                   'struct_2_MNI_mat_flirt')

    # 2. CALC. WARP STRUCT -> MNI with FNIRT
    # cf. wrt. 2mm
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1311&L=FSL&P=R86108&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
    struct_2_MNI_warp = Node(fsl.FNIRT(), name='struct_2_MNI_warp')
    struct_2_MNI_warp.inputs.config_file = 'T1_2_MNI152_2mm'
    struct_2_MNI_warp.inputs.ref_file = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')
    struct_2_MNI_warp.inputs.field_file = 'struct_2_MNI_warp.nii.gz'
    struct_2_MNI_warp.plugin_args = {'submit_specs': 'request_memory = 4000'}

    reg_wf.connect(inputnode, 't1w', struct_2_MNI_warp, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', struct_2_MNI_warp,
                   'affine_file')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', ds,
                   'registration.struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', outputnode,
                   'struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', outputnode,
                   'struct_MNIspace')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', ds,
                   'registration.struct_MNIspace')

    # II.EPI -> STRUCT (via bbr)
    ##########################################

    # 3. calc EPI->STRUCT initial registration with flirt dof=6 and corratio
    epi_2_struct_flirt6_mat = Node(fsl.FLIRT(dof=6, cost='corratio'),
                                   name='epi_2_struct_flirt6_mat')
    epi_2_struct_flirt6_mat.inputs.out_file = 'epi_structSpace_flirt6.nii.gz'
    reg_wf.connect(inputnode, 't1w_brain', epi_2_struct_flirt6_mat,
                   'reference')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', epi_2_struct_flirt6_mat,
                   'in_file')

    # 4. run EPI->STRUCT via bbr
    bbr_schedule = os.path.join(os.getenv('FSLDIR'), 'etc/flirtsch/bbr.sch')
    epi_2_struct_bbr_mat = Node(interface=fsl.FLIRT(dof=6, cost='bbr'),
                                name='epi_2_struct_bbr_mat')
    epi_2_struct_bbr_mat.inputs.schedule = bbr_schedule
    epi_2_struct_bbr_mat.inputs.out_file = 'epi_structSpace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', epi_2_struct_bbr_mat,
                   'in_file')
    reg_wf.connect(inputnode, 't1w_brain', epi_2_struct_bbr_mat, 'reference')
    reg_wf.connect(epi_2_struct_flirt6_mat, 'out_matrix_file',
                   epi_2_struct_bbr_mat, 'in_matrix_file')
    reg_wf.connect(inputnode, 'wm_mask_4_bbr', epi_2_struct_bbr_mat, 'wm_seg')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', ds,
                   'registration.epi_2_struct_mat')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_file', outputnode,
                   'mean_epi_structSpace')

    # 5. INVERT to get: STRUCT -> EPI
    struct_2_epi_mat = Node(fsl.ConvertXFM(invert_xfm=True),
                            name='struct_2_epi_mat')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', struct_2_epi_mat,
                   'in_file')
    reg_wf.connect(struct_2_epi_mat, 'out_file', outputnode,
                   'struct_2_epi_mat')

    # III. COMBINE I. & II.: EPI -> MNI
    ##########################################
    # 6. COMBINE MATS: EPI -> MNI
    epi_2_MNI_warp = Node(fsl.ConvertWarp(), name='epi_2_MNI_warp')
    epi_2_MNI_warp.inputs.reference = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', epi_2_MNI_warp,
                   'premat')  # epi2struct
    reg_wf.connect(struct_2_MNI_warp, 'field_file', epi_2_MNI_warp,
                   'warp1')  # struct2mni
    reg_wf.connect(epi_2_MNI_warp, 'out_file', outputnode, 'epi_2_MNI_warp')
    reg_wf.connect(epi_2_MNI_warp, 'out_file', ds,
                   'registration.epi_2_MNI_warp')

    # output: out_file

    # 7. MNI -> EPI
    MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
    MNI_2_epi_warp.inputs.reference = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')
    reg_wf.connect(epi_2_MNI_warp, 'out_file', MNI_2_epi_warp, 'warp')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', MNI_2_epi_warp,
                   'reference')
    reg_wf.connect(MNI_2_epi_warp, 'inverse_warp', outputnode,
                   'MNI_2_epi_warp')
    # output: inverse_warp

    ##########################################
    # TRANSFORM VOLUMES
    ##########################################

    # CREATE STRUCT IN EPI SPACE FOR DEBUGGING
    struct_epiSpace = Node(fsl.ApplyXfm(), name='struct_epiSpace')
    struct_epiSpace.inputs.out_file = 'struct_brain_epiSpace.nii.gz'
    reg_wf.connect(inputnode, 't1w_brain', struct_epiSpace, 'in_file')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', struct_epiSpace,
                   'reference')
    reg_wf.connect(struct_2_epi_mat, 'out_file', struct_epiSpace,
                   'in_matrix_file')
    reg_wf.connect(struct_epiSpace, 'out_file', ds, 'QC.struct_brain_epiSpace')

    # CREATE EPI IN MNI SPACE
    mean_epi_MNIspace = Node(fsl.ApplyWarp(), name='mean_epi_MNIspace')
    mean_epi_MNIspace.inputs.ref_file = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')
    mean_epi_MNIspace.inputs.out_file = 'mean_epi_MNIspace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', mean_epi_MNIspace,
                   'in_file')
    reg_wf.connect(epi_2_MNI_warp, 'out_file', mean_epi_MNIspace, 'field_file')
    reg_wf.connect(mean_epi_MNIspace, 'out_file', ds,
                   'registration.mean_epi_MNIspace')
    reg_wf.connect(mean_epi_MNIspace, 'out_file', outputnode,
                   'mean_epi_MNIspace')

    # CREATE MNI IN EPI SPACE FOR DEBUGGING
    MNI_epiSpace = Node(fsl.ApplyWarp(), name='MNI_epiSpace')
    MNI_epiSpace.inputs.in_file = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')
    MNI_epiSpace.inputs.out_file = 'MNI_epiSpace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', MNI_epiSpace,
                   'ref_file')
    reg_wf.connect(MNI_2_epi_warp, 'inverse_warp', MNI_epiSpace, 'field_file')
    reg_wf.connect(MNI_epiSpace, 'out_file', ds, 'registration.MNI_epiSpace')

    reg_wf.write_graph(dotfilename=reg_wf.name, graph2use='flat', format='pdf')

    return reg_wf
Example #16
def create_sca_pipeline(working_dir, rois_list, ds_dir, name='sca'):
    afni.base.AFNICommand.set_default_output_type('NIFTI_GZ')
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    sca_wf = Workflow(name=name)
    sca_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['rs_preprocessed',
                                                    'epi_2_MNI_warp']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['functional_mask',
                                                     'seed_based_z']),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    epi_MNIspace = Node(fsl.ApplyWarp(), name='epi_MNIspace')
    epi_MNIspace.inputs.ref_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    epi_MNIspace.plugin_args = {'submit_specs': 'request_memory = 4000'}

    sca_wf.connect(inputnode, 'rs_preprocessed', epi_MNIspace, 'in_file')
    sca_wf.connect(inputnode, 'epi_2_MNI_warp', epi_MNIspace, 'field_file')


    epi_mask = Node(interface=afni.Automask(), name='epi_mask')
    sca_wf.connect(epi_MNIspace, 'out_file', epi_mask, 'in_file')
    sca_wf.connect(epi_mask, 'out_file', outputnode, 'functional_mask')

    roi_infosource = Node(util.IdentityInterface(fields=['roi']), name='roi_infosource')
    roi_infosource.iterables = ('roi', rois_list)

    point = Node(afni.Calc(), name='point')
    point.inputs.in_file_a = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    point.inputs.outputtype = 'NIFTI_GZ'
    point.inputs.out_file = 'roi_point.nii.gz'
    def roi2exp(coord):
        return 'step(4-(x%+d)*(x%+d)-(y%+d)*(y%+d)-(z%+d)*(z%+d))'%(coord[0], coord[0], coord[1], coord[1], -coord[2], -coord[2])
    sca_wf.connect(roi_infosource, ('roi', roi2exp), point, 'expr')

    def format_filename(roi_str):
        import string
        valid_chars = '-_.%s%s' % (string.ascii_letters, string.digits)
        return 'roi_'+''.join(c for c in str(roi_str).replace(',','_') if c in valid_chars)+'_roi.nii.gz'

    sphere = Node(fsl.ImageMaths(), name='sphere')
    sphere.inputs.out_data_type = 'float'
    sphere.inputs.op_string = '-kernel sphere 8 -fmean -bin'
    sca_wf.connect(point, 'out_file', sphere, 'in_file')
    sca_wf.connect(roi_infosource, ('roi', format_filename), sphere, 'out_file')

    #fixme
    # smoothing = Node(fsl.maths.IsotropicSmooth(), name='smoothing')
    # smoothing.iterables = ('fwhm', [1, 6])
    # sca_wf.connect(epi_MNIspace, 'out_file', smoothing, 'in_file')

    extract_timeseries = Node(afni.Maskave(), name='extract_timeseries')
    extract_timeseries.inputs.quiet = True
    sca_wf.connect(sphere, 'out_file', extract_timeseries, 'mask')
    #fixme
    sca_wf.connect(epi_MNIspace, 'out_file', extract_timeseries, 'in_file')
    #sca_wf.connect(smoothing, 'out_file', extract_timeseries, 'in_file')

    correlation_map = Node(afni.Fim(), name='correlation_map')
    correlation_map.inputs.out = 'Correlation'
    correlation_map.inputs.outputtype = 'NIFTI_GZ'
    correlation_map.inputs.out_file = 'corr_map.nii.gz'
    sca_wf.connect(extract_timeseries, 'out_file', correlation_map, 'ideal_file')
    sca_wf.connect(epi_MNIspace, 'out_file', correlation_map, 'in_file')

    z_trans = Node(interface=afni.Calc(), name='z_trans')
    z_trans.inputs.expr = 'log((1+a)/(1-a))/2'
    z_trans.inputs.outputtype = 'NIFTI_GZ'
    sca_wf.connect(correlation_map, 'out_file', z_trans, 'in_file_a')
    sca_wf.connect(z_trans, 'out_file', outputnode, 'seed_based_z')
    sca_wf.connect(z_trans, 'out_file', ds, 'sca.seed_based_z')


    # # plot rs corr on surf
    # plot_rs = Node(interface=util.Function(input_names=['in_file', 'thr_list','roi_coords'],
    #                                        output_names=['out_file_list'],
    #                                        function=plot_rs_surf),
    #                name='plot_rs')
    # plot_rs.inputs.thr_list = [(.2,1)]
    # sca_wf.connect(correlation_map, 'out_file', plot_rs, 'in_file')
    # sca_wf.connect(roi_infosource, 'roi', plot_rs, 'roi_coords')




    sca_wf.write_graph(dotfilename='sca', graph2use='flat', format='pdf')


    return sca_wf
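The inline `roi2exp` helper in this pipeline converts an MNI seed coordinate into an AFNI 3dcalc expression marking a small sphere (squared radius 4) around the seed. A standalone illustration with an arbitrary coordinate:

# Standalone illustration of roi2exp; the coordinate is arbitrary.
def roi2exp(coord):
    return 'step(4-(x%+d)*(x%+d)-(y%+d)*(y%+d)-(z%+d)*(z%+d))' % (
        coord[0], coord[0], coord[1], coord[1], -coord[2], -coord[2])

print(roi2exp((6, -52, 40)))
# prints: step(4-(x+6)*(x+6)-(y-52)*(y-52)-(z-40)*(z-40))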
Example #17
def calc_centrality_metrics(cfg):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as freesurfer

    import CPAC.network_centrality.resting_state_centrality as cpac_centrality
    import CPAC.network_centrality.z_score as cpac_centrality_z_score


    # INPUT PARAMETERS
    dicom_dir = cfg['dicom_dir']
    preprocessed_data_dir = cfg['preprocessed_data_dir']

    working_dir = cfg['working_dir']
    freesurfer_dir = cfg['freesurfer_dir']
    template_dir = cfg['template_dir']
    script_dir = cfg['script_dir']
    ds_dir = cfg['ds_dir']

    subjects_list = cfg['subjects_list']
    TR_list = cfg['TR_list']

    use_n_procs = cfg['use_n_procs']
    plugin_name = cfg['plugin_name']



    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    wf = Workflow(name='LeiCA_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': False,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_subject_id_[A0-9]*/', ''),
                                      ('_z_score[0-9]*/', '')]  # ('dc/_TR_id_[0-9]*/', ''), ('evc/_TR_id_[0-9]*/', '')

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']), name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']), name='subjects_infosource')
    subjects_infosource.iterables = ('subject_id', subjects_list)

    def add_subject_id_to_ds_dir_fct(subject_id, ds_path):
        import os
        out_path = os.path.join(ds_path, subject_id)
        return out_path

    add_subject_id_to_ds_dir = Node(util.Function(input_names=['subject_id', 'ds_path'],
                                                  output_names=['out_path'],
                                                  function=add_subject_id_to_ds_dir_fct),
                                    name='add_subject_id_to_ds_dir')
    wf.connect(subjects_infosource, 'subject_id', add_subject_id_to_ds_dir, 'subject_id')
    add_subject_id_to_ds_dir.inputs.ds_path = ds_dir

    wf.connect(add_subject_id_to_ds_dir, 'out_path', ds, 'base_directory')


    # get atlas data
    templates_atlases = {'GM_mask_MNI_2mm': 'SPM_GM/SPM_GM_mask_2mm.nii.gz',
                         'GM_mask_MNI_3mm': 'SPM_GM/SPM_GM_mask_3mm.nii.gz',
                         'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz',
                         'vmhc_symm_brain': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_symmetric.nii.gz',
                         'vmhc_symm_brain_3mm': 'cpac_image_resources/symmetric/MNI152_T1_3mm_brain_symmetric.nii.gz',
                         'vmhc_symm_skull': 'cpac_image_resources/symmetric/MNI152_T1_2mm_symmetric.nii.gz',
                         'vmhc_symm_brain_mask_dil': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz',
                         'vmhc_config_file_2mm': 'cpac_image_resources/symmetric/T1_2_MNI152_2mm_symmetric.cnf'
                         }

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")


    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'epi_mask': '{subject_id}/rsfMRI_preprocessing/masks/brain_mask_epiSpace/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
        'epi_2_struct_mat': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_struct_mat/TR_{TR_id}/*.mat',
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
        't1w_brain': '{subject_id}/rsfMRI_preprocessing/struct_prep/t1w_brain/t1w_reoriented_maths.nii.gz',
    }

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    wf.connect(subjects_infosource, 'subject_id', selectfiles, 'subject_id')
    # selectfiles.inputs.subject_id = subject_id



    # CREATE TRANSFORMATIONS
    # create MNI 2 epi warp
    MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
    MNI_2_epi_warp.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    wf.connect(selectfiles, 'epi_mask', MNI_2_epi_warp, 'reference')
    wf.connect(selectfiles, 'epi_2_MNI_warp', MNI_2_epi_warp, 'warp')




    # CREATE TS IN MNI SPACE
    epi_bp_tNorm_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_bp_tNorm_MNIspace_3mm')
    epi_bp_tNorm_MNIspace_3mm.inputs.interp = 'spline'
    epi_bp_tNorm_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_bp_tNorm_MNIspace_3mm, 'ref_file')
    wf.connect(selectfiles, 'preproc_epi_bp_tNorm', epi_bp_tNorm_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_bp_tNorm_MNIspace_3mm, 'field_file')
    wf.connect(epi_bp_tNorm_MNIspace_3mm, 'out_file', ds, 'rsfMRI_preprocessing.epis_MNI_3mm.03_denoised_BP_tNorm')

    #####################
    # CALCULATE METRICS
    #####################




    # DEGREE
    # fixme
    # a_mem = 5
    # fixme
    a_mem = 20
    dc = cpac_centrality.create_resting_state_graphs(allocated_memory=a_mem,
                                                     wf_name='dc')
    # dc.plugin_args = {'submit_specs': 'request_memory = 6000'}
    # fixme
    dc.plugin_args = {'submit_specs': 'request_memory = 20000'}

    dc.inputs.inputspec.method_option = 0  # 0 for degree centrality, 1 for eigenvector centrality, 2 for lFCD
    dc.inputs.inputspec.threshold_option = 0  # 0 for probability p_value, 1 for sparsity threshold, any other for threshold value
    dc.inputs.inputspec.threshold = 0.0001
    dc.inputs.inputspec.weight_options = [True,
                                          True]  # list of two booleans for binarize and weighted options respectively
    wf.connect(epi_bp_tNorm_MNIspace_3mm, 'out_file', dc, 'inputspec.subject')
    wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', dc, 'inputspec.template')
    wf.connect(dc, 'outputspec.centrality_outputs', ds, 'metrics.centrality.dc.@centrality_outputs')
    wf.connect(dc, 'outputspec.correlation_matrix', ds, 'metrics.centrality.dc.@correlation_matrix')
    wf.connect(dc, 'outputspec.graph_outputs', ds, 'metrics.centrality.dc.@graph_outputs')

    # DC Z-SCORE
    dc_Z = cpac_centrality_z_score.get_cent_zscore(wf_name='dc_Z')
    wf.connect(dc, 'outputspec.centrality_outputs', dc_Z, 'inputspec.input_file')
    wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', dc_Z, 'inputspec.mask_file')
    wf.connect(dc_Z, 'outputspec.z_score_img', ds, 'metrics.centrality.dc_z.@output')

    a_mem = 20
    evc = cpac_centrality.create_resting_state_graphs(allocated_memory=a_mem, wf_name='evc')
    evc.plugin_args = {'submit_specs': 'request_memory = 20000'}

    evc.inputs.inputspec.method_option = 1  # 0 for degree centrality, 1 for eigenvector centrality, 2 for lFCD
    evc.inputs.inputspec.threshold_option = 0  # 0 for probability p_value, 1 for sparsity threshold, any other for threshold value
    evc.inputs.inputspec.threshold = 0.0001
    evc.inputs.inputspec.weight_options = [True,
                                           True]  # list of two booleans for binarize and weighted options respectively
    wf.connect(epi_bp_tNorm_MNIspace_3mm, 'out_file', evc, 'inputspec.subject')
    wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', evc, 'inputspec.template')
    wf.connect(evc, 'outputspec.centrality_outputs', ds, 'metrics.centrality.evc.@centrality_outputs')
    wf.connect(evc, 'outputspec.correlation_matrix', ds, 'metrics.centrality.evc.@correlation_matrix')
    wf.connect(evc, 'outputspec.graph_outputs', ds, 'metrics.centrality.evc.@graph_outputs')

    # EVC Z-SCORE
    evc_Z = cpac_centrality_z_score.get_cent_zscore(wf_name='evc_Z')
    wf.connect(evc, 'outputspec.centrality_outputs', evc_Z, 'inputspec.input_file')
    wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', evc_Z, 'inputspec.mask_file')
    wf.connect(evc_Z, 'outputspec.z_score_img', ds, 'metrics.centrality.evc_z.@output')

    wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})


Example #18
# apply fmap fullwarp
apply_fmap = Node(fsl.ApplyWarp(interp='spline',
                                relwarp=True,
                                out_file='fmap_ts.nii.gz',
                                datatype='float'),
                  name='apply_fmap')

apply_ts.connect([(inputnode, apply_fmap, [('moco_ts', 'in_file'),
                                           ('fmap_fullwarp', 'field_file')]),
                  (resamp_anat, apply_fmap, [('out_file', 'ref_file')]),
                  (apply_fmap, outputnode, [('out_file', 'fmap_ts')])
                  ])
apply_fmap.plugin_args = {'initial_specs': 'request_memory = 8000'}


# apply topup fullwarp
apply_topup = Node(fsl.ApplyWarp(interp='spline',
                                 relwarp=True,
                                 out_file='topup_ts.nii.gz',
                                 datatype='float'),
                   name='apply_topup')

apply_ts.connect([(inputnode, apply_topup, [('moco_ts', 'in_file'),
                                            ('topup_fullwarp', 'field_file')]),
                  (resamp_anat, apply_topup, [('out_file', 'ref_file')]),
                  (apply_topup, outputnode, [('out_file', 'topup_ts')])
                  ])
apply_topup.plugin_args = {'initial_specs': 'request_memory = 8000'}
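Both fragments above attach to an `apply_ts` workflow whose I/O nodes and resampled anatomy are defined elsewhere. A hypothetical sketch of that scaffolding, inferred from the connections:

# Hypothetical scaffolding assumed by the apply_fmap/apply_topup fragments.
apply_ts = Workflow(name='apply_ts')
inputnode = Node(util.IdentityInterface(fields=['moco_ts', 'fmap_fullwarp',
                                                'topup_fullwarp']),
                 name='inputnode')
outputnode = Node(util.IdentityInterface(fields=['fmap_ts', 'topup_ts']),
                  name='outputnode')
# anatomy resampled to the functional resolution; the interface is a guess
resamp_anat = Node(fsl.FLIRT(apply_isoxfm=3.0), name='resamp_anat')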
Example #19
def create_converter_functional_pipeline(working_dir,
                                         ds_dir,
                                         name='converter_funct'):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # I/O NODE
    inputnode = Node(
        util.IdentityInterface(fields=['epi_dicom', 'out_format']),
        name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=['epi', 'TR_ms']),
                      name='outputnode')

    niftisink = Node(nio.DataSink(), name='niftisink')
    niftisink.inputs.base_directory = os.path.join(ds_dir, 'raw_niftis')
    niftisink.inputs.substitutions = [('_TR_id_', 'TR_')]

    # convert to nifti
    # todo check if geometry bugs attack. use dcm2nii?
    converter_epi = Node(DcmStack(embed_meta=True), name='converter_epi')
    converter_epi.plugin_args = {'submit_specs': 'request_memory = 2000'}

    def reformat_filename_fct(TR_str):
        return 'rsfMRI_' + TR_str

    reformat_filename = Node(util.Function(input_names=['TR_str'],
                                           output_names=['filename'],
                                           function=reformat_filename_fct),
                             name='reformat_filename')

    converter_wf.connect(inputnode, 'out_format', reformat_filename, 'TR_str')
    converter_wf.connect(inputnode, 'epi_dicom', converter_epi, 'dicom_files')
    converter_wf.connect(reformat_filename, 'filename', converter_epi,
                         'out_format')

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name='reor_2_std')
    converter_wf.connect(converter_epi, 'out_file', reor_2_std, 'in_file')

    converter_wf.connect(reor_2_std, 'out_file', outputnode, 'epi')

    # save original niftis
    converter_wf.connect(reor_2_std, 'out_file', niftisink, 'rsfMRI')

    # GET TR FROM .nii
    def check_TR_fct(TR):
        print(' ')
        print('check_TR_fct checks validity of TR')
        print('imported TR is %s' % TR)
        print('  ')
        try:
            float(TR)
        except ValueError:
            isvalid_TR = 0
            raise Exception(
                'ERROR: TR COULD NOT AUTOMATICALLY BE EXTRACTED FROM EPI.\nEXECUTION STOPPED'
            )
        else:
            isvalid_TR = 1
            print('TR is valid')
        if isvalid_TR:
            if float(TR) <= 0:
                raise Exception(
                    'ERROR: TR NOT VALID (<=0).\nEXECUTION STOPPED')
        return float(TR)

    get_TR = Node(ImageInfo(), name='get_TR')
    converter_wf.connect(reor_2_std, 'out_file', get_TR, 'in_file')

    check_TR = Node(util.Function(input_names=['TR'],
                                  output_names=['TR_ms'],
                                  function=check_TR_fct),
                    name='check_TR')

    converter_wf.connect(get_TR, 'TR', check_TR, 'TR')
    converter_wf.connect(check_TR, 'TR_ms', outputnode, 'TR_ms')

    converter_wf.write_graph(dotfilename=converter_wf.name,
                             graph2use='flat',
                             format='pdf')

    return converter_wf
Example #20
                   name="selectfiles")

# make filelist
def makelist(in1, in2, in3, in4):
    return [in1, in2, in3, in4]

make_list = Node(util.Function(input_names=['in1', 'in2', 'in3', 'in4'],
                               output_names=['file_list'],
                               function=makelist),
                 name='make_list')

# concatenate scans
concatenate = Node(fsl.Merge(dimension='t',
                             merged_file='rest_concatenated.nii.gz'),
                   name='concatenate')
concatenate.plugin_args = {'submit_specs': 'request_memory = 20000'}

# sink
sink = Node(nio.DataSink(base_directory=out_dir,
                         parameterization=False),
            name='sink')

concat.connect([(selectfiles, make_list, [('rest1a', 'in1'),
                                          ('rest1b', 'in2'),
                                          ('rest2a', 'in3'),
                                          ('rest2b', 'in4')]),
                (make_list, concatenate, [('file_list', 'in_files')]),
                (concatenate, sink, [('merged_file', '@rest_concat')])
                ])

concat.run()
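As an aside, the hand-rolled makelist node above reimplements what nipype's built-in util.Merge interface already provides (compare the translist nodes in Examples #14 and #21). A sketch of the equivalent:

# Equivalent of make_list using nipype's built-in Merge interface.
make_list = Node(util.Merge(4), name='make_list')
# wiring then targets in1..in4 and reads the 'out' output, e.g.:
# concat.connect(selectfiles, 'rest1a', make_list, 'in1')
# concat.connect(make_list, 'out', concatenate, 'in_files')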
Example #21
# make filelist
translist = Node(util.Merge(2),
                 name='translist')
mni.connect([(selectfiles, translist, [('affine', 'in2'),
                                       ('warp', 'in1')])])


# apply all transforms
applytransform = Node(ants.ApplyTransforms(input_image_type=3,
                                           #output_image='rest_preprocessed2mni.nii.gz',
                                           interpolation='BSpline',
                                           invert_transform_flags=[False, False]),
                      name='applytransform')

applytransform.inputs.reference_image = template
applytransform.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(selectfiles, applytransform, [('rest', 'input_image')]),
             (translist, applytransform, [('out', 'transforms')])
             ])

# tune down image to float
changedt = Node(fsl.ChangeDataType(output_datatype='float',
                                   out_file='rest_preprocessed2mni.nii.gz'),
                name='changedt')
changedt.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(applytransform, changedt, [('output_image', 'in_file')])])


# make base directory
def makebase(subject_id, out_dir):
    return out_dir % subject_id
Example #22
# note: this fragment assumes a two-argument makelist helper (not the
# four-argument one from Example #20)
transformlist = Node(interface=Function(input_names=['string1', 'string2'],
                                        output_names=['transformlist'],
                                        function=makelist),
                     name='transformlist')
    
apply_ts.connect([(inputnode, transformlist, [('nonlin_epi2anat_warp', 'string2'),
                                              ('nonlin_anat2epi_itk', 'string1')
                                              ])
                  ])

nonlin_apply = Node(ants.ApplyTransforms(input_image_type=3,
                                         output_image='nonlin_ts.nii.gz',
                                         invert_transform_flags=[True, False],
                                         interpolation='BSpline'),
                    'nonlin_apply')
nonlin_apply.plugin_args = {'initial_specs': 'request_memory = 22000'}
  
apply_ts.connect([(inputnode, nonlin_apply, [('moco_ts', 'input_image'),
                                             #('anat_head', 'reference_image')
                                               ]),
                  (resamp_anat, nonlin_apply, [('out_file', 'reference_image')]),
                  (transformlist, nonlin_apply, [('transformlist', 'transforms')]),
                  (nonlin_apply, outputnode, [('output_image', 'nonlin_ts')])
                  ])

# set up workflow, in- and output
apply_ts.base_dir = '/scr/kansas1/huntenburg/'
data_dir = '/scr/jessica2/Schaare/LEMON/'
#out_dir = '/scr/kansas1/huntenburg/timeseries/'
#applywarp_linear.config['execution']={'remove_unnecessary_outputs': 'False'}
apply_ts.config['execution']['crashdump_dir'] = apply_ts.base_dir + "/crash_files"
Example #23
def create_denoise_pipeline(working_dir, ds_dir, name='denoise'):
    # workflow
    denoise_wf = Workflow(name=name)
    denoise_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting',
                                       'rsfMRI_preprocessing')

    # I/O NODES
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'subject_id', 'epi', 'mean_epi', 'par_moco', 'struct_2_epi_mat',
        'MNI_2_epi_warp', 'lat_ventricle_mask_MNI', 'wm_mask', 'csf_mask',
        'brain_mask_epiSpace', 'TR_ms', 'lp_cutoff_freq', 'hp_cutoff_freq'
    ]),
                     name='inputnode')

    outputnode = Node(interface=util.IdentityInterface(
        fields=['outlier_files', 'rs_preprocessed']),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    ######### TRANSFORM MASKS

    wm_mask_epiSpace = Node(fsl.ApplyXfm(apply_xfm=True,
                                         interp='nearestneighbour'),
                            name='wm_mask_epiSpace')
    wm_mask_epiSpace.inputs.out_file = 'wm_mask_epiSpace.nii.gz'
    denoise_wf.connect([(inputnode, wm_mask_epiSpace,
                         [('wm_mask', 'in_file'), ('mean_epi', 'reference'),
                          ('struct_2_epi_mat', 'in_matrix_file')])])

    denoise_wf.connect(wm_mask_epiSpace, 'out_file', ds,
                       'masks.wm_mask_epiSpace')

    csf_mask_epiSpace = Node(fsl.ApplyXfm(apply_xfm=True,
                                          interp='nearestneighbour'),
                             name='csf_mask_epiSpace')
    denoise_wf.connect([(inputnode, csf_mask_epiSpace,
                         [('csf_mask', 'in_file'), ('mean_epi', 'reference'),
                          ('struct_2_epi_mat', 'in_matrix_file')])])

    # MOVE LATERAL VENTRICLE MASK INTO EPI SPACE
    lat_ventricle_mask_epiSpace = Node(fsl.ApplyWarp(interp='nn'),
                                       name='lat_ventricle_mask_epiSpace')
    denoise_wf.connect(inputnode, 'lat_ventricle_mask_MNI',
                       lat_ventricle_mask_epiSpace, 'in_file')
    denoise_wf.connect(inputnode, 'mean_epi', lat_ventricle_mask_epiSpace,
                       'ref_file')
    denoise_wf.connect(inputnode, 'MNI_2_epi_warp',
                       lat_ventricle_mask_epiSpace, 'field_file')

    # CONFINE INDIVIDUAL CSF MASK TO LATERAL VENTRICLES
    csf_mask_lat_ventricles_epiSpace = Node(
        fsl.maths.BinaryMaths(operation='mul'),
        name='csf_mask_lat_ventricles_epiSpace')
    csf_mask_lat_ventricles_epiSpace.inputs.out_file = 'csf_mask_epiSpace.nii.gz'
    denoise_wf.connect(csf_mask_epiSpace, 'out_file',
                       csf_mask_lat_ventricles_epiSpace, 'in_file')
    denoise_wf.connect(lat_ventricle_mask_epiSpace, 'out_file',
                       csf_mask_lat_ventricles_epiSpace, 'operand_file')
    denoise_wf.connect(csf_mask_lat_ventricles_epiSpace, 'out_file', ds,
                       'masks.csf_mask_lat_ventr_epiSpace')

    # TR CONVERSION
    def get_TR_in_sec_fct(TR_ms):
        return TR_ms / 1000.0

    get_TR_in_sec = Node(util.Function(input_names=['TR_ms'],
                                       output_names=['TR_sec'],
                                       function=get_TR_in_sec_fct),
                         name='get_TR_in_sec')

    denoise_wf.connect(inputnode, 'TR_ms', get_TR_in_sec, 'TR_ms')

    # RUN ARTIFACT DETECTION
    artifact = Node(rapidart.ArtifactDetect(save_plot=True,
                                            use_norm=True,
                                            parameter_source='FSL',
                                            mask_type='file',
                                            norm_threshold=1,
                                            zintensity_threshold=3,
                                            use_differences=[True, False]),
                    name='artifact')
    artifact.plugin_args = {'submit_specs': 'request_memory = 17000'}
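    # ArtifactDetect flags volumes whose composite motion (frame-to-frame
    # differences of the realignment parameters) exceeds 1 mm or whose
    # global intensity z-score exceeds 3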

    denoise_wf.connect(inputnode, 'epi', artifact, 'realigned_files')
    denoise_wf.connect([(inputnode, artifact, [('par_moco',
                                                'realignment_parameters')])])
    denoise_wf.connect(inputnode, 'brain_mask_epiSpace', artifact, 'mask_file')

    denoise_wf.connect([
        (artifact, ds, [('norm_files', 'denoise.artefact.@combined_motion'),
                        ('outlier_files', 'denoise.artefact.@outlier'),
                        ('intensity_files', 'denoise.artefact.@intensity'),
                        ('statistic_files', 'denoise.artefact.@outlierstats'),
                        ('plot_files', 'denoise.artefact.@outlierplots')])
    ])
    denoise_wf.connect(artifact, 'outlier_files', outputnode, 'outlier_files')

    ############################

    def combine_motion_parameters_with_outliers_fct(motion_params,
                                                    outliers_file, spike_reg):
        """Adapted from rom https://github.com/nipy/nipype/blob/master/examples/
        rsfmri_vol_surface_preprocessing_nipy.py#L261
        """

        import numpy as np
        import os
        if spike_reg:
            out_params = np.genfromtxt(motion_params)
            try:
                outlier_val = np.genfromtxt(outliers_file)
            except IOError:
                outlier_val = np.empty((0))
            for index in np.atleast_1d(outlier_val):
                outlier_vector = np.zeros((out_params.shape[0], 1))
                outlier_vector[index] = 1
                out_params = np.hstack((out_params, outlier_vector))

            out_file = os.path.join(os.getcwd(),
                                    "motion_outlier_regressor.txt")
            np.savetxt(out_file, out_params, fmt="%.8f")
        else:
            out_file = motion_params

        return out_file
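
    # Illustration (not part of the pipeline): with spike_reg=True each
    # flagged outlier adds one one-hot column, e.g. a 200-volume run with
    # Friston-24 parameters (200 x 24) and 3 outliers yields a 200 x 27
    # regressor matrix.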

    ############################

    # COMPUTE MOTION DERIVATIVES (FRISTON24)
    friston24 = Node(util.Function(
        input_names=['in_file'],
        output_names=['friston_par'],
        function=cpac_generate_motion_statistics.calc_friston_twenty_four),
                     name='friston24')
    denoise_wf.connect(inputnode, 'par_moco', friston24, 'in_file')

    # CREATE OUTLIER DUMMY REGRESSOR AND COMBINE WITH FRISTON24
    motion_and_outlier_regressor = Node(util.Function(
        input_names=['motion_params', 'outliers_file', 'spike_reg'],
        output_names=['out_file'],
        function=combine_motion_parameters_with_outliers_fct),
                                        name='motion_and_outlier_regressor')
    motion_and_outlier_regressor.inputs.spike_reg = True
    denoise_wf.connect(friston24, 'friston_par', motion_and_outlier_regressor,
                       'motion_params')
    denoise_wf.connect(artifact, 'outlier_files', motion_and_outlier_regressor,
                       'outliers_file')

    # EXTRACT SIGNAL FROM WM AND CSF FOR COMPCOR
    wm_sig = Node(util.Function(input_names=['data_file', 'mask_file'],
                                output_names=['out_file'],
                                function=extract_signal_from_tissue),
                  name='wm_sig')

    denoise_wf.connect(inputnode, 'epi', wm_sig, 'data_file')
    denoise_wf.connect(wm_mask_epiSpace, 'out_file', wm_sig, 'mask_file')

    csf_sig = Node(util.Function(input_names=['data_file', 'mask_file'],
                                 output_names=['out_file'],
                                 function=extract_signal_from_tissue),
                   name='csf_sig')

    denoise_wf.connect(inputnode, 'epi', csf_sig, 'data_file')
    denoise_wf.connect(csf_mask_lat_ventricles_epiSpace, 'out_file', csf_sig,
                       'mask_file')

    nuisance_selector = {
        'compcor': True,
        'wm': False,
        'csf': False,
        'gm': False,
        'global': False,
        'pc1': False,
        'motion': True,
        'linear': True,
        'quadratic': True
    }
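
    # With this selector, calc_residuals regresses out the CompCor
    # components (5, set below), the combined motion/outlier regressors
    # passed in as 'motion_file', and linear plus quadratic trends; mean
    # WM/CSF/GM and global-signal regression remain disabled.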

    nuisance_reg = Node(util.Function(
        input_names=[
            'subject', 'selector', 'wm_sig_file', 'csf_sig_file',
            'gm_sig_file', 'motion_file', 'compcor_ncomponents'
        ],
        output_names=['residual_file', 'regressors_file'],
        function=cpac_nuisance.calc_residuals),
                        name='nuisance_reg')

    nuisance_reg.inputs.compcor_ncomponents = 5
    nuisance_reg.inputs.selector = nuisance_selector
    denoise_wf.connect(inputnode, 'epi', nuisance_reg, 'subject')
    denoise_wf.connect(wm_sig, 'out_file', nuisance_reg, 'wm_sig_file')
    denoise_wf.connect(csf_sig, 'out_file', nuisance_reg, 'csf_sig_file')
    denoise_wf.connect(motion_and_outlier_regressor, 'out_file', nuisance_reg,
                       'motion_file')
    denoise_wf.connect(nuisance_reg, 'regressors_file', ds,
                       'denoise.regression.regressor')
    denoise_wf.connect(nuisance_reg, 'residual_file', ds, 'epis.01_denoised')

    ############################

    # BANDPASS FILTER
    # sigma calculation see
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    def calc_bp_sigma_fct(TR_sec, cutoff_freq):
        sigma = 1. / (2 * TR_sec * cutoff_freq)
        return sigma
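
    # Worked example: with TR = 2.0 s, a low-pass cutoff of 0.1 Hz gives
    # sigma = 1 / (2 * 2.0 * 0.1) = 2.5 volumes, and a high-pass cutoff of
    # 0.01 Hz gives sigma = 1 / (2 * 2.0 * 0.01) = 25 volumes.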

    calc_lp_sigma = Node(util.Function(input_names=['TR_sec', 'cutoff_freq'],
                                       output_names=['sigma'],
                                       function=calc_bp_sigma_fct),
                         name='calc_lp_sigma')
    denoise_wf.connect(get_TR_in_sec, 'TR_sec', calc_lp_sigma, 'TR_sec')
    denoise_wf.connect(inputnode, 'lp_cutoff_freq', calc_lp_sigma,
                       'cutoff_freq')

    calc_hp_sigma = Node(util.Function(input_names=['TR_sec', 'cutoff_freq'],
                                       output_names=['sigma'],
                                       function=calc_bp_sigma_fct),
                         name='calc_hp_sigma')
    denoise_wf.connect(get_TR_in_sec, 'TR_sec', calc_hp_sigma, 'TR_sec')
    denoise_wf.connect(inputnode, 'hp_cutoff_freq', calc_hp_sigma,
                       'cutoff_freq')

    bp_filter = Node(fsl.TemporalFilter(), name='bp_filter')
    bp_filter.plugin_args = {'submit_specs': 'request_memory = 4000'}

    denoise_wf.connect(nuisance_reg, 'residual_file', bp_filter, 'in_file')
    denoise_wf.connect(calc_lp_sigma, 'sigma', bp_filter, 'lowpass_sigma')
    denoise_wf.connect(calc_hp_sigma, 'sigma', bp_filter, 'highpass_sigma')
    denoise_wf.connect(bp_filter, 'out_file', ds, 'epis.02_denoised_BP')

    # TIME-NORMALIZE SCAN
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    #fixme req mem needed?
    #normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise_wf.connect(bp_filter, 'out_file', normalize_time, 'in_file')
    denoise_wf.connect(get_TR_in_sec, 'TR_sec', normalize_time, 'tr')

    denoise_wf.connect(normalize_time, 'out_file', outputnode,
                       'rs_preprocessed')
    denoise_wf.connect(normalize_time, 'out_file', ds,
                       'epis.03_denoised_BP_tNorm')

    denoise_wf.write_graph(dotfilename=denoise_wf.name,
                           graph2use='flat',
                           format='pdf')

    return denoise_wf
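
# test run sketch (mirrors the commented test blocks elsewhere in this file;
# paths are placeholders and the remaining inputnode fields, e.g. 'epi' and
# the masks, still have to be set):
# test_denoise = create_denoise_pipeline(working_dir='/tmp/work', ds_dir='/tmp/ds')
# test_denoise.inputs.inputnode.TR_ms = 2000
# test_denoise.inputs.inputnode.lp_cutoff_freq = 0.1
# test_denoise.inputs.inputnode.hp_cutoff_freq = 0.01
# test_denoise.run()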
Exemplo n.º 24
0
                   name="selectfiles")


# make filelist
def makelist(in1, in2, in3, in4):
    return [in1, in2, in3, in4]
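
# e.g. makelist('rest1a.nii', 'rest1b.nii', 'rest2a.nii', 'rest2b.nii')
# returns the 4-element list that fsl.Merge expects on 'in_files'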


make_list = Node(util.Function(input_names=['in1', 'in2', 'in3', 'in4'],
                               output_names=['file_list'],
                               function=makelist),
                 name='make_list')

# concatenate scans
concatenate = Node(fsl.Merge(dimension='t',
                             merged_file='rest_concatenated.nii.gz'),
                   name='concatenate')
concatenate.plugin_args = {'submit_specs': 'request_memory = 20000'}

# sink
sink = Node(nio.DataSink(base_directory=out_dir, parameterization=False),
            name='sink')

concat.connect([(selectfiles, make_list, [('rest1a', 'in1'), ('rest1b', 'in2'),
                                          ('rest2a', 'in3'),
                                          ('rest2b', 'in4')]),
                (make_list, concatenate, [('file_list', 'in_files')]),
                (concatenate, sink, [('merged_file', '@rest_concat')])])

concat.run()
#concat.run(plugin='CondorDAGMan')
Exemplo n.º 25
0
def create_nonlinear_pipeline(name='nonlinear'):

    # workflow
    nonlinear = Workflow(name='nonlinear')

    # inputnode
    inputnode = Node(
        util.IdentityInterface(fields=[
            't1_highres',
            'epi2highres_lin',
            'epi2highres_lin_itk',
            'fov_mask',
            'brain_mask',
            #'highres2lowres_itk'
        ]),
        name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi2highres_warp',
        'epi2highres_invwarp',
        'epi2highres_nonlin',
    ]),
                      name='outputnode')

    #
    #     brainmask = Node(ants.ApplyTransforms(dimension=3,
    #                                           invert_transform_flags=[True],
    #                                           interpolation = 'NearestNeighbor'),
    #                      name='brainmask')
    #
    dil_brainmask = Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=15),
                         name='dil_brainmask')

    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')

    nonlinear.connect([  #(inputnode, brainmask, [('brain_mask', 'input_image'),
        #                        ('t1_highres', 'reference_image'),
        #                        ('highres2lowres_itk', 'transforms')]),
        #(brainmask, dil_brainmask, [('output_image', 'in_file')]),
        (inputnode, dil_brainmask, [('brain_mask', 'in_file')]),
        (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
        (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
    ])

    # transform fov mask and apply to t1
    transform_fov = Node(
        ants.ApplyTransforms(
            dimension=3,
            #invert_transform_flags=[True, False],
            output_image='fov_mask_highres.nii.gz',
            interpolation='NearestNeighbor'),
        'transform_fov')

    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')

    mask_t1 = Node(fsl.ApplyMask(out_file='t1_fov_masked.nii.gz'),
                   name='mask_t1')

    nonlinear.connect([
        (inputnode, transform_fov, [('fov_mask', 'input_image'),
                                    ('t1_highres', 'reference_image'),
                                    ('epi2highres_lin_itk', 'transforms')]),
        (transform_fov, dilate_fov, [('output_image', 'in_file')]),
        (dilate_fov, mask_t1, [('binary_file', 'mask_file')]),
        (inputnode, mask_t1, [('t1_highres', 'in_file')]),
    ])

    # normalization with ants
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g 0.1x1x0.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[50, 20, 10]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[4, 2, 1]],
        smoothing_sigmas=[[2, 1, 0]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True,
        interpolation='BSpline'),
                   name='antsreg')
    antsreg.plugin_args = {'submit_specs': 'request_memory = 20000'}
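
    # Reading of the settings above: cross-correlation (CC) metric with
    # radius 4, SyN with gradient step 0.1 and 3-voxel update-field
    # regularization, run over a 3-level pyramid (shrink 4/2/1, smoothing
    # 2/1/0 vox, 50/20/10 iterations); '-g 0.1x1x0.1' weights the allowed
    # deformation per axis (here favouring the second axis)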

    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode,
                        [('reverse_transforms', 'epi2highres_invwarp'),
                         ('forward_transforms', 'epi2highres_warp'),
                         ('warped_image', 'epi2highres_nonlin')])])

    return nonlinear


# test_nonlinear=create_nonlinear_pipeline('nonlinear')
# test_nonlinear.base_dir='/scr/kansas1/huntenburg/7tresting/working/highres_bias_bspline_maskepi/'
# test_nonlinear.config['execution']['crashdump_dir'] = test_nonlinear.base_dir + "/crash_files"
# test_nonlinear.config['execution']['remove_unnecessary_outputs'] = False
# test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/t1_resampled.nii.gz'
# test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/rest_coregistered_mean.nii.gz'
# #test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/highres/t1.nii.gz'
# #test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/coreg_testing/flirt_epi2highres_t1_cr_bbr_onestep.nii.gz'
# test_nonlinear.run()#plugin='CondorDAGMan')
Exemplo n.º 26
0
def Lesion_extractor(
    name='Lesion_Extractor',
    wf_name='Test',
    base_dir='/homes_unix/alaurent/',
    input_dir=None,
    subjects=None,
    main=None,
    acc=None,
    atlas='/homes_unix/alaurent/cbstools-public-master/atlases/brain-segmentation-prior3.0/brain-atlas-quant-3.0.8.txt'
):

    wf = Workflow(wf_name)
    wf.base_dir = base_dir

    #file = open(subjects,"r")
    #subjects = file.read().split("\n")
    #file.close()

    # Subject List
    subjectList = Node(IdentityInterface(fields=['subject_id'],
                                         mandatory_inputs=True),
                       name="subList")
    subjectList.iterables = ('subject_id', [
        sub for sub in subjects if sub != '' and sub != '\n'
    ])

    # T1w and FLAIR
    scanList = Node(DataGrabber(infields=['subject_id'],
                                outfields=['T1', 'FLAIR']),
                    name="scanList")
    scanList.inputs.base_directory = input_dir
    scanList.inputs.ignore_exception = False
    scanList.inputs.raise_on_empty = True
    scanList.inputs.sort_filelist = True
    #scanList.inputs.template = '%s/%s.nii'
    #scanList.inputs.template_args = {'T1': [['subject_id','T1*']],
    #                                 'FLAIR': [['subject_id','FLAIR*']]}
    scanList.inputs.template = '%s/anat/%s'
    scanList.inputs.template_args = {
        'T1': [['subject_id', '*_T1w.nii.gz']],
        'FLAIR': [['subject_id', '*_FLAIR.nii.gz']]
    }
    wf.connect(subjectList, "subject_id", scanList, "subject_id")

    #     # T1w and FLAIR
    #     dg = Node(DataGrabber(outfields=['T1', 'FLAIR']), name="T1wFLAIR")
    #     dg.inputs.base_directory = "/homes_unix/alaurent/LesionPipeline"
    #     dg.inputs.template = "%s/NIFTI/*.nii.gz"
    #     dg.inputs.template_args['T1']=[['7']]
    #     dg.inputs.template_args['FLAIR']=[['9']]
    #     dg.inputs.sort_filelist=True

    # Reorient Volume
    T1Conv = Node(Reorient2Std(), name="ReorientVolume")
    T1Conv.inputs.ignore_exception = False
    T1Conv.inputs.terminal_output = 'none'
    T1Conv.inputs.out_file = "T1_reoriented.nii.gz"
    wf.connect(scanList, "T1", T1Conv, "in_file")

    # Reorient Volume (2)
    T2flairConv = Node(Reorient2Std(), name="ReorientVolume2")
    T2flairConv.inputs.ignore_exception = False
    T2flairConv.inputs.terminal_output = 'none'
    T2flairConv.inputs.out_file = "FLAIR_reoriented.nii.gz"
    wf.connect(scanList, "FLAIR", T2flairConv, "in_file")

    # N3 Correction
    T1NUC = Node(N4BiasFieldCorrection(), name="N3Correction")
    T1NUC.inputs.dimension = 3
    T1NUC.inputs.environ = {'NSLOTS': '1'}
    T1NUC.inputs.ignore_exception = False
    T1NUC.inputs.num_threads = 1
    T1NUC.inputs.save_bias = False
    T1NUC.inputs.terminal_output = 'none'
    wf.connect(T1Conv, "out_file", T1NUC, "input_image")

    # N3 Correction (2)
    T2flairNUC = Node(N4BiasFieldCorrection(), name="N3Correction2")
    T2flairNUC.inputs.dimension = 3
    T2flairNUC.inputs.environ = {'NSLOTS': '1'}
    T2flairNUC.inputs.ignore_exception = False
    T2flairNUC.inputs.num_threads = 1
    T2flairNUC.inputs.save_bias = False
    T2flairNUC.inputs.terminal_output = 'none'
    wf.connect(T2flairConv, "out_file", T2flairNUC, "input_image")
    '''
    #####################
    ### PRE-NORMALIZE ###
    #####################
    To make sure there are no outlier values (negative, or really high) that would offset the initialization steps
    '''

    # Intensity Range Normalization
    getMaxT1NUC = Node(ImageStats(op_string='-r'), name="getMaxT1NUC")
    wf.connect(T1NUC, 'output_image', getMaxT1NUC, 'in_file')

    T1NUCirn = Node(AbcImageMaths(), name="IntensityNormalization")
    T1NUCirn.inputs.op_string = "-div"
    T1NUCirn.inputs.out_file = "normT1.nii.gz"
    wf.connect(T1NUC, 'output_image', T1NUCirn, 'in_file')
    wf.connect(getMaxT1NUC, ('out_stat', getElementFromList, 1), T1NUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2NUC = Node(ImageStats(op_string='-r'), name="getMaxT2")
    wf.connect(T2flairNUC, 'output_image', getMaxT2NUC, 'in_file')

    T2NUCirn = Node(AbcImageMaths(), name="IntensityNormalization2")
    T2NUCirn.inputs.op_string = "-div"
    T2NUCirn.inputs.out_file = "normT2.nii.gz"
    wf.connect(T2flairNUC, 'output_image', T2NUCirn, 'in_file')
    wf.connect(getMaxT2NUC, ('out_stat', getElementFromList, 1), T2NUCirn,
               "op_value")
    '''
    ########################
    #### COREGISTRATION ####
    ########################
    '''

    # Optimized Automated Registration
    T2flairCoreg = Node(FLIRT(), name="OptimizedAutomatedRegistration")
    T2flairCoreg.inputs.output_type = 'NIFTI_GZ'
    wf.connect(T2NUCirn, "out_file", T2flairCoreg, "in_file")
    wf.connect(T1NUCirn, "out_file", T2flairCoreg, "reference")
    '''    
    #########################
    #### SKULL-STRIPPING ####
    #########################
    '''

    # SPECTRE
    T1ss = Node(BET(), name="SPECTRE")
    T1ss.inputs.frac = 0.45  #0.4
    T1ss.inputs.mask = True
    T1ss.inputs.outline = True
    T1ss.inputs.robust = True
    wf.connect(T1NUCirn, "out_file", T1ss, "in_file")

    # Image Calculator
    T2ss = Node(ApplyMask(), name="ImageCalculator")
    wf.connect(T1ss, "mask_file", T2ss, "mask_file")
    wf.connect(T2flairCoreg, "out_file", T2ss, "in_file")
    '''
    ####################################
    #### 2nd LAYER OF N3 CORRECTION ####
    ####################################
    This time without the skull: significant inhomogeneities were left over after the first pass.
    '''

    # N3 Correction (3)
    T1ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction3")
    T1ssNUC.inputs.dimension = 3
    T1ssNUC.inputs.environ = {'NSLOTS': '1'}
    T1ssNUC.inputs.ignore_exception = False
    T1ssNUC.inputs.num_threads = 1
    T1ssNUC.inputs.save_bias = False
    T1ssNUC.inputs.terminal_output = 'none'
    wf.connect(T1ss, "out_file", T1ssNUC, "input_image")

    # N3 Correction (4)
    T2ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction4")
    T2ssNUC.inputs.dimension = 3
    T2ssNUC.inputs.environ = {'NSLOTS': '1'}
    T2ssNUC.inputs.ignore_exception = False
    T2ssNUC.inputs.num_threads = 1
    T2ssNUC.inputs.save_bias = False
    T2ssNUC.inputs.terminal_output = 'none'
    wf.connect(T2ss, "out_file", T2ssNUC, "input_image")
    '''
    ####################################
    ####    NORMALIZE FOR MGDM      ####
    ####################################
    This normalization is a bit aggressive: it is only useful for feeding a
    cropped dynamic range into MGDM, and possibly harmful to further
    processing, so the unprocessed images are passed to the subsequent steps.
    '''

    # Intensity Range Normalization
    getMaxT1ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT1ssNUC")
    wf.connect(T1ssNUC, 'output_image', getMaxT1ssNUC, 'in_file')

    T1ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization3")
    T1ssNUCirn.inputs.op_string = "-div"
    T1ssNUCirn.inputs.out_file = "normT1ss.nii.gz"
    wf.connect(T1ssNUC, 'output_image', T1ssNUCirn, 'in_file')
    wf.connect(getMaxT1ssNUC, ('out_stat', getElementFromList, 1), T1ssNUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT2ssNUC")
    wf.connect(T2ssNUC, 'output_image', getMaxT2ssNUC, 'in_file')

    T2ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization4")
    T2ssNUCirn.inputs.op_string = "-div"
    T2ssNUCirn.inputs.out_file = "normT2ss.nii.gz"
    wf.connect(T2ssNUC, 'output_image', T2ssNUCirn, 'in_file')
    wf.connect(getMaxT2ssNUC, ('out_stat', getElementFromList, 1), T2ssNUCirn,
               "op_value")
    '''
    ####################################
    ####      ESTIMATE CSF PV       ####
    ####################################
    Here we try to get a better handle on CSF voxels to help the segmentation step
    '''

    # Recursive Ridge Diffusion
    CSF_pv = Node(RecursiveRidgeDiffusion(), name='estimate_CSF_pv')
    CSF_pv.plugin_args = {'sbatch_args': '--mem 6000'}
    CSF_pv.inputs.ridge_intensities = "dark"
    CSF_pv.inputs.ridge_filter = "2D"
    CSF_pv.inputs.orientation = "undefined"
    CSF_pv.inputs.ang_factor = 1.0
    CSF_pv.inputs.min_scale = 0
    CSF_pv.inputs.max_scale = 3
    CSF_pv.inputs.propagation_model = "diffusion"
    CSF_pv.inputs.diffusion_factor = 0.5
    CSF_pv.inputs.similarity_scale = 0.1
    CSF_pv.inputs.neighborhood_size = 4
    CSF_pv.inputs.max_iter = 100
    CSF_pv.inputs.max_diff = 0.001
    CSF_pv.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, CSF_pv.name),
        CSF_pv, 'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', CSF_pv, 'input_image')
    '''
    ####################################
    ####            MGDM            ####
    ####################################
    '''

    # Multi-contrast Brain Segmentation
    MGDM = Node(MGDMSegmentation(), name='MGDM')
    MGDM.plugin_args = {'sbatch_args': '--mem 7000'}
    MGDM.inputs.contrast_type1 = "Mprage3T"
    MGDM.inputs.contrast_type2 = "FLAIR3T"
    MGDM.inputs.contrast_type3 = "PVDURA"
    MGDM.inputs.save_data = True
    MGDM.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, MGDM.name), MGDM,
        'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', MGDM, 'contrast_image1')
    wf.connect(T2ssNUCirn, 'out_file', MGDM, 'contrast_image2')
    wf.connect(CSF_pv, 'ridge_pv', MGDM, 'contrast_image3')

    # Enhance Region Contrast
    ERC = Node(EnhanceRegionContrast(), name='ERC')
    ERC.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC.inputs.enhanced_region = "crwm"
    ERC.inputs.contrast_background = "crgm"
    ERC.inputs.partial_voluming_distance = 2.0
    ERC.inputs.save_data = True
    ERC.inputs.atlas_file = atlas
    wf.connect(subjectList,
               ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC.name),
               ERC, 'output_dir')
    wf.connect(T1ssNUC, 'output_image', ERC, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC, 'levelset_boundary_image')

    # Enhance Region Contrast (2)
    ERC2 = Node(EnhanceRegionContrast(), name='ERC2')
    ERC2.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC2.inputs.enhanced_region = "crwm"
    ERC2.inputs.contrast_background = "crgm"
    ERC2.inputs.partial_voluming_distance = 2.0
    ERC2.inputs.save_data = True
    ERC2.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC2.name), ERC2,
        'output_dir')
    wf.connect(T2ssNUC, 'output_image', ERC2, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC2, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC2, 'levelset_boundary_image')

    # Define Multi-Region Priors
    DMRP = Node(DefineMultiRegionPriors(), name='DefineMultRegPriors')
    DMRP.plugin_args = {'sbatch_args': '--mem 6000'}
    #DMRP.inputs.defined_region = "ventricle-horns"
    #DMRP.inputs.definition_method = "closest-distance"
    DMRP.inputs.distance_offset = 3.0
    DMRP.inputs.save_data = True
    DMRP.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, DMRP.name), DMRP,
        'output_dir')
    wf.connect(MGDM, 'segmentation', DMRP, 'segmentation_image')
    wf.connect(MGDM, 'distance', DMRP, 'levelset_boundary_image')
    '''
    ###############################################
    ####      REMOVE VENTRICLE POSTERIOR       ####
    ###############################################
    Due to topology constraints, the ventricles are often not fully segmented:
    here we add back all ventricle voxels from the posterior probability map
    (computed without the topology constraints).
    '''

    # Posterior label
    PostLabel = Node(Split(), name='PosteriorLabel')
    PostLabel.inputs.dimension = "t"
    wf.connect(MGDM, 'labels', PostLabel, 'in_file')

    # Posterior proba
    PostProba = Node(Split(), name='PosteriorProba')
    PostProba.inputs.dimension = "t"
    wf.connect(MGDM, 'memberships', PostProba, 'in_file')

    # Threshold binary mask : ventricle label part 1
    VentLabel1 = Node(Threshold(), name="VentricleLabel1")
    VentLabel1.inputs.thresh = 10.5
    VentLabel1.inputs.direction = "below"
    wf.connect(PostLabel, ("out_files", getFirstElement), VentLabel1,
               "in_file")

    # Threshold binary mask : ventricle label part 2
    VentLabel2 = Node(Threshold(), name="VentricleLabel2")
    VentLabel2.inputs.thresh = 13.5
    VentLabel2.inputs.direction = "above"
    wf.connect(VentLabel1, "out_file", VentLabel2, "in_file")

    # Image calculator : ventricle proba
    VentProba = Node(ImageMaths(), name="VentricleProba")
    VentProba.inputs.op_string = "-mul"
    VentProba.inputs.out_file = "ventproba.nii.gz"
    wf.connect(PostProba, ("out_files", getFirstElement), VentProba, "in_file")
    wf.connect(VentLabel2, "out_file", VentProba, "in_file2")
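
    # The two thresholds keep label values between 10.5 and 13.5 (presumably
    # the ventricle labels in this atlas); VentProba then multiplies the
    # posterior probability map by that selection.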

    # Image calculator : remove inter ventricles
    RmInterVent = Node(ImageMaths(), name="RemoveInterVent")
    RmInterVent.inputs.op_string = "-sub"
    RmInterVent.inputs.out_file = "rmintervent.nii.gz"
    wf.connect(ERC, "region_pv", RmInterVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInterVent, "in_file2")

    # Image calculator : add horns
    AddHorns = Node(ImageMaths(), name="AddHorns")
    AddHorns.inputs.op_string = "-add"
    AddHorns.inputs.out_file = "rmvent.nii.gz"
    wf.connect(RmInterVent, "out_file", AddHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddHorns, "in_file2")

    # Image calculator : remove ventricles
    RmVent = Node(ImageMaths(), name="RemoveVentricles")
    RmVent.inputs.op_string = "-sub"
    RmVent.inputs.out_file = "rmvent.nii.gz"
    wf.connect(AddHorns, "out_file", RmVent, "in_file")
    wf.connect(VentProba, "out_file", RmVent, "in_file2")

    # Image calculator : remove internal capsule
    RmIC = Node(ImageMaths(), name="RemoveInternalCap")
    RmIC.inputs.op_string = "-sub"
    RmIC.inputs.out_file = "rmic.nii.gz"
    wf.connect(RmVent, "out_file", RmIC, "in_file")
    wf.connect(DMRP, "internal_capsule_pv", RmIC, "in_file2")

    # Intensity Range Normalization (3)
    getMaxRmIC = Node(ImageStats(op_string='-r'), name="getMaxRmIC")
    wf.connect(RmIC, 'out_file', getMaxRmIC, 'in_file')

    RmICirn = Node(AbcImageMaths(), name="IntensityNormalization5")
    RmICirn.inputs.op_string = "-div"
    RmICirn.inputs.out_file = "normRmIC.nii.gz"
    wf.connect(RmIC, 'out_file', RmICirn, 'in_file')
    wf.connect(getMaxRmIC, ('out_stat', getElementFromList, 1), RmICirn,
               "op_value")

    # Probability To Levelset : WM orientation
    WM_Orient = Node(ProbabilityToLevelset(), name='WM_Orientation')
    WM_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_Orient.name),
        WM_Orient, 'output_dir')
    wf.connect(RmICirn, 'out_file', WM_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in WM only
    WM_pvs = Node(RecursiveRidgeDiffusion(), name='PVS_in_WM')
    WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_pvs.inputs.ridge_intensities = "bright"
    WM_pvs.inputs.ridge_filter = "1D"
    WM_pvs.inputs.orientation = "orthogonal"
    WM_pvs.inputs.ang_factor = 1.0
    WM_pvs.inputs.min_scale = 0
    WM_pvs.inputs.max_scale = 3
    WM_pvs.inputs.propagation_model = "diffusion"
    WM_pvs.inputs.diffusion_factor = 1.0
    WM_pvs.inputs.similarity_scale = 1.0
    WM_pvs.inputs.neighborhood_size = 2
    WM_pvs.inputs.max_iter = 100
    WM_pvs.inputs.max_diff = 0.001
    WM_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_pvs.name),
        WM_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', WM_pvs, 'input_image')
    wf.connect(WM_Orient, 'levelset', WM_pvs, 'surface_levelset')
    wf.connect(RmICirn, 'out_file', WM_pvs, 'loc_prior')

    # Extract Lesions : extract WM PVS
    extract_WM_pvs = Node(LesionExtraction(), name='ExtractPVSfromWM')
    extract_WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WM_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WM_pvs.inputs.csf_boundary_partial_vol_dist = 3.0
    extract_WM_pvs.inputs.lesion_clust_dist = 1.0
    extract_WM_pvs.inputs.prob_min_thresh = 0.1
    extract_WM_pvs.inputs.prob_max_thresh = 0.33
    extract_WM_pvs.inputs.small_lesion_size = 4.0
    extract_WM_pvs.inputs.save_data = True
    extract_WM_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WM_pvs.name), extract_WM_pvs,
               'output_dir')
    wf.connect(WM_pvs, 'propagation', extract_WM_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WM_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WM_pvs, 'levelset_boundary_image')
    wf.connect(RmICirn, 'out_file', extract_WM_pvs, 'location_prior_image')
    '''
    2nd branch
    '''

    # Image calculator : internal capsule without ventricles
    ICwoVent = Node(ImageMaths(), name="ICWithoutVentricules")
    ICwoVent.inputs.op_string = "-sub"
    ICwoVent.inputs.out_file = "icwovent.nii.gz"
    wf.connect(DMRP, "internal_capsule_pv", ICwoVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", ICwoVent, "in_file2")

    # Image calculator : remove ventricles IC
    RmVentIC = Node(ImageMaths(), name="RmVentIC")
    RmVentIC.inputs.op_string = "-sub"
    RmVentIC.inputs.out_file = "RmVentIC.nii.gz"
    wf.connect(ICwoVent, "out_file", RmVentIC, "in_file")
    wf.connect(VentProba, "out_file", RmVentIC, "in_file2")

    # Intensity Range Normalization (4)
    getMaxRmVentIC = Node(ImageStats(op_string='-r'), name="getMaxRmVentIC")
    wf.connect(RmVentIC, 'out_file', getMaxRmVentIC, 'in_file')

    RmVentICirn = Node(AbcImageMaths(), name="IntensityNormalization6")
    RmVentICirn.inputs.op_string = "-div"
    RmVentICirn.inputs.out_file = "normRmVentIC.nii.gz"
    wf.connect(RmVentIC, 'out_file', RmVentICirn, 'in_file')
    wf.connect(getMaxRmVentIC, ('out_stat', getElementFromList, 1),
               RmVentICirn, "op_value")

    # Probability To Levelset : IC orientation
    IC_Orient = Node(ProbabilityToLevelset(), name='IC_Orientation')
    IC_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_Orient.name),
        IC_Orient, 'output_dir')
    wf.connect(RmVentICirn, 'out_file', IC_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in IC only
    IC_pvs = Node(RecursiveRidgeDiffusion(), name='RecursiveRidgeDiffusion2')
    IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_pvs.inputs.ridge_intensities = "bright"
    IC_pvs.inputs.ridge_filter = "1D"
    IC_pvs.inputs.orientation = "undefined"
    IC_pvs.inputs.ang_factor = 1.0
    IC_pvs.inputs.min_scale = 0
    IC_pvs.inputs.max_scale = 3
    IC_pvs.inputs.propagation_model = "diffusion"
    IC_pvs.inputs.diffusion_factor = 1.0
    IC_pvs.inputs.similarity_scale = 1.0
    IC_pvs.inputs.neighborhood_size = 2
    IC_pvs.inputs.max_iter = 100
    IC_pvs.inputs.max_diff = 0.001
    IC_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_pvs.name),
        IC_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', IC_pvs, 'input_image')
    wf.connect(IC_Orient, 'levelset', IC_pvs, 'surface_levelset')
    wf.connect(RmVentICirn, 'out_file', IC_pvs, 'loc_prior')

    # Extract Lesions : extract IC PVS
    extract_IC_pvs = Node(LesionExtraction(), name='ExtractPVSfromIC')
    extract_IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_IC_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_IC_pvs.inputs.csf_boundary_partial_vol_dist = 4.0
    extract_IC_pvs.inputs.lesion_clust_dist = 1.0
    extract_IC_pvs.inputs.prob_min_thresh = 0.25
    extract_IC_pvs.inputs.prob_max_thresh = 0.5
    extract_IC_pvs.inputs.small_lesion_size = 4.0
    extract_IC_pvs.inputs.save_data = True
    extract_IC_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_IC_pvs.name), extract_IC_pvs,
               'output_dir')
    wf.connect(IC_pvs, 'propagation', extract_IC_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_IC_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_IC_pvs, 'levelset_boundary_image')
    wf.connect(RmVentICirn, 'out_file', extract_IC_pvs, 'location_prior_image')
    '''
    3rd branch
    '''

    # Image calculator : remove inter-ventricular region
    RmInter = Node(ImageMaths(), name="RemoveInterVentricules")
    RmInter.inputs.op_string = "-sub"
    RmInter.inputs.out_file = "rminter.nii.gz"
    wf.connect(ERC2, 'region_pv', RmInter, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInter, "in_file2")

    # Image calculator : add back ventricular horns
    AddVentHorns = Node(ImageMaths(), name="AddVentHorns")
    AddVentHorns.inputs.op_string = "-add"
    AddVentHorns.inputs.out_file = "rminter.nii.gz"
    wf.connect(RmInter, 'out_file', AddVentHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddVentHorns, "in_file2")

    # Intensity Range Normalization (5)
    getMaxAddVentHorns = Node(ImageStats(op_string='-r'),
                              name="getMaxAddVentHorns")
    wf.connect(AddVentHorns, 'out_file', getMaxAddVentHorns, 'in_file')

    AddVentHornsirn = Node(AbcImageMaths(), name="IntensityNormalization7")
    AddVentHornsirn.inputs.op_string = "-div"
    AddVentHornsirn.inputs.out_file = "normAddVentHorns.nii.gz"
    wf.connect(AddVentHorns, 'out_file', AddVentHornsirn, 'in_file')
    wf.connect(getMaxAddVentHorns, ('out_stat', getElementFromList, 1),
               AddVentHornsirn, "op_value")

    # Extract Lesions : extract White Matter Hyperintensities
    extract_WMH = Node(LesionExtraction(), name='Extract_WMH')
    extract_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_WMH.inputs.lesion_clust_dist = 1.0
    extract_WMH.inputs.prob_min_thresh = 0.84
    extract_WMH.inputs.prob_max_thresh = 0.84
    extract_WMH.inputs.small_lesion_size = 4.0
    extract_WMH.inputs.save_data = True
    extract_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WMH.name), extract_WMH,
               'output_dir')
    wf.connect(ERC2, 'background_proba', extract_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_WMH2 = extract_WMH.clone(name='Extract_WMH2')
    # extract_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH2.name),extract_WMH2,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH2,'location_prior_image')
    #
    # extract_WMH3 = extract_WMH.clone(name='Extract_WMH3')
    # extract_WMH3.inputs.gm_boundary_partial_vol_dist = 3.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH3.name),extract_WMH3,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     FINDING SMALL WMHs     ####
    ####################################
    Small round WMHs near the cortex are often missed by the main algorithm,
    so we add this extra detector to take care of them.
    '''

    # Recursive Ridge Diffusion : round WMH detection
    round_WMH = Node(RecursiveRidgeDiffusion(), name='round_WMH')
    round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    round_WMH.inputs.ridge_intensities = "bright"
    round_WMH.inputs.ridge_filter = "0D"
    round_WMH.inputs.orientation = "undefined"
    round_WMH.inputs.ang_factor = 1.0
    round_WMH.inputs.min_scale = 1
    round_WMH.inputs.max_scale = 4
    round_WMH.inputs.propagation_model = "none"
    round_WMH.inputs.diffusion_factor = 1.0
    round_WMH.inputs.similarity_scale = 0.1
    round_WMH.inputs.neighborhood_size = 4
    round_WMH.inputs.max_iter = 100
    round_WMH.inputs.max_diff = 0.001
    round_WMH.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, round_WMH.name),
        round_WMH, 'output_dir')
    wf.connect(ERC2, 'background_proba', round_WMH, 'input_image')
    wf.connect(AddVentHornsirn, 'out_file', round_WMH, 'loc_prior')

    # Extract Lesions : extract round WMH
    extract_round_WMH = Node(LesionExtraction(), name='Extract_round_WMH')
    extract_round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_round_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_round_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_round_WMH.inputs.lesion_clust_dist = 1.0
    extract_round_WMH.inputs.prob_min_thresh = 0.33
    extract_round_WMH.inputs.prob_max_thresh = 0.33
    extract_round_WMH.inputs.small_lesion_size = 6.0
    extract_round_WMH.inputs.save_data = True
    extract_round_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_round_WMH.name),
               extract_round_WMH, 'output_dir')
    wf.connect(round_WMH, 'ridge_pv', extract_round_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_round_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_round_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_round_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_round_WMH2 = extract_round_WMH.clone(name='Extract_round_WMH2')
    # extract_round_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH2.name),extract_round_WMH2,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH2,'location_prior_image')
    #
    # extract_round_WMH3 = extract_round_WMH.clone(name='Extract_round_WMH3')
    # extract_round_WMH3.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH3.name),extract_round_WMH3,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     COMBINE BOTH TYPES     ####
    ####################################
    Small round WMHs and regular WMH together before thresholding
    +
    PVS from white matter and internal capsule
    '''

    # Image calculator : WM + IC DVRS
    DVRS = Node(ImageMaths(), name="DVRS")
    DVRS.inputs.op_string = "-max"
    DVRS.inputs.out_file = "DVRS_map.nii.gz"
    wf.connect(extract_WM_pvs, 'lesion_score', DVRS, "in_file")
    wf.connect(extract_IC_pvs, "lesion_score", DVRS, "in_file2")

    # Image calculator : WMH + round
    WMH = Node(ImageMaths(), name="WMH")
    WMH.inputs.op_string = "-max"
    WMH.inputs.out_file = "WMH_map.nii.gz"
    wf.connect(extract_WMH, 'lesion_score', WMH, "in_file")
    wf.connect(extract_round_WMH, "lesion_score", WMH, "in_file2")

    #===========================================================================
    # WMH2 = Node(ImageMaths(), name="WMH2")
    # WMH2.inputs.op_string = "-max"
    # WMH2.inputs.out_file = "WMH2_map.nii.gz"
    # wf.connect(extract_WMH2,'lesion_score',WMH2,"in_file")
    # wf.connect(extract_round_WMH2,"lesion_score", WMH2, "in_file2")
    #
    # WMH3 = Node(ImageMaths(), name="WMH3")
    # WMH3.inputs.op_string = "-max"
    # WMH3.inputs.out_file = "WMH3_map.nii.gz"
    # wf.connect(extract_WMH3,'lesion_score',WMH3,"in_file")
    # wf.connect(extract_round_WMH3,"lesion_score", WMH3, "in_file2")
    #===========================================================================

    # Image calculator : multiply by boundary partial volume
    WMH_mul = Node(ImageMaths(), name="WMH_mul")
    WMH_mul.inputs.op_string = "-mul"
    WMH_mul.inputs.out_file = "final_mask.nii.gz"
    wf.connect(WMH, "out_file", WMH_mul, "in_file")
    wf.connect(MGDM, "distance", WMH_mul, "in_file2")

    #===========================================================================
    # WMH2_mul = Node(ImageMaths(), name="WMH2_mul")
    # WMH2_mul.inputs.op_string = "-mul"
    # WMH2_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH2,"out_file", WMH2_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH2_mul, "in_file2")
    #
    # WMH3_mul = Node(ImageMaths(), name="WMH3_mul")
    # WMH3_mul.inputs.op_string = "-mul"
    # WMH3_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH3,"out_file", WMH3_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH3_mul, "in_file2")
    #===========================================================================
    '''
    ##########################################
    ####      SEGMENTATION THRESHOLD      ####
    ##########################################
    A threshold of 0.5 is very conservative, because the final lesion score is the product of two probabilities.
    This needs to be optimized to a value between 0.25 and 0.5 to balance false negatives 
    (dominant at 0.5) and false positives (dominant at low values).
    '''
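
    # Masks are therefore produced at 0.25, 0.50 and 0.75 below, so the
    # operating point can be picked after inspecting the results.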

    # Threshold binary mask :
    DVRS_mask = Node(Threshold(), name="DVRS_mask")
    DVRS_mask.inputs.thresh = 0.25
    DVRS_mask.inputs.direction = "below"
    wf.connect(DVRS, "out_file", DVRS_mask, "in_file")

    # Threshold binary mask : 025
    WMH1_025 = Node(Threshold(), name="WMH1_025")
    WMH1_025.inputs.thresh = 0.25
    WMH1_025.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_025, "in_file")

    #===========================================================================
    # WMH2_025 = Node(Threshold(), name="WMH2_025")
    # WMH2_025.inputs.thresh = 0.25
    # WMH2_025.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_025, "in_file")
    #
    # WMH3_025 = Node(Threshold(), name="WMH3_025")
    # WMH3_025.inputs.thresh = 0.25
    # WMH3_025.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_025, "in_file")
    #===========================================================================

    # Threshold binary mask : 050
    WMH1_050 = Node(Threshold(), name="WMH1_050")
    WMH1_050.inputs.thresh = 0.50
    WMH1_050.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_050, "in_file")

    #===========================================================================
    # WMH2_050 = Node(Threshold(), name="WMH2_050")
    # WMH2_050.inputs.thresh = 0.50
    # WMH2_050.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_050, "in_file")
    #
    # WMH3_050 = Node(Threshold(), name="WMH3_050")
    # WMH3_050.inputs.thresh = 0.50
    # WMH3_050.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_050, "in_file")
    #===========================================================================

    # Threshold binary mask : 075
    WMH1_075 = Node(Threshold(), name="WMH1_075")
    WMH1_075.inputs.thresh = 0.75
    WMH1_075.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_075, "in_file")

    #===========================================================================
    # WMH2_075 = Node(Threshold(), name="WMH2_075")
    # WMH2_075.inputs.thresh = 0.75
    # WMH2_075.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_075, "in_file")
    #
    # WMH3_075 = Node(Threshold(), name="WMH3_075")
    # WMH3_075.inputs.thresh = 0.75
    # WMH3_075.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_075, "in_file")
    #===========================================================================

    ## Outputs

    DVRS_Output = Node(IdentityInterface(fields=[
        'mask', 'region', 'lesion_size', 'lesion_proba', 'boundary', 'label',
        'score'
    ]),
                       name='DVRS_Output')
    wf.connect(DVRS_mask, 'out_file', DVRS_Output, 'mask')

    WMH_output = Node(IdentityInterface(fields=[
        'mask1025', 'mask1050', 'mask1075', 'mask2025', 'mask2050', 'mask2075',
        'mask3025', 'mask3050', 'mask3075'
    ]),
                      name='WMH_output')
    wf.connect(WMH1_025, 'out_file', WMH_output, 'mask1025')
    #wf.connect(WMH2_025,'out_file',WMH_output,'mask2025')
    #wf.connect(WMH3_025,'out_file',WMH_output,'mask3025')
    wf.connect(WMH1_050, 'out_file', WMH_output, 'mask1050')
    #wf.connect(WMH2_050,'out_file',WMH_output,'mask2050')
    #wf.connect(WMH3_050,'out_file',WMH_output,'mask3050')
    wf.connect(WMH1_075, 'out_file', WMH_output, 'mask1075')
    #wf.connect(WMH2_075,'out_file',WMH_output,'mask2070')
    #wf.connect(WMH3_075,'out_file',WMH_output,'mask3075')

    return wf
Exemplo n.º 27
0
        report = Node(Function(input_names=['subject_id', 
                                             'tsnr_file', 
                                             'realignment_parameters_file', 
                                             'mean_epi_file', 
                                             'mask_file', 
                                             'reg_file', 
                                             'fssubjects_dir', 
                                             'similarity_distribution', 
                                             'mean_FD_distribution', 
                                             'tsnr_distributions', 
                                             'output_file'], 
                                output_names=['out'],
                                function=create_report), name="report_%s" % subject_id.replace(".", "_"))
        report.inputs.subject_id = subject_id
        report.inputs.tsnr_file = tsnr_file
        report.inputs.realignment_parameters_file = realignment_parameters_file
        report.inputs.mean_epi_file = mean_epi_file
        report.inputs.mask_file = mask_file
        report.inputs.reg_file = reg_file
        report.inputs.fssubjects_dir = fssubjects_dir
        report.inputs.similarity_distribution = similarity_distribution
        report.inputs.mean_FD_distribution = mean_FD_distribution
        report.inputs.tsnr_distributions = tsnr_distributions
        report.inputs.output_file = output_file
        report.plugin_args = {'override_specs': 'request_memory = 4000'}
        wf.add_nodes([report])
              
    wf.run(plugin="CondorDAGMan")
         
Exemplo n.º 28
0
# # artefact detection
# ad = Node(ra.ArtifactDetect(save_plot=False,
#                             norm_threshold=1,
#                             zintensity_threshold=3,
#                             mask_type='spm_global',
#                             use_differences = [True, False],
#                             parameter_source='FSL'),
#           name='artefactdetect')
#
# #wf.connect(getmask, 'outputspec.mask',ad, 'mask_file') mask_type='file'


###################################################################################################################################
# tsnr (input is timeseries from inputnode)
tsnr = Node(TSNR(regress_poly=2), name="tsnr")
tsnr.plugin_args = {"initial_specs": "request_memory = 30000"}


###################################################################################################################################
# create noise mask file
getthresh = Node(interface=fsl.ImageStats(op_string="-p 98"), name="getthreshold")
getthresh.plugin_args = {"initial_specs": "request_memory = 30000"}

threshold_stddev = Node(fsl.Threshold(), name="threshold")
threshold_stddev.plugin_args = {"initial_specs": "request_memory = 30000"}

preproc.connect(tsnr, "stddev_file", threshold_stddev, "in_file")
preproc.connect(tsnr, "stddev_file", getthresh, "in_file")
preproc.connect(getthresh, "out_stat", threshold_stddev, "thresh")
preproc.connect(threshold_stddev, "out_file", outputnode, "noise_mask_file")
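
# In short: fslstats -p 98 gives the 98th percentile of the tSNR
# standard-deviation map, and fsl.Threshold (default direction 'below')
# zeroes everything under that value, so only the most variable ~2% of
# voxels survive into noise_mask_file (a typical input for CompCor-style
# denoising).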
Exemplo n.º 29
0
def normalize_epi(subjects_list,
                  TR_list,
                  preprocessed_data_dir,
                  working_dir,
                  ds_dir,
                  template_dir,
                  plugin_name,
                  use_n_procs):

    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.io as nio
    from nipype.interfaces import fsl
    import nipype.interfaces.utility as util


    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='normalize')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')


    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_subject_id_[A0-9]*/', '')]

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']), name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']), name='subjects_infosource')
    subjects_infosource.iterables = ('subject_id', subjects_list)
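
    # together, the two iterables expand the workflow into one branch per
    # (subject_id, TR_id) combination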

    def add_subject_id_to_ds_dir_fct(subject_id, ds_path):
        import os
        out_path = os.path.join(ds_path, subject_id)
        return out_path

    add_subject_id_to_ds_dir = Node(util.Function(input_names=['subject_id', 'ds_path'],
                                                  output_names=['out_path'],
                                                  function=add_subject_id_to_ds_dir_fct),
                                    name='add_subject_id_to_ds_dir')
    wf.connect(subjects_infosource, 'subject_id', add_subject_id_to_ds_dir, 'subject_id')
    add_subject_id_to_ds_dir.inputs.ds_path = ds_dir

    wf.connect(add_subject_id_to_ds_dir, 'out_path', ds, 'base_directory')



    # get atlas data
    templates_atlases = {'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz',
                         }

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")

    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
    }

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    wf.connect(subjects_infosource, 'subject_id', selectfiles, 'subject_id')



    # fixme
    # CREATE TS IN MNI SPACE
    epi_MNI = Node(fsl.ApplyWarp(), name='epi_MNI')
    epi_MNI.inputs.interp = 'spline'
    epi_MNI.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles, 'preproc_epi_full_spectrum', epi_MNI, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_MNI, 'field_file')
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_MNI, 'ref_file')
    epi_MNI.inputs.out_file = 'preprocessed_fullspectrum_MNI_3mm.nii.gz'

    wf.connect(epi_MNI, 'out_file', ds, 'rsfMRI_preprocessing.epis_MNI_3mm.01_denoised')


    #####################################
    # RUN WF
    #####################################
    wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
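# A minimal, self-contained sketch (hypothetical paths) of how the SelectFiles
# templates above are expanded: nipype fills {subject_id} and {TR_id} from the
# node inputs, which the iterables defined above fan out per subject and TR.
# from nipype.interfaces.io import SelectFiles
# sf = SelectFiles({'epi': '{subject_id}/epis/TR_{TR_id}/*.nii.gz'},
#                  base_directory='/data/preprocessed')
# sf.inputs.subject_id = 'A001'
# sf.inputs.TR_id = 645
# res = sf.run()  # res.outputs.epi -> /data/preprocessed/A001/epis/TR_645/*.nii.gz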
Exemplo n.º 30
0
def create_converter_functional_pipeline(working_dir, ds_dir, name="converter_funct"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")

    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")

    # I/O NODE
    inputnode = Node(util.IdentityInterface(fields=["epi_dicom", "out_format"]), name="inputnode")

    outputnode = Node(util.IdentityInterface(fields=["epi", "TR_ms"]), name="outputnode")

    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")
    niftisink.inputs.substitutions = [("_TR_id_", "TR_")]

    # convert to nifti
    # todo check if geometry bugs attack. use dcm2nii?
    converter_epi = Node(DcmStack(embed_meta=True), name="converter_epi")
    converter_epi.plugin_args = {"submit_specs": "request_memory = 2000"}

    def reformat_filename_fct(TR_str):
        return "rsfMRI_" + TR_str

    reformat_filename = Node(
        util.Function(input_names=["TR_str"], output_names=["filename"], function=reformat_filename_fct),
        name="reformat_filename",
    )

    converter_wf.connect(inputnode, "out_format", reformat_filename, "TR_str")
    converter_wf.connect(inputnode, "epi_dicom", converter_epi, "dicom_files")
    converter_wf.connect(reformat_filename, "filename", converter_epi, "out_format")

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(converter_epi, "out_file", reor_2_std, "in_file")

    converter_wf.connect(reor_2_std, "out_file", outputnode, "epi")

    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "rsfMRI")

    # GET TR FROM .nii
    def check_TR_fct(TR):
        print " "
        print "check_TR_fct checks validity of TR"
        print ("imported TR is %s" % TR)
        print "  "
        try:
            float(TR)
        except ValueError:
            isvalid_TR = 0
            raise Exception("ERROR: TR COULD NOT AUTOMATICALLY BE EXTRACTED FROM EPI.\nEXECUTION STOPPED")
        else:
            isvalid_TR = 1
            print "TR is valid"
        if isvalid_TR:
            if float(TR) <= 0:
                raise Exception("ERROR: TR NOT VALID (<=0).\nEXECUTION STOPPED")
        return float(TR)
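    # note: check_TR_fct returns the header value unchanged; the 'TR_ms'
    # naming assumes the NIfTI header stores the repetition time in ms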

    get_TR = Node(ImageInfo(), name="get_TR")
    converter_wf.connect(reor_2_std, "out_file", get_TR, "in_file")

    check_TR = Node(util.Function(input_names=["TR"], output_names=["TR_ms"], function=check_TR_fct), name="check_TR")

    converter_wf.connect(get_TR, "TR", check_TR, "TR")
    converter_wf.connect(check_TR, "TR_ms", outputnode, "TR_ms")

    converter_wf.write_graph(dotfilename=converter_wf.name, graph2use="flat", format="pdf")

    return converter_wf
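# A minimal usage sketch; the directories and the DICOM list below are
# hypothetical placeholders.
# converter = create_converter_functional_pipeline('/scr/working_dir', '/scr/ds_dir')
# converter.inputs.inputnode.epi_dicom = ['/scr/dicoms/ep_0001.dcm']
# converter.inputs.inputnode.out_format = 'TR_645'
# converter.run()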
Exemplo n.º 31
0
                        (subject_id))[0]
        fssubjects_dir = "/scr/adenauer1/internet_study/freesurfer/"

        output_file = "/scr/adenauer1/internet_study/results/%s/report.pdf" % (
            subject_id)

        report = Node(Function(input_names=[
            'subject_id', 'tsnr_file', 'realignment_parameters_file',
            'mean_epi_file', 'mask_file', 'reg_file', 'fssubjects_dir',
            'similarity_distribution', 'mean_FD_distribution',
            'tsnr_distributions', 'output_file'
        ],
                               output_names=['out'],
                               function=create_report),
                      name="report_%s" % (subject_id).replace(".", "_"))
        report.inputs.subject_id = subject_id
        report.inputs.tsnr_file = tsnr_file
        report.inputs.realignment_parameters_file = realignment_parameters_file
        report.inputs.mean_epi_file = mean_epi_file
        report.inputs.mask_file = mask_file
        report.inputs.reg_file = reg_file
        report.inputs.fssubjects_dir = fssubjects_dir
        report.inputs.similarity_distribution = similarity_distribution
        report.inputs.mean_FD_distribution = mean_FD_distribution
        report.inputs.tsnr_distributions = tsnr_distributions
        report.inputs.output_file = output_file
        report.plugin_args = {'override_specs': 'request_memory = 4000'}
        wf.add_nodes([report])

    wf.run(plugin="CondorDAGMan")
Exemplo n.º 32
0
def create_nonlinear_pipeline(name='nonlinear'):
    
    # workflow
    nonlinear=Workflow(name='nonlinear')
    
    # inputnode
    inputnode=Node(util.IdentityInterface(fields=['t1_highres',
                                                  'epi2highres_lin',
                                                  'epi2highres_lin_itk',
                                                  'fov_mask',
                                                  'brain_mask',
                                                  'wmcsf_mask',
                                                  'highres2lowres_itk'
                                                  ]),
                   name='inputnode')
    
    # outputnode                                 
    outputnode=Node(util.IdentityInterface(fields=['epi2highres_warp',
                                                   'epi2highres_invwarp',
                                                   'epi2highres_nonlin',
                                                   'brainmask_highres',
                                                   'wmcsfmask_highres'
                                                   ]),
                    name='outputnode')
    
    # project brainmask and wmcsf mask from lowres to highres mp2rage space
    brainmask = Node(ants.ApplyTransforms(dimension=3,
                                          invert_transform_flags=[True],
                                          interpolation = 'NearestNeighbor'),
                     name='brainmask')
    
    wmcsf_mask = Node(ants.ApplyTransforms(dimension=3,
                                      invert_transform_flags=[True],
                                      interpolation = 'NearestNeighbor'),
                 name='wmcsf_mask')
    
    # mask t1
    
     
    #dilate brainmask 
    dil_brainmask = Node(fs.Binarize(min=0.5,
                                 out_type = 'nii.gz',
                                 dilate=15),
                      name='dil_brainmask')

    
    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')
    
    nonlinear.connect([(inputnode, brainmask, [('brain_mask', 'input_image'),
                                               ('t1_highres', 'reference_image'),
                                               ('highres2lowres_itk', 'transforms')]),
                       (brainmask, outputnode, [('output_image', 'brainmask_highres')]),
                       (inputnode, wmcsf_mask, [('wmcsf_mask', 'input_image'),
                                               ('t1_highres', 'reference_image'),
                                               ('highres2lowres_itk', 'transforms')]),
                       (wmcsf_mask, outputnode, [('output_image', 'wmcsfmask_highres')]),
                       (brainmask, dil_brainmask, [('output_image', 'in_file')]),
                       
                       (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
                       (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
                       ])
    
    # transform fov mask, dilate and apply to t1       
    transform_fov = Node(ants.ApplyTransforms(dimension=3,
                                              output_image='fov_mask_highres.nii.gz',
                                              interpolation = 'NearestNeighbor'),
                          'transform_fov')
    
    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')   
    
    #mask t1 twice
    mask_t1_1 = Node(fsl.ApplyMask(out_file='t1_brain_masked.nii.gz'),
                name='mask_t1_1')
    
    mask_t1_2 = Node(fsl.ApplyMask(out_file='t1_brain_fov_masked.nii.gz'),
                    name='mask_t1_2')
    
    nonlinear.connect([(inputnode, transform_fov, [('fov_mask', 'input_image'),
                                                   ('t1_highres', 'reference_image'),
                                                   ('epi2highres_lin_itk', 'transforms')]),
                       (transform_fov, dilate_fov, [('output_image', 'in_file')]),
                       (brainmask, mask_t1_1, [('output_image', 'mask_file')]),
                       (inputnode, mask_t1_1, [('t1_highres', 'in_file')]),
                       (dilate_fov, mask_t1_2, [('binary_file', 'mask_file')]),
                       (mask_t1_1, mask_t1_2, [('out_file', 'in_file')]),
                       ])
    
    
    # normalization with ants
    antsreg = Node(interface = ants.registration.Registration(dimension = 3,
                                                           metric = ['CC'],
                                                           metric_weight = [1.0],
                                                           radius_or_number_of_bins = [4],
                                                           sampling_strategy = ['None'],
                                                           transforms = ['SyN'],
                                                           args = '-g 0.1x1x0.1',
                                                           transform_parameters = [(0.10,3,0)],
                                                           number_of_iterations = [[50,20,10]],
                                                           convergence_threshold = [1e-06],
                                                           convergence_window_size = [10],
                                                           shrink_factors = [[4,2,1]],
                                                           smoothing_sigmas = [[2,1,0]],
                                                           sigma_units = ['vox'],
                                                           use_estimate_learning_rate_once = [True],
                                                           use_histogram_matching = [True],
                                                           collapse_output_transforms=True,
                                                           output_inverse_warped_image = True,
                                                           output_warped_image = True,
                                                           interpolation = 'BSpline'),
                      name = 'antsreg')
    antsreg.plugin_args={'override_specs': 'request_memory = 40000'}
       
    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1_2, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode, [('reverse_transforms', 'epi2highres_invwarp'),
                                              ('forward_transforms', 'epi2highres_warp'),
                                              ('warped_image', 'epi2highres_nonlin')])
                        ])
     
    return nonlinear
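# A minimal usage sketch for this pipeline; all paths are hypothetical.
# nl = create_nonlinear_pipeline('nonlinear')
# nl.base_dir = '/scr/working_dir'
# nl.inputs.inputnode.t1_highres = '/scr/sub001/t1_highres.nii.gz'
# nl.inputs.inputnode.epi2highres_lin = '/scr/sub001/epi2highres_lin.nii.gz'
# nl.inputs.inputnode.epi2highres_lin_itk = '/scr/sub001/epi2highres_lin.txt'
# nl.inputs.inputnode.fov_mask = '/scr/sub001/fov_mask.nii.gz'
# nl.inputs.inputnode.brain_mask = '/scr/sub001/brain_mask.nii.gz'
# nl.inputs.inputnode.wmcsf_mask = '/scr/sub001/wmcsf_mask.nii.gz'
# nl.inputs.inputnode.highres2lowres_itk = '/scr/sub001/highres2lowres.txt'
# nl.run()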
Exemplo n.º 33
0
def create_nonlinear_pipeline(name='nonlinear'):
    
    # workflow
    nonlinear=Workflow(name='nonlinear')
    
    # inputnode
    inputnode=Node(util.IdentityInterface(fields=['t1_highres',
                                                  'epi2highres_lin',
                                                  'epi2highres_lin_itk',
                                                  'fov_mask',
                                                  'brain_mask',
                                                  #'highres2lowres_itk'
                                                  ]),
                   name='inputnode')
    
    # outputnode                                 
    outputnode=Node(util.IdentityInterface(fields=['epi2highres_warp',
                                                   'epi2highres_invwarp',
                                                   'epi2highres_nonlin',
                                                   ]),
                    name='outputnode')
    
#     
#     brainmask = Node(ants.ApplyTransforms(dimension=3,
#                                           invert_transform_flags=[True],
#                                           interpolation = 'NearestNeighbor'),
#                      name='brainmask')
#     
    dil_brainmask = Node(fs.Binarize(min=0.5,
                                 out_type = 'nii.gz',
                                 dilate=15),
                      name='dil_brainmask')

    
    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')
    
    nonlinear.connect([#(inputnode, brainmask, [('brain_mask', 'input_image'),
                       #                        ('t1_highres', 'reference_image'),
                       #                        ('highres2lowres_itk', 'transforms')]),
                       #(brainmask, dil_brainmask, [('output_image', 'in_file')]),
                       (inputnode, dil_brainmask, [('brain_mask', 'in_file')]),
                       (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
                       (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
                       ])
    
    # transform fov mask and apply to t1       
    transform_fov = Node(ants.ApplyTransforms(dimension=3,
                                              #invert_transform_flags=[True, False],
                                              output_image='fov_mask_highres.nii.gz',
                                              interpolation = 'NearestNeighbor'),
                          'transform_fov')
    
    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')   
    
    
    mask_t1 = Node(fsl.ApplyMask(out_file='t1_fov_masked.nii.gz'),
                    name='mask_t1')
    
    nonlinear.connect([(inputnode, transform_fov, [('fov_mask', 'input_image'),
                                                   ('t1_highres', 'reference_image'),
                                                   ('epi2highres_lin_itk', 'transforms')]),
                       (transform_fov, dilate_fov, [('output_image', 'in_file')]),
                       (dilate_fov, mask_t1, [('binary_file', 'mask_file')]),
                       (inputnode, mask_t1, [('t1_highres', 'in_file')]),
                       ])
    
    
    # normalization with ants
    antsreg = Node(interface = ants.registration.Registration(dimension = 3,
                                                           metric = ['CC'],
                                                           metric_weight = [1.0],
                                                           radius_or_number_of_bins = [4],
                                                           sampling_strategy = ['None'],
                                                           transforms = ['SyN'],
                                                           args = '-g 0.1x1x0.1',
                                                           transform_parameters = [(0.10,3,0)],
                                                           number_of_iterations = [[50,20,10]],
                                                           convergence_threshold = [1e-06],
                                                           convergence_window_size = [10],
                                                           shrink_factors = [[4,2,1]],
                                                           smoothing_sigmas = [[2,1,0]],
                                                           sigma_units = ['vox'],
                                                           use_estimate_learning_rate_once = [True],
                                                           use_histogram_matching = [True],
                                                           collapse_output_transforms=True,
                                                           output_inverse_warped_image = True,
                                                           output_warped_image = True,
                                                           interpolation = 'BSpline'),
                      name = 'antsreg')
    antsreg.plugin_args={'submit_specs': 'request_memory = 20000'}
       
    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode, [('reverse_transforms', 'epi2highres_invwarp'),
                                              ('forward_transforms', 'epi2highres_warp'),
                                              ('warped_image', 'epi2highres_nonlin')])
                        ])
     
    return nonlinear

# test_nonlinear=create_nonlinear_pipeline('nonlinear')
# test_nonlinear.base_dir='/scr/kansas1/huntenburg/7tresting/working/highres_bias_bspline_maskepi/'
# test_nonlinear.config['execution']['crashdump_dir'] = test_nonlinear.base_dir + "/crash_files"
# test_nonlinear.config['execution']['remove_unnecessary_outputs'] = False
# test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/t1_resampled.nii.gz'
# test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/rest_coregistered_mean.nii.gz'
# #test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/highres/t1.nii.gz'
# #test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/coreg_testing/flirt_epi2highres_t1_cr_bbr_onestep.nii.gz'
# test_nonlinear.run()#plugin='CondorDAGMan')
Exemplo n.º 34
0
def normalize_epi(subjects_list, TR_list, preprocessed_data_dir,
                  selectfiles_templates, ref_file, working_dir, ds_dir,
                  plugin_name, use_n_procs):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.io as nio
    from nipype.interfaces import fsl
    import nipype.interfaces.utility as util

    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='normalize')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 120
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_subject_id_[A0-9]*/', '')]

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']),
                           name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']),
                               name='subjects_infosource')
    subjects_infosource.iterables = ('subject_id', subjects_list)

    # short id if data on afs
    # note: nipype Function nodes re-execute the function source in isolation
    # and only see their declared inputs, so preprocessed_data_dir is passed
    # in explicitly instead of being captured from the enclosing scope
    def short_id_fct(id, preprocessed_data_dir):
        if preprocessed_data_dir.startswith('/a/proje'):
            if id.startswith('A'):
                return 'A' + id[2:]
            else:
                return id
        else:
            return id

    short_id = Node(util.Function(input_names=['id', 'preprocessed_data_dir'],
                                  output_names=['subject_id'],
                                  function=short_id_fct),
                    name='short_id')
    short_id.inputs.preprocessed_data_dir = preprocessed_data_dir
    wf.connect(subjects_infosource, 'subject_id', short_id, 'id')

    def add_subject_id_to_ds_dir_fct(subject_id, ds_path):
        import os
        out_path = os.path.join(ds_path, subject_id)
        return out_path

    add_subject_id_to_ds_dir = Node(util.Function(
        input_names=['subject_id', 'ds_path'],
        output_names=['out_path'],
        function=add_subject_id_to_ds_dir_fct),
                                    name='add_subject_id_to_ds_dir')
    wf.connect(subjects_infosource, 'subject_id', add_subject_id_to_ds_dir,
               'subject_id')
    add_subject_id_to_ds_dir.inputs.ds_path = ds_dir

    wf.connect(add_subject_id_to_ds_dir, 'out_path', ds, 'base_directory')

    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    wf.connect(short_id, 'subject_id', selectfiles, 'subject_id')

    # CREATE TS IN MNI SPACE
    epi_MNI_01_denoised = Node(fsl.ApplyWarp(), name='epi_MNI_01_denoised')
    epi_MNI_01_denoised.inputs.interp = 'spline'
    epi_MNI_01_denoised.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles, 'preproc_epi_full_spectrum', epi_MNI_01_denoised,
               'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_MNI_01_denoised,
               'field_file')
    # wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_MNI_01_denoised, 'ref_file')
    epi_MNI_01_denoised.inputs.ref_file = ref_file
    epi_MNI_01_denoised.inputs.out_file = 'preprocessed_fullspectrum_MNI_3mm.nii.gz'

    wf.connect(epi_MNI_01_denoised, 'out_file', ds,
               'rsfMRI_preprocessing.epis_MNI_3mm.01_denoised')

    epi_MNI_03_bp_tNorm = Node(fsl.ApplyWarp(), name='epi_MNI_03_bp_tNorm')
    epi_MNI_03_bp_tNorm.inputs.interp = 'spline'
    epi_MNI_03_bp_tNorm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles, 'preproc_epi_bp_tNorm', epi_MNI_03_bp_tNorm,
               'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_MNI_03_bp_tNorm,
               'field_file')
    # wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_MNI_03_bp_tNorm, 'ref_file')
    epi_MNI_03_bp_tNorm.inputs.ref_file = ref_file
    epi_MNI_03_bp_tNorm.inputs.out_file = 'residual_filt_norm_warp.nii.gz'

    wf.connect(epi_MNI_03_bp_tNorm, 'out_file', ds,
               'rsfMRI_preprocessing.epis_MNI_3mm.03_denoised_BP_tNorm')

    #####################################
    # RUN WF
    #####################################
    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
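# A minimal invocation sketch; all paths and lists are hypothetical
# placeholders, and the selectfiles_templates dict (my_templates below) must
# define the keys used above ('epi_2_MNI_warp', 'preproc_epi_full_spectrum',
# 'preproc_epi_bp_tNorm').
# normalize_epi(subjects_list=['A001'], TR_list=[645],
#               preprocessed_data_dir='/data/preprocessed',
#               selectfiles_templates=my_templates,
#               ref_file='/data/templates/MNI152_T1_3mm_brain.nii.gz',
#               working_dir='/data/working', ds_dir='/data/results',
#               plugin_name='MultiProc', use_n_procs=4)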
Exemplo n.º 35
0
def create_registration_pipeline(working_dir, freesurfer_dir, ds_dir, name='registration'):
    """
    find transformations between struct, funct, and MNI
    """

    # initiate workflow
    reg_wf = Workflow(name=name)
    reg_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting', 'rsfMRI_preprocessing')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['initial_mean_epi_moco',
                                                    't1w',
                                                    't1w_brain',
                                                    'subject_id',
                                                    'wm_mask_4_bbr',
                                                    'struct_brain_mask']),
                     name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=['struct_2_MNI_warp',
                                                     'struct_2_MNI_mat_flirt',
                                                     'epi_2_struct_mat',
                                                     'struct_2_epi_mat',
                                                     'epi_2_MNI_warp',
                                                     'MNI_2_epi_warp',
                                                     'fs_2_struct_mat',
                                                     'mean_epi_structSpace',
                                                     'mean_epi_MNIspace',
                                                     'struct_MNIspace']),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]




    ##########################################
    # TOC REGISTRATION MATS AND WARPS
    ##########################################
    # I. STRUCT -> MNI
    ## 1. STRUCT -> MNI with FLIRT
    ## 2. CALC. WARP STRUCT -> MNI with FNIRT

    # II.EPI -> STRUCT
    ## 3. calc EPI->STRUCT initial registration
    ## 4. run EPI->STRUCT via bbr
    ## 5. INVERT to get: STRUCT -> EPI

    # III. COMBINE I. & II.: EPI -> MNI
    ## 6. COMBINE MATS: EPI -> MNI
    ## 7. MNI -> EPI


    ##########################################
    # CREATE REGISTRATION MATS AND WARPS
    ##########################################

    # I. STRUCT -> MNI
    ##########################################
    # 1. REGISTER STRUCT -> MNI with FLIRT
    struct_2_MNI_mat = Node(fsl.FLIRT(dof=12), name='struct_2_MNI_mat')
    struct_2_MNI_mat.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')

    reg_wf.connect(inputnode, 't1w_brain', struct_2_MNI_mat, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', outputnode, 'struct_2_MNI_mat_flirt')



    # 2. CALC. WARP STRUCT -> MNI with FNIRT
    # cf. wrt. 2mm
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1311&L=FSL&P=R86108&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
    struct_2_MNI_warp = Node(fsl.FNIRT(), name='struct_2_MNI_warp')
    struct_2_MNI_warp.inputs.config_file = 'T1_2_MNI152_2mm'
    struct_2_MNI_warp.inputs.ref_file = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    struct_2_MNI_warp.inputs.field_file = 'struct_2_MNI_warp.nii.gz'
    struct_2_MNI_warp.plugin_args = {'submit_specs': 'request_memory = 4000'}


    reg_wf.connect(inputnode, 't1w', struct_2_MNI_warp, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', struct_2_MNI_warp, 'affine_file')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', ds, 'registration.struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', outputnode, 'struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', outputnode, 'struct_MNIspace')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', ds, 'registration.struct_MNIspace')


    # II.EPI -> STRUCT (via bbr)
    ##########################################

    # 3. calc EPI->STRUCT initial registration with flirt dof=6 and corratio
    epi_2_struct_flirt6_mat = Node(fsl.FLIRT(dof=6, cost='corratio'), name='epi_2_struct_flirt6_mat')
    epi_2_struct_flirt6_mat.inputs.out_file = 'epi_structSpace_flirt6.nii.gz'
    reg_wf.connect(inputnode, 't1w_brain', epi_2_struct_flirt6_mat, 'reference')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', epi_2_struct_flirt6_mat, 'in_file')

    # 4. run EPI->STRUCT via bbr
    bbr_schedule = os.path.join(os.getenv('FSLDIR'), 'etc/flirtsch/bbr.sch')
    epi_2_struct_bbr_mat = Node(interface=fsl.FLIRT(dof=6, cost='bbr'), name='epi_2_struct_bbr_mat')
    epi_2_struct_bbr_mat.inputs.schedule = bbr_schedule
    epi_2_struct_bbr_mat.inputs.out_file = 'epi_structSpace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', epi_2_struct_bbr_mat, 'in_file')
    reg_wf.connect(inputnode, 't1w_brain', epi_2_struct_bbr_mat, 'reference')
    reg_wf.connect(epi_2_struct_flirt6_mat, 'out_matrix_file', epi_2_struct_bbr_mat, 'in_matrix_file')
    reg_wf.connect(inputnode, 'wm_mask_4_bbr', epi_2_struct_bbr_mat, 'wm_seg')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', ds, 'registration.epi_2_struct_mat')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_file', outputnode, 'mean_epi_structSpace')


    # 5. INVERT to get: STRUCT -> EPI
    struct_2_epi_mat = Node(fsl.ConvertXFM(invert_xfm=True), name='struct_2_epi_mat')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', struct_2_epi_mat, 'in_file')
    reg_wf.connect(struct_2_epi_mat, 'out_file', outputnode, 'struct_2_epi_mat')


    # III. COMBINE I. & II.: EPI -> MNI
    ##########################################
    # 6. COMBINE MATS: EPI -> MNI
    epi_2_MNI_warp = Node(fsl.ConvertWarp(), name='epi_2_MNI_warp')
    epi_2_MNI_warp.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    reg_wf.connect(epi_2_struct_bbr_mat, 'out_matrix_file', epi_2_MNI_warp, 'premat')  # epi2struct
    reg_wf.connect(struct_2_MNI_warp, 'field_file', epi_2_MNI_warp, 'warp1')  # struct2mni
    reg_wf.connect(epi_2_MNI_warp, 'out_file', outputnode, 'epi_2_MNI_warp')
    reg_wf.connect(epi_2_MNI_warp, 'out_file', ds, 'registration.epi_2_MNI_warp')


    # output: out_file

    # 7. MNI -> EPI
    MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
    # 'reference' is supplied by the initial_mean_epi_moco connection below
    reg_wf.connect(epi_2_MNI_warp, 'out_file', MNI_2_epi_warp, 'warp')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', MNI_2_epi_warp, 'reference')
    reg_wf.connect(MNI_2_epi_warp, 'inverse_warp', outputnode, 'MNI_2_epi_warp')
    # output: inverse_warp




    ##########################################
    # TRANSFORM VOLUMES
    ##########################################

    # CREATE STRUCT IN EPI SPACE FOR DEBUGGING
    struct_epiSpace = Node(fsl.ApplyXfm(), name='struct_epiSpace')
    struct_epiSpace.inputs.out_file = 'struct_brain_epiSpace.nii.gz'
    reg_wf.connect(inputnode, 't1w_brain', struct_epiSpace, 'in_file')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', struct_epiSpace, 'reference')
    reg_wf.connect(struct_2_epi_mat, 'out_file', struct_epiSpace, 'in_matrix_file')
    reg_wf.connect(struct_epiSpace, 'out_file', ds, 'QC.struct_brain_epiSpace')

    # CREATE EPI IN MNI SPACE
    mean_epi_MNIspace = Node(fsl.ApplyWarp(), name='mean_epi_MNIspace')
    mean_epi_MNIspace.inputs.ref_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    mean_epi_MNIspace.inputs.out_file = 'mean_epi_MNIspace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', mean_epi_MNIspace, 'in_file')
    reg_wf.connect(epi_2_MNI_warp, 'out_file', mean_epi_MNIspace, 'field_file')
    reg_wf.connect(mean_epi_MNIspace, 'out_file', ds, 'registration.mean_epi_MNIspace')
    reg_wf.connect(mean_epi_MNIspace, 'out_file', outputnode, 'mean_epi_MNIspace')



    # CREATE MNI IN EPI SPACE FOR DEBUGGING
    MNI_epiSpace = Node(fsl.ApplyWarp(), name='MNI_epiSpace')
    MNI_epiSpace.inputs.in_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    MNI_epiSpace.inputs.out_file = 'MNI_epiSpace.nii.gz'
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', MNI_epiSpace, 'ref_file')
    reg_wf.connect(MNI_2_epi_warp, 'inverse_warp', MNI_epiSpace, 'field_file')
    reg_wf.connect(MNI_epiSpace, 'out_file', ds, 'registration.MNI_epiSpace')



    reg_wf.write_graph(dotfilename=reg_wf.name, graph2use='flat', format='pdf')

    return reg_wf
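# A minimal wiring sketch for embedding this pipeline in a parent workflow;
# 'parent_wf', 'moco_wf' and 'struct_wf' are hypothetical nodes/workflows.
# reg = create_registration_pipeline(working_dir, freesurfer_dir, ds_dir)
# parent_wf.connect(moco_wf, 'outputnode.initial_mean_epi_moco',
#                   reg, 'inputnode.initial_mean_epi_moco')
# parent_wf.connect(struct_wf, 'outputnode.t1w', reg, 'inputnode.t1w')
# parent_wf.connect(struct_wf, 'outputnode.t1w_brain', reg, 'inputnode.t1w_brain')
# parent_wf.connect(struct_wf, 'outputnode.wm_mask_4_bbr', reg, 'inputnode.wm_mask_4_bbr')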
Exemplo n.º 36
0
reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
reg.inputs.convergence_window_size = [20] * 2 + [5]
reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
reg.inputs.sigma_units = ['vox'] * 3
reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
reg.inputs.use_estimate_learning_rate_once = [True] * 3
reg.inputs.use_histogram_matching = [False] * 2 + [True]
reg.inputs.winsorize_lower_quantile = 0.005
reg.inputs.winsorize_upper_quantile = 0.995
reg.inputs.float = True
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
reg.inputs.num_threads = 4
reg.plugin_args = {'bsub_args': '-n%d' % 4}
antsreg_wf.connect(fs_skullstrip_wf, 'outputspec.skullstripped_file', reg,
                   'moving_image')

# Move the results to a designated results folder
datasink = Node(DataSink(), name="datasink")
datasink.inputs.base_directory = os.path.join(projdir, "norm_anat")
antsreg_wf.connect(subjID_infosource, 'subject_id', datasink, 'container')
antsreg_wf.connect(reg, 'composite_transform', datasink, 'anat2targ_xfm')
antsreg_wf.connect(reg, 'inverse_composite_transform', datasink,
                   'targ2anat_xfm')
antsreg_wf.connect(reg, 'warped_image', datasink, 'warped_image')

# Run the workflow
antsreg_wf.run(plugin='LSF', plugin_args={'bsub_args': ('-q PQ_madlab')})
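# Design note (a hedged sketch, names from this snippet): the '-n%d' % 4 in
# plugin_args requests as many LSF slots as reg.inputs.num_threads uses, so
# keeping the two in step with one variable avoids oversubscribing a node.
# n_threads = 4
# reg.inputs.num_threads = n_threads
# reg.plugin_args = {'bsub_args': '-n%d' % n_threads}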
Exemplo n.º 37
0
def calc_local_metrics(cfg):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as freesurfer

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils
    import CPAC.vmhc.vmhc as cpac_vmhc
    import CPAC.registration.registration as cpac_registration
    import CPAC.network_centrality.z_score as cpac_centrality_z_score

    import utils as calc_metrics_utils


    # INPUT PARAMETERS
    dicom_dir = cfg['dicom_dir']
    preprocessed_data_dir = cfg['preprocessed_data_dir']

    working_dir = cfg['working_dir']
    freesurfer_dir = cfg['freesurfer_dir']
    template_dir = cfg['template_dir']
    script_dir = cfg['script_dir']
    ds_dir = cfg['ds_dir']

    subject_id = cfg['subject_id']
    TR_list = cfg['TR_list']

    vols_to_drop = cfg['vols_to_drop']
    rois_list = cfg['rois_list']
    lp_cutoff_freq = cfg['lp_cutoff_freq']
    hp_cutoff_freq = cfg['hp_cutoff_freq']
    use_fs_brainmask = cfg['use_fs_brainmask']

    use_n_procs = cfg['use_n_procs']
    plugin_name = cfg['plugin_name']
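    # An illustrative cfg (hypothetical values; keys match the reads above):
    # cfg = {'dicom_dir': '/data/dicoms', 'preprocessed_data_dir': '/data/pre',
    #        'working_dir': '/data/work', 'freesurfer_dir': '/data/fs',
    #        'template_dir': '/data/templates', 'script_dir': '/data/scripts',
    #        'ds_dir': '/data/results', 'subject_id': 'A001', 'TR_list': [645],
    #        'vols_to_drop': 5, 'rois_list': [], 'lp_cutoff_freq': 0.1,
    #        'hp_cutoff_freq': 0.01, 'use_fs_brainmask': True,
    #        'use_n_procs': 4, 'plugin_name': 'MultiProc'}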



    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    wf = Workflow(name='LeiCA_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_variabilty_MNIspace_3mm[0-9]*/', ''), ('_z_score[0-9]*/', '')]


    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']), name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)



    # get atlas data
    templates_atlases = {  # 'GM_mask_MNI_2mm': 'SPM_GM/SPM_GM_mask_2mm.nii.gz',
                           # 'GM_mask_MNI_3mm': 'SPM_GM/SPM_GM_mask_3mm.nii.gz',
                           'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz',
                           'vmhc_symm_brain': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_symmetric.nii.gz',
                           'vmhc_symm_brain_3mm': 'cpac_image_resources/symmetric/MNI152_T1_3mm_brain_symmetric.nii.gz',
                           'vmhc_symm_skull': 'cpac_image_resources/symmetric/MNI152_T1_2mm_symmetric.nii.gz',
                           'vmhc_symm_brain_mask_dil': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz',
                           'vmhc_config_file_2mm': 'cpac_image_resources/symmetric/T1_2_MNI152_2mm_symmetric.cnf'
                           }

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")


    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'epi_mask': '{subject_id}/rsfMRI_preprocessing/masks/brain_mask_epiSpace/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
        'epi_2_struct_mat': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_struct_mat/TR_{TR_id}/*.mat',
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
        't1w_brain': '{subject_id}/rsfMRI_preprocessing/struct_prep/t1w_brain/t1w_reoriented_maths.nii.gz',
    }

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name="selectfiles")
    wf.connect(scan_infosource, 'TR_id', selectfiles, 'TR_id')
    selectfiles.inputs.subject_id = subject_id



    # CREATE TRANSFORMATIONS
    # create MNI 2 epi warp
    MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
    # 'reference' is supplied by the epi_mask connection below
    wf.connect(selectfiles, 'epi_mask', MNI_2_epi_warp, 'reference')
    wf.connect(selectfiles, 'epi_2_MNI_warp', MNI_2_epi_warp, 'warp')


    # # CREATE GM MASK IN EPI SPACE
    # GM_mask_epiSpace = Node(fsl.ApplyWarp(), name='GM_mask_epiSpace')
    # GM_mask_epiSpace.inputs.out_file = 'GM_mask_epiSpace.nii.gz'
    #
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_2mm', GM_mask_epiSpace, 'in_file')
    # wf.connect(selectfiles, 'epi_mask', GM_mask_epiSpace, 'ref_file')
    # wf.connect(MNI_2_epi_warp, 'inverse_warp', GM_mask_epiSpace, 'field_file')
    # wf.connect(GM_mask_epiSpace, 'out_file', ds, 'GM_mask_epiSpace')



    # fixme
    # # CREATE TS IN MNI SPACE
    # # is it ok to apply the 2mm warpfield to the 3mm template?
    # # seems ok: https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind0904&L=FSL&P=R14011&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
    # epi_bp_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_bp_MNIspace_3mm')
    # epi_bp_MNIspace_3mm.inputs.interp = 'spline'
    # epi_bp_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    # wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_bp_MNIspace_3mm, 'ref_file')
    # wf.connect(selectfiles, 'preproc_epi_bp', epi_bp_MNIspace_3mm, 'in_file')
    # wf.connect(selectfiles, 'epi_2_MNI_warp', epi_bp_MNIspace_3mm, 'field_file')


    # CREATE EPI MASK IN MNI SPACE
    epi_mask_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_mask_MNIspace_3mm')
    epi_mask_MNIspace_3mm.inputs.interp = 'nn'
    epi_mask_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_mask_MNIspace_3mm, 'ref_file')
    wf.connect(selectfiles, 'epi_mask', epi_mask_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', epi_mask_MNIspace_3mm, 'field_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', ds, 'epi_mask_MNIspace_3mm')


    #####################
    # CALCULATE METRICS
    #####################

    # f/ALFF
    alff = cpac_alff.create_alff('alff')
    alff.inputs.hp_input.hp = 0.01
    alff.inputs.lp_input.lp = 0.1
    wf.connect(selectfiles, 'preproc_epi_full_spectrum', alff, 'inputspec.rest_res')
    # wf.connect(GM_mask_epiSpace, 'out_file', alff, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', alff, 'inputspec.rest_mask')
    wf.connect(alff, 'outputspec.alff_img', ds, 'alff.alff')
    wf.connect(alff, 'outputspec.falff_img', ds, 'alff.falff')



    # f/ALFF 2 MNI
    # fixme spline or default?
    alff_MNIspace_3mm = Node(fsl.ApplyWarp(), name='alff_MNIspace_3mm')
    alff_MNIspace_3mm.inputs.interp = 'spline'
    alff_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', alff_MNIspace_3mm, 'ref_file')
    wf.connect(alff, 'outputspec.alff_img', alff_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', alff_MNIspace_3mm, 'field_file')
    wf.connect(alff_MNIspace_3mm, 'out_file', ds, 'alff.alff_MNI_3mm')

    falff_MNIspace_3mm = Node(fsl.ApplyWarp(), name='falff_MNIspace_3mm')
    falff_MNIspace_3mm.inputs.interp = 'spline'
    falff_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', falff_MNIspace_3mm, 'ref_file')
    wf.connect(alff, 'outputspec.falff_img', falff_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', falff_MNIspace_3mm, 'field_file')
    wf.connect(falff_MNIspace_3mm, 'out_file', ds, 'alff.falff_MNI_3mm')



    # f/ALFF_MNI Z-SCORE
    alff_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='alff_MNIspace_3mm', wf_name='alff_MNIspace_3mm_Z')
    wf.connect(alff_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', alff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(alff_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'alff.alff_MNI_3mm_Z')

    falff_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='falff_MNIspace_3mm', wf_name='falff_MNIspace_3mm_Z')
    wf.connect(falff_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', falff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(falff_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'alff.falff_MNI_3mm_Z')


    # f/ALFF_MNI STANDARDIZE BY MEAN
    alff_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='alff_MNIspace_3mm_standardized_mean')
    wf.connect(alff_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', alff_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(alff_MNIspace_3mm_standardized_mean, 'outputnode.out_file', ds, 'alff.alff_MNI_3mm_standardized_mean')

    falff_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='falff_MNIspace_3mm_standardized_mean')
    wf.connect(falff_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', falff_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(falff_MNIspace_3mm_standardized_mean, 'outputnode.out_file', ds, 'alff.falff_MNI_3mm_standardized_mean')





    # REHO
    reho = cpac_reho.create_reho()
    reho.inputs.inputspec.cluster_size = 27
    wf.connect(selectfiles, 'preproc_epi_bp', reho, 'inputspec.rest_res_filt')
    # wf.connect(GM_mask_epiSpace, 'out_file', reho, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', reho, 'inputspec.rest_mask')
    wf.connect(reho, 'outputspec.raw_reho_map', ds, 'reho.reho')



    # REHO 2 MNI
    # fixme spline or default?
    reho_MNIspace_3mm = Node(fsl.ApplyWarp(), name='reho_MNIspace_3mm')
    reho_MNIspace_3mm.inputs.interp = 'spline'
    reho_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', reho_MNIspace_3mm, 'ref_file')
    wf.connect(reho, 'outputspec.raw_reho_map', reho_MNIspace_3mm, 'in_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', reho_MNIspace_3mm, 'field_file')
    wf.connect(reho_MNIspace_3mm, 'out_file', ds, 'reho.reho_MNI_3mm')



    # REHO_MNI Z-SCORE
    reho_MNIspace_3mm_Z = cpac_utils.get_zscore(input_name='reho_MNIspace_3mm', wf_name='reho_MNIspace_3mm_Z')
    wf.connect(reho_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', reho_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(reho_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'reho.reho_MNI_3mm_Z')



    # REHO_MNI STANDARDIZE BY MEAN
    reho_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='reho_MNIspace_3mm_standardized_mean')
    wf.connect(reho_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', reho_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(reho_MNIspace_3mm_standardized_mean, 'outputnode.out_file', ds, 'reho.reho_MNI_3mm_standardized_mean')



    # VMHC
    # create registration to symmetrical MNI template
    struct_2_MNI_symm = cpac_registration.create_nonlinear_register(name='struct_2_MNI_symm')
    wf.connect(selectfiles_anat_templates, 'vmhc_config_file_2mm', struct_2_MNI_symm, 'inputspec.fnirt_config')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain', struct_2_MNI_symm, 'inputspec.reference_brain')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_skull', struct_2_MNI_symm, 'inputspec.reference_skull')
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain_mask_dil', struct_2_MNI_symm, 'inputspec.ref_mask')
    wf.connect(selectfiles, 't1w', struct_2_MNI_symm, 'inputspec.input_skull')
    wf.connect(selectfiles, 't1w_brain', struct_2_MNI_symm, 'inputspec.input_brain')

    wf.connect(struct_2_MNI_symm, 'outputspec.output_brain', ds, 'vmhc.symm_reg.@output_brain')
    wf.connect(struct_2_MNI_symm, 'outputspec.linear_xfm', ds, 'vmhc.symm_reg.@linear_xfm')
    wf.connect(struct_2_MNI_symm, 'outputspec.invlinear_xfm', ds, 'vmhc.symm_reg.@invlinear_xfm')
    wf.connect(struct_2_MNI_symm, 'outputspec.nonlinear_xfm', ds, 'vmhc.symm_reg.@nonlinear_xfm')



    # fixme
    vmhc = cpac_vmhc.create_vmhc(use_ants=False, name='vmhc')
    vmhc.inputs.fwhm_input.fwhm = 4
    wf.connect(selectfiles_anat_templates, 'vmhc_symm_brain_3mm', vmhc, 'inputspec.standard_for_func')
    wf.connect(selectfiles, 'preproc_epi_bp_tNorm', vmhc, 'inputspec.rest_res')
    wf.connect(selectfiles, 'epi_2_struct_mat', vmhc, 'inputspec.example_func2highres_mat')
    wf.connect(struct_2_MNI_symm, 'outputspec.nonlinear_xfm', vmhc, 'inputspec.fnirt_nonlinear_warp')
    # wf.connect(GM_mask_epiSpace, 'out_file', vmhc, 'inputspec.rest_mask')
    wf.connect(selectfiles, 'epi_mask', vmhc, 'inputspec.rest_mask')

    wf.connect(vmhc, 'outputspec.rest_res_2symmstandard', ds, 'vmhc.rest_res_2symmstandard')
    wf.connect(vmhc, 'outputspec.VMHC_FWHM_img', ds, 'vmhc.VMHC_FWHM_img')
    wf.connect(vmhc, 'outputspec.VMHC_Z_FWHM_img', ds, 'vmhc.VMHC_Z_FWHM_img')
    wf.connect(vmhc, 'outputspec.VMHC_Z_stat_FWHM_img', ds, 'vmhc.VMHC_Z_stat_FWHM_img')



    # VARIABILITY SCORES
    variability = Node(util.Function(input_names=['in_file'],
                                     output_names=['out_file_list'],
                                     function=calc_metrics_utils.calc_variability),
                       name='variability')
    wf.connect(selectfiles, 'preproc_epi_bp', variability, 'in_file')
    wf.connect(variability, 'out_file_list', ds, 'variability.subjectSpace.@out_files')


    # fixme spline?
    variabilty_MNIspace_3mm = MapNode(fsl.ApplyWarp(), iterfield=['in_file'], name='variabilty_MNIspace_3mm')
    variabilty_MNIspace_3mm.inputs.interp = 'spline'
    variabilty_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
    wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', variabilty_MNIspace_3mm, 'ref_file')
    wf.connect(selectfiles, 'epi_2_MNI_warp', variabilty_MNIspace_3mm, 'field_file')
    wf.connect(variability, 'out_file_list', variabilty_MNIspace_3mm, 'in_file')
    wf.connect(variabilty_MNIspace_3mm, 'out_file', ds, 'variability.MNI_3mm.@out_file')


    # CALC Z SCORE
    variabilty_MNIspace_3mm_Z = cpac_centrality_z_score.get_cent_zscore(wf_name='variabilty_MNIspace_3mm_Z')
    wf.connect(variabilty_MNIspace_3mm, 'out_file', variabilty_MNIspace_3mm_Z, 'inputspec.input_file')
    # wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_3mm', variabilty_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', variabilty_MNIspace_3mm_Z, 'inputspec.mask_file')
    wf.connect(variabilty_MNIspace_3mm_Z, 'outputspec.z_score_img', ds, 'variability.MNI_3mm_Z.@out_file')



    # STANDARDIZE BY MEAN
    variabilty_MNIspace_3mm_standardized_mean = calc_metrics_utils.standardize_divide_by_mean(
        wf_name='variabilty_MNIspace_3mm_standardized_mean')
    wf.connect(variabilty_MNIspace_3mm, 'out_file', variabilty_MNIspace_3mm_standardized_mean, 'inputnode.in_file')
    wf.connect(epi_mask_MNIspace_3mm, 'out_file', variabilty_MNIspace_3mm_standardized_mean, 'inputnode.mask_file')
    wf.connect(variabilty_MNIspace_3mm_standardized_mean, 'outputnode.out_file', ds,
               'variability.MNI_3mm_standardized_mean.@out_file')

    wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
Exemplo n.º 38
0
def create_nonlinear_pipeline(name='nonlinear'):

    # workflow
    nonlinear = Workflow(name='nonlinear')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=[
        't1_highres', 'epi2highres_lin', 'epi2highres_lin_itk', 'fov_mask',
        'brain_mask', 'wmcsf_mask', 'highres2lowres_itk'
    ]),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi2highres_warp', 'epi2highres_invwarp', 'epi2highres_nonlin',
        'brainmask_highres', 'wmcsfmask_highres'
    ]),
                      name='outputnode')

    # project brainmask and wmcsf mask from lowres to highres mp2rage space
    brainmask = Node(ants.ApplyTransforms(dimension=3,
                                          invert_transform_flags=[True],
                                          interpolation='NearestNeighbor'),
                     name='brainmask')

    wmcsf_mask = Node(ants.ApplyTransforms(dimension=3,
                                           invert_transform_flags=[True],
                                           interpolation='NearestNeighbor'),
                      name='wmcsf_mask')

    # mask t1

    #dilate brainmask
    dil_brainmask = Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=15),
                         name='dil_brainmask')

    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')

    nonlinear.connect([
        (inputnode, brainmask, [('brain_mask', 'input_image'),
                                ('t1_highres', 'reference_image'),
                                ('highres2lowres_itk', 'transforms')]),
        (brainmask, outputnode, [('output_image', 'brainmask_highres')]),
        (inputnode, wmcsf_mask, [('wmcsf_mask', 'input_image'),
                                 ('t1_highres', 'reference_image'),
                                 ('highres2lowres_itk', 'transforms')]),
        (wmcsf_mask, outputnode, [('output_image', 'wmcsfmask_highres')]),
        (brainmask, dil_brainmask, [('output_image', 'in_file')]),
        (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
        (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
    ])

    # transform fov mask, dilate and apply to t1
    transform_fov = Node(
        ants.ApplyTransforms(dimension=3,
                             output_image='fov_mask_highres.nii.gz',
                             interpolation='NearestNeighbor'), 'transform_fov')

    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')

    #mask t1 twice
    mask_t1_1 = Node(fsl.ApplyMask(out_file='t1_brain_masked.nii.gz'),
                     name='mask_t1_1')

    mask_t1_2 = Node(fsl.ApplyMask(out_file='t1_brain_fov_masked.nii.gz'),
                     name='mask_t1_2')

    nonlinear.connect([
        (inputnode, transform_fov, [('fov_mask', 'input_image'),
                                    ('t1_highres', 'reference_image'),
                                    ('epi2highres_lin_itk', 'transforms')]),
        (transform_fov, dilate_fov, [('output_image', 'in_file')]),
        (brainmask, mask_t1_1, [('output_image', 'mask_file')]),
        (inputnode, mask_t1_1, [('t1_highres', 'in_file')]),
        (dilate_fov, mask_t1_2, [('binary_file', 'mask_file')]),
        (mask_t1_1, mask_t1_2, [('out_file', 'in_file')]),
    ])

    # normalization with ants
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g 0.1x1x0.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[50, 20, 10]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[4, 2, 1]],
        smoothing_sigmas=[[2, 1, 0]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True,
        interpolation='BSpline'),
                   name='antsreg')
    antsreg.plugin_args = {'override_specs': 'request_memory = 40000'}

    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1_2, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode,
                        [('reverse_transforms', 'epi2highres_invwarp'),
                         ('forward_transforms', 'epi2highres_warp'),
                         ('warped_image', 'epi2highres_nonlin')])])

    return nonlinear
Exemplo n.º 39
0
def create_denoise_pipeline(name='denoise'):
    # workflow
    denoise = Workflow(name='denoise')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'anat_brain', 'brain_mask', 'epi2anat_dat', 'unwarped_mean',
        'epi_coreg', 'moco_par', 'highpass_sigma', 'lowpass_sigma', 'tr'
    ]),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'wmcsf_mask', 'brain_mask_resamp', 'brain_mask2epi', 'combined_motion',
        'outlier_files', 'intensity_files', 'outlier_stats', 'outlier_plots',
        'mc_regressor', 'mc_F', 'mc_pF', 'comp_regressor', 'comp_F', 'comp_pF',
        'normalized_file'
    ]),
                      name='outputnode')
    # run fast to get tissue probability classes
    fast = Node(fsl.FAST(), name='fast')
    denoise.connect([(inputnode, fast, [('anat_brain', 'in_files')])])

    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # resample tissue classes
    resample_tissue = MapNode(afni.Resample(resample_mode='NN',
                                            outputtype='NIFTI_GZ'),
                              iterfield=['in_file'],
                              name='resample_tissue')
    denoise.connect([
        (inputnode, resample_tissue, [('epi_coreg', 'master')]),
        (fast, resample_tissue, [(('partial_volume_files', selectindex,
                                   [0, 2]), 'in_file')]),
    ])
    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')
    denoise.connect([
        (resample_tissue, binarize_tissue, [('out_file', 'in_file')]),
    ])
    # combine tissue classes to noise mask
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask_lowres.nii.gz'),
                      name='wmcsf_mask')
    denoise.connect([(binarize_tissue, wmcsf_mask,
                      [(('out_file', selectsingle, 0), 'in_file'),
                       (('out_file', selectsingle, 1), 'operand_file')]),
                     (wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])])
    # resample brain mask
    resample_brain = Node(afni.Resample(
        resample_mode='NN',
        outputtype='NIFTI_GZ',
        out_file='T1_brain_mask_lowres.nii.gz'),
                          name='resample_brain')
    denoise.connect([(inputnode, resample_brain, [('brain_mask', 'in_file'),
                                                  ('epi_coreg', 'master')]),
                     (resample_brain, outputnode, [('out_file',
                                                    'brain_mask_resamp')])])
    # project brain mask into original epi space for quality assessment
    brainmask2epi = Node(fs.ApplyVolTransform(
        interp='nearest',
        inverse=True,
        transformed_file='T1_brain_mask2epi.nii.gz',
    ),
                         name='brainmask2epi')
    denoise.connect([
        (inputnode, brainmask2epi, [('brain_mask', 'target_file'),
                                    ('epi2anat_dat', 'reg_file'),
                                    ('unwarped_mean', 'source_file')]),
        (brainmask2epi, outputnode, [('transformed_file', 'brain_mask2epi')])
    ])
    # perform artefact detection
    artefact = Node(ra.ArtifactDetect(save_plot=True,
                                      use_norm=True,
                                      parameter_source='FSL',
                                      mask_type='file',
                                      norm_threshold=1,
                                      zintensity_threshold=3,
                                      use_differences=[True, False]),
                    name='artefact')
    artefact.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, artefact, [('epi_coreg', 'realigned_files'),
                               ('moco_par', 'realignment_parameters')]),
        (resample_brain, artefact, [('out_file', 'mask_file')]),
        (artefact, outputnode, [('norm_files', 'combined_motion'),
                                ('outlier_files', 'outlier_files'),
                                ('intensity_files', 'intensity_files'),
                                ('statistic_files', 'outlier_stats'),
                                ('plot_files', 'outlier_plots')])
    ])
    # Compute motion regressors
    motreg = Node(util.Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors),
                  name='getmotionregress')
    motreg.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, motreg, [('moco_par', 'motion_params')])])
    # Create a filter to remove motion and art confounds
    createfilter1 = Node(util.Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    createfilter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (motreg, createfilter1, [('out_files', 'motion_params')]),
        (artefact, createfilter1, [  # ('norm_files', 'comp_norm'),
            ('outlier_files', 'outliers')]),
        (createfilter1, outputnode, [('out_files', 'mc_regressor')])
    ])
    # regress out motion and art confounds
    filter1 = Node(fsl.GLM(out_f_name='F_mcart.nii.gz',
                           out_pf_name='pF_mcart.nii.gz',
                           out_res_name='rest_mc_denoised.nii.gz',
                           demean=True),
                   name='filtermotion')
    filter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, filter1, [('epi_coreg', 'in_file')]),
                     (createfilter1, filter1,
                      [(('out_files', list_to_filename), 'design')]),
                     (filter1, outputnode, [('out_f', 'mc_F'),
                                            ('out_pf', 'mc_pF')])])
    # create filter with compcor components
    createfilter2 = Node(util.Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                       output_names=['out_files'],
                                       function=extract_noise_components),
                         name='makecompcorfilter')
    createfilter2.inputs.num_components = 6
    createfilter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (createfilter1, createfilter2, [(('out_files', list_to_filename),
                                         'extra_regressors')]),
        (filter1, createfilter2, [('out_res', 'realigned_file')]),
        (wmcsf_mask, createfilter2, [('out_file', 'mask_file')]),
        (createfilter2, outputnode, [('out_files', 'comp_regressor')]),
    ])
    # regress compcor and other noise components
    filter2 = Node(fsl.GLM(out_f_name='F_noise.nii.gz',
                           out_pf_name='pF_noise.nii.gz',
                           out_res_name='rest2anat_denoised.nii.gz',
                           demean=True),
                   name='filternoise')
    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(filter1, filter2, [('out_res', 'in_file')]),
                     (createfilter2, filter2, [('out_files', 'design')]),
                     (resample_brain, filter2, [('out_file', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF')])])
    # bandpass filter denoised file
    bandpass_filter = Node(
        fsl.TemporalFilter(out_file='rest_denoised_bandpassed.nii.gz'),
        name='bandpass_filter')
    bandpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, bandpass_filter,
                      [('highpass_sigma', 'highpass_sigma'),
                       ('lowpass_sigma', 'lowpass_sigma')]),
                     (filter2, bandpass_filter, [('out_res', 'in_file')])])
    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, normalize_time, [('tr', 'tr')]),
        (bandpass_filter, normalize_time, [('out_file', 'in_file')]),
        (normalize_time, outputnode, [('out_file', 'normalized_file')])
    ])
    return denoise
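The helpers motion_regressors, build_filter1, extract_noise_components, and time_normalizer are imported from elsewhere in this project and are not shown. As orientation, here is a sketch of motion_regressors matching the node's signature (motion_params, order, derivatives), modeled on the nipype rsfMRI example workflows; the project's actual implementation may differ.

def motion_regressors(motion_params, order=0, derivatives=1):
    # Sketch only: expand realignment parameters with temporal derivatives
    # and polynomial terms, one output text file per input parameter file.
    import os
    import numpy as np
    from nipype.utils.filemanip import filename_to_list
    out_files = []
    for idx, filename in enumerate(filename_to_list(motion_params)):
        params = np.genfromtxt(filename)
        out_params = params
        for d in range(1, derivatives + 1):
            cparams = np.vstack((np.repeat(params[0, :][None, :], d, axis=0),
                                 params))
            out_params = np.hstack((out_params, np.diff(cparams, d, axis=0)))
        out_params2 = out_params
        for i in range(2, order + 1):
            out_params2 = np.hstack((out_params2, np.power(out_params, i)))
        filename = os.path.join(os.getcwd(), 'motion_regressor%02d.txt' % idx)
        np.savetxt(filename, out_params2, fmt='%.10f')
        out_files.append(filename)
    return out_files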
Example No. 40
                raise Exception("You are running a FreeSurfer 5 installation but requested a FreeSurfer 6 recon-all command")
            
            # Node: fs.fsReconAll
            fsReconAll = Node(AbcReconAll(), name="fsReconAll6")
            fsReconAll.inputs.args = ' -brainstem-structures -3T -contrasurfreg -qcache -no-isrunning'
            fsReconAll.inputs.directive = 'all'
            fsReconAll.inputs.environ = {}
            fsReconAll.ignore_exception = True  # may throw an exception when it cannot find result files ==> ignore it
            fsReconAll.inputs.subjects_dir = subjects_dir
            fsReconAll.inputs.use_FLAIR = use_FLAIR
            # no need to connect the T1 and T2 FLAIR inputs since the process was already initialized in basic_structural
            ishanat.connect(struct, "outputNode.subject_id", fsReconAll, "subject_id")   
            
            # Node: fs.fsReconAllHippoT1 to compute hippocampal subfields without FLAIR
            fsReconAllHippoT1 = Node(AbcReconAll(), name="fsHipT1")
            fsReconAllHippoT1.plugin_args = {'sbatch_args': '-t 20:00:00'}
            fsReconAllHippoT1.inputs.args = ' -hippocampal-subfields-T1 '
            fsReconAllHippoT1.inputs.subjects_dir = subjects_dir
            ishanat.connect(fsReconAll, "subject_id", fsReconAllHippoT1, "subject_id")
            
            if use_FLAIR:
                # Node: fs.fsReconAllHippoT1T2 to compute hippocampal subfields WITH FLAIR
                fsReconAllHippoT1T2 = Node(AbcReconAll(), name="fsHipT1T2")
                fsReconAllHippoT1T2.plugin_args = {'sbatch_args': '--mem 7000'}
                fsReconAllHippoT1T2.inputs.subjects_dir = subjects_dir
                ishanat.connect(fsReconAllHippoT1, "subject_id", fsReconAllHippoT1T2, "subject_id")
                ishanat.connect(struct, "outputNode.nifti_acc", fsReconAllHippoT1T2, "hippo_file")

#    # Conversion of FS outputs
#    fsconv = genPostFsPipeline(name='fsconv', fsversion=fsversion, pipeline='connectomics',no_myelin=not use_FLAIR)
#    if fsversion == "freesurfer6" and do_recon_all:
Example No. 41
def create_denoise_pipeline(name='denoise'):
    # workflow
    denoise = Workflow(name='denoise')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'anat_brain', 'brain_mask', 'flirt_mat', 'unwarped_mean', 'epi_coreg',
        'highpass_sigma', 'tr'
    ]),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'wmcsf_mask', 'brain2epi', 'wmcsf_mask2epi', 'combined_motion',
        'comp_regressor', 'comp_F', 'comp_pF', 'out_betas', 'ts_fullspectrum',
        'normalized_file'
    ]),
                      name='outputnode')
    # run fast to get tissue probability classes
    fast = Node(fsl.FAST(), name='fast')
    denoise.connect([(inputnode, fast, [('anat_brain', 'in_files')])])

    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')
    denoise.connect([
        (fast, binarize_tissue, [(('partial_volume_files', selectindex,
                                   [0, 2]), 'in_file')]),
    ])
    # combine tissue classes to noise mask
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask.nii'),
                      name='wmcsf_mask')
    denoise.connect([(binarize_tissue, wmcsf_mask,
                      [(('out_file', selectsingle, 0), 'in_file'),
                       (('out_file', selectsingle, 1), 'operand_file')]),
                     (wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])])

    # project wm_csf mask from anatomical to original epi space using inverse FLIRT-matrix
    invmat = Node(fsl.ConvertXFM(), name='invmat')
    invmat.inputs.invert_xfm = True

    apply_inv = Node(fsl.ApplyXfm(), name='apply_inv')
    apply_inv.inputs.apply_xfm = True
    denoise.connect([(inputnode, invmat, [('flirt_mat', 'in_file')]),
                     (invmat, apply_inv, [('out_file', 'in_matrix_file')]),
                     (inputnode, apply_inv, [('unwarped_mean', 'reference')]),
                     (wmcsf_mask, apply_inv, [('out_file', 'in_file')]),
                     (apply_inv, outputnode, [('out_file', 'wmcsf_mask2epi')])
                     ])
    # project brain to epi space as a quality check
    apply_inv_brain = Node(fsl.ApplyXfm(), name='apply_inv_brain')
    apply_inv_brain.inputs.apply_xfm = True
    denoise.connect([
        (invmat, apply_inv_brain, [('out_file', 'in_matrix_file')]),
        (inputnode, apply_inv_brain, [('unwarped_mean', 'reference')]),
        (inputnode, apply_inv_brain, [('anat_brain', 'in_file')]),
        (apply_inv_brain, outputnode, [('out_file', 'brain2epi')])
    ])

    # no artifact detection or motion regression is done here because AROMA has already removed motion-related components

    # create filter with compcor components
    createfilter2 = Node(util.Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                       output_names=['out_files'],
                                       function=extract_noise_components),
                         name='makecompcorfilter')
    createfilter2.inputs.num_components = 6
    createfilter2.inputs.extra_regressors = None
    createfilter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, createfilter2, [('epi_coreg', 'realigned_file')]),
        (apply_inv, createfilter2, [('out_file', 'mask_file')]),
        (createfilter2, outputnode, [('out_files', 'comp_regressor')]),
    ])
    # regress compcor and other noise components
    filter2 = Node(fsl.GLM(out_f_name='F_noise.nii.gz',
                           out_pf_name='pF_noise.nii.gz',
                           out_res_name='rest2anat_denoised.nii.gz',
                           output_type='NIFTI_GZ',
                           demean=True),
                   name='filternoise')
    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, filter2, [('epi_coreg', 'in_file')]),
                     (createfilter2, filter2, [('out_files', 'design')]),
                     (inputnode, filter2, [('brain_mask', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF'),
                                            ('out_file', 'out_betas')])])

    # write TR into header again (GLMs remove it)
    # do not use the mri_convert interface, as it has a bug (already fixed in nipype master)
    fix_tr = Node(util.Function(input_names=['in_file', 'TR_sec'],
                                output_names=['out_file'],
                                function=fix_TR_fs),
                  name='fix_tr')
    denoise.connect(inputnode, 'tr', fix_tr, 'TR_sec')
    denoise.connect(filter2, 'out_res', fix_tr, 'in_file')

    # use only a highpass filter; lowpass filtering is unnecessary because high-frequency noise has already been removed by AROMA
    highpass_filter = Node(
        fsl.TemporalFilter(out_file='rest_denoised_highpassed.nii'),
        name='highpass_filter')
    highpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, highpass_filter, [('highpass_sigma',
                                                    'highpass_sigma')]),
                     (fix_tr, highpass_filter, [('out_file', 'in_file')]),
                     (fix_tr, outputnode, [('out_file', 'ts_fullspectrum')])])

    # time-normalize scans (could be set to percent change etc., but here NO normalization is used;
    #                 see http://nipy.org/nitime/api/generated/nitime.fmri.io.html)
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([
        (inputnode, normalize_time, [('tr', 'tr')]),
        (highpass_filter, normalize_time, [('out_file', 'in_file')]),
        (normalize_time, outputnode, [('out_file', 'normalized_file')])
    ])
    return denoise
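fix_TR_fs is defined elsewhere in the project; judging by its name it wraps FreeSurfer's mri_convert on the command line. Purely as an illustration, here is a nibabel-based sketch with the same signature that restores the TR that fsl.GLM strips from the header; the real helper may work differently.

def fix_TR_fs(in_file, TR_sec):
    # Sketch only: write the repetition time back into the NIfTI header
    # (the 4th pixdim entry), which the GLM residual output loses.
    import os
    import nibabel as nb
    img = nb.load(in_file)
    zooms = list(img.header.get_zooms())
    zooms[3] = TR_sec  # 4th zoom is the TR, here in seconds
    img.header.set_zooms(zooms)
    out_file = os.path.join(os.getcwd(), 'rest_denoised_tr.nii.gz')
    nb.save(img, out_file)
    return out_file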
Example No. 42
def main():
    arguments = docopt(__doc__)
    study = arguments['<study>']
    use_server = arguments['--log-to-server']
    debug = arguments['--debug']

    config = load_config(study)

    if use_server:
        add_server_handler(config)
    if debug:
        logger.setLevel(logging.DEBUG)
    ## setup some paths
    study_base_dir = config.get_study_base()
    fs_dir = config.get_path('freesurfer')
    data_dir = config.get_path('nii')
    # not sure where to put this; potentially it could be very large.
    # keeping it means existing subjects don't get re-run;
    # it could be deleted, but then extra code would be needed to determine
    # whether subjects have already been run.
    working_dir = os.path.join(study_base_dir,
                               'pipelines/workingdir_reconflow')

    ## These are overrides, for testing
    base_dir = '/external/rprshnas01/tigrlab/'
    fs_dir = os.path.join(base_dir, 'scratch/twright/pipelines/freesurfer',
                          study)

    working_dir = os.path.join(
        base_dir, 'scratch/twright/pipelines/workingdir_reconflow')

    # freesurfer fails if the subjects dir doesn't exist
    check_folder_exists(fs_dir)
    # get the list of subjects that are not phantoms and have been qc'd
    subject_list = config.get_subject_metadata()
    subject_list = [
        subject for subject in subject_list
        if not dm_scanid.is_phantom(subject)
    ]

    # Need to determine if the study has T2 (or FLAIR) scans,
    # do this by looking in the study_config.yml for expected scantypes.
    # Current pipelines add T2 files if they exist on a per-subject basis
    # Nipype expects each run of the pipeline to be the same across all subjects.
    # it is possible to set some parameters on a per-subject basis (see nu-iter setting)
    # but is this desirable?
    scan_types = get_common_scan_types(config)

    if 'T1' not in scan_types:
        msg = 'Study {} does not have T1 scans, aborting.'.format(study)
        sys.exit(msg)

    templates = {'T1': '{dm_subject_id}/{dm_subject_id}_??_T1_??*.nii.gz'}
    if 'T2' in scan_types:
        templates['T2'] = '{dm_subject_id}/{dm_subject_id}_??_T2_??*.nii.gz'
    if 'FLAIR' in scan_types:
        logger.debug('FLAIR processing not yet implemented')
        #templates = {'T2': '{dm_subject_id}/{dm_subject_id}_??_FLAIR_??*.nii.gz'}

    # setup the nipype nodes
    # infosource just iterates through the list of subjects
    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    # For testing
    subject_list = ['DTI_CMH_H001_02']
    infosource.iterables = ('subject_id', subject_list)

    # sf finds the files for each subject. The dmSelectFiles class
    # overrides the nipype.SelectFiles adding checks that the numbers
    # of files matches those defined in study_config.yml
    sf = Node(dmSelectFiles(templates), name="selectFiles")

    sf.inputs.base_directory = data_dir

    # set_nuiter implements a simple function to set the iteration count
    # on a subject by subject basis
    set_nuiter = Node(Function(input_names=['subject_id'],
                               output_names=['nu_iter'],
                               function=get_nuiter_settings),
                      name='get_nuiter')

    # reconall is the interface for the recon-all freesurfer function
    # currently seem unable to specify multiple directives
    #    (e.g. -qcache and -notal-check)
    reconall = Node(ReconAll(directive='all',
                             parallel=True,
                             subjects_dir=fs_dir),
                    name='recon-all')
    # if this is running on a cluster, we can specify node specific requirements
    #  i.e. reconall runs well with lots of cores.
    reconall.plugin_args = {
        'qsub_args': '-l nodes=1:ppn=24',
        'overwrite': True
    }

    # get_summary extracts the summary information from the output of reconall
    get_summary = Node(EnigmaSummaryTask(), name='Enigma_Summaries')

    ## Create the workflow
    reconflow = Workflow(name='reconflow')
    reconflow.base_dir = working_dir

    # need a different connection pattern and param for the reconall node
    # if T2 files exist
    sf_ra_conx = [('T1', 'T1_files')]

    if 'T2' in scan_types:
        reconall.inputs.use_T2 = True
        sf_ra_conx.append(('T2', 'T2_file'))

    ## Connect the outputs from each node to the corresponding inputs
    # Basically we link the defined outputs from each node, to the inputs of the next node
    #   Each item in the list is [node1, node2, [(output_node1, input_node2)]]

    # Problem here due to incompatibilities between freesurfer 5 & 6
    # this pattern works for freesurfer 5.3.0 (without the parallel flag for reconall)
    # but fails for 6.0.0, which doesn't support the nuiterations flag.
    # reconflow.connect([(infosource, sf, [('subject_id', 'dm_subject_id')]),
    #                    (infosource, set_nuiter, [('subject_id', 'subject_id')]),
    #                    (sf, reconall, sf_ra_conx),
    #                    (set_nuiter, reconall, [('nu_iter', 'flags')])])

    # this is the freesurfer 6 compatible version
    reconflow.connect([(infosource, sf, [('subject_id', 'dm_subject_id')]),
                       (infosource, reconall, [('subject_id', 'subject_id')]),
                       (sf, reconall, sf_ra_conx),
                       (reconall, get_summary,
                        [('subjects_dir', 'subjects_dir'),
                         ('subject_id', 'subject_id'),
                         ('subjects_dir', 'output_path')])])

    # need to use a job template to ensure the environment is set correctly
    # on the running nodes.
    # Not sure why the current env isn't being passed
    job_template = os.path.join(os.path.dirname(__file__),
                                'job_template_scc.sh')

    ## run the actual workflow.
    # the PBSGraph plugin creates jobs for each node on a PBS torque cluster,
    # using torque scheduling to keep them in order.
    # Use plugin='SGEGraph' to run on the lab cluster (not sure what will happen
    #   to the reconall node if we don't have any 24-core machines).
    # Don't specify a plugin to run on a single machine
    reconflow.run(plugin='PBSGraph', plugin_args=dict(template=job_template))
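For a quick local test without a scheduler, the same workflow could instead be run with nipype's MultiProc plugin; the process count below is illustrative, not part of the original script.

# hypothetical local alternative to the PBSGraph call above
reconflow.run(plugin='MultiProc', plugin_args={'n_procs': 4})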
Example No. 43
def create_denoise_pipeline(name='denoise'):
    # workflow
    denoise = Workflow(name='denoise')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['anat_brain',
                                                              'brain_mask',
                                                              'epi2anat_dat',
                                                              'unwarped_mean',
                                                              'epi_coreg',
                                                              'moco_par',
                                                              'highpass_sigma',
                                                              'lowpass_sigma',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=['wmcsf_mask',
                                                               'brain_mask_resamp',
                                                               'brain_mask2epi',
                                                               'combined_motion',
                                                               'outlier_files',
                                                               'intensity_files',
                                                               'outlier_stats',
                                                               'outlier_plots',
                                                               'mc_regressor',
                                                               'mc_F',
                                                               'mc_pF',
                                                               'comp_regressor',
                                                               'comp_F',
                                                               'comp_pF',
                                                               # FL added fullspectrum
                                                               'ts_fullspectrum',
                                                               'normalized_file']),
                      name='outputnode')
    # run fast to get tissue probability classes
    fast = Node(fsl.FAST(), name='fast')
    denoise.connect([(inputnode, fast, [('anat_brain', 'in_files')])])
    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # resample tissue classes
    resample_tissue = MapNode(afni.Resample(resample_mode='NN',
                                            outputtype='NIFTI_GZ'),
                              iterfield=['in_file'],
                              name='resample_tissue')
    denoise.connect([(inputnode, resample_tissue, [('epi_coreg', 'master')]),
                     (fast, resample_tissue, [(('partial_volume_files', selectindex, [0, 2]), 'in_file')]),
                     ])
    # binarize tissue classes
    binarize_tissue = MapNode(fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
                              iterfield=['in_file'],
                              name='binarize_tissue')
    denoise.connect([(resample_tissue, binarize_tissue, [('out_file', 'in_file')]),
                     ])
    # combine tissue classes to noise mask
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask_lowres.nii.gz'),
                      name='wmcsf_mask')
    denoise.connect([(binarize_tissue, wmcsf_mask, [(('out_file', selectsingle, 0), 'in_file'),
                                                    (('out_file', selectsingle, 1), 'operand_file')]),
                     (wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])])
    # resample brain mask
    resample_brain = Node(afni.Resample(resample_mode='NN',
                                        outputtype='NIFTI_GZ',
                                        out_file='T1_brain_mask_lowres.nii.gz'),
                          name='resample_brain')
    denoise.connect([(inputnode, resample_brain, [('brain_mask', 'in_file'),
                                                  ('epi_coreg', 'master')]),
                     (resample_brain, outputnode, [('out_file', 'brain_mask_resamp')])])
    # project brain mask into original epi space for quality assessment
    brainmask2epi = Node(fs.ApplyVolTransform(interp='nearest',
                                              inverse=True,
                                              transformed_file='T1_brain_mask2epi.nii.gz', ),
                         name='brainmask2epi')
    denoise.connect([(inputnode, brainmask2epi, [('brain_mask', 'target_file'),
                                                 ('epi2anat_dat', 'reg_file'),
                                                 ('unwarped_mean', 'source_file')]),
                     (brainmask2epi, outputnode, [('transformed_file', 'brain_mask2epi')])])
    # perform artefact detection
    artefact = Node(ra.ArtifactDetect(save_plot=True,
                                      use_norm=True,
                                      parameter_source='FSL',
                                      mask_type='file',
                                      norm_threshold=1,
                                      zintensity_threshold=3,
                                      use_differences=[True, False]
                                      ),
                    name='artefact')
    artefact.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, artefact, [('epi_coreg', 'realigned_files'),
                                            ('moco_par', 'realignment_parameters')]),
                     (resample_brain, artefact, [('out_file', 'mask_file')]),
                     (artefact, outputnode, [('norm_files', 'combined_motion'),
                                             ('outlier_files', 'outlier_files'),
                                             ('intensity_files', 'intensity_files'),
                                             ('statistic_files', 'outlier_stats'),
                                             ('plot_files', 'outlier_plots')])])
    # Compute motion regressors
    motreg = Node(util.Function(input_names=['motion_params', 'order', 'derivatives'],
                                output_names=['out_files'],
                                function=motion_regressors),
                  name='getmotionregress')
    motreg.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, motreg, [('moco_par', 'motion_params')])])
    # Create a filter to remove motion and art confounds
    createfilter1 = Node(util.Function(input_names=['motion_params', 'comp_norm',
                                                    'outliers', 'detrend_poly'],
                                       output_names=['out_files'],
                                       function=build_filter1),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    createfilter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(motreg, createfilter1, [('out_files', 'motion_params')]),
                     (artefact, createfilter1, [  # ('norm_files', 'comp_norm'),
                                                  ('outlier_files', 'outliers')]),
                     (createfilter1, outputnode, [('out_files', 'mc_regressor')])
                     ])
    # regress out motion and art confounds
    filter1 = Node(fsl.GLM(out_f_name='F_mcart.nii.gz',
                           out_pf_name='pF_mcart.nii.gz',
                           out_res_name='rest_mc_denoised.nii.gz',
                           demean=True),
                   name='filtermotion')
    filter1.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, filter1, [('epi_coreg', 'in_file')]),
                     (createfilter1, filter1, [(('out_files', list_to_filename), 'design')]),
                     (filter1, outputnode, [('out_f', 'mc_F'),
                                            ('out_pf', 'mc_pF')])])
    # create filter with compcor components
    createfilter2 = Node(util.Function(input_names=['realigned_file', 'mask_file',
                                                    'num_components',
                                                    'extra_regressors'],
                                       output_names=['out_files'],
                                       function=extract_noise_components),
                         name='makecompcorfilter')
    createfilter2.inputs.num_components = 6
    createfilter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(createfilter1, createfilter2, [(('out_files', list_to_filename), 'extra_regressors')]),
                     (filter1, createfilter2, [('out_res', 'realigned_file')]),
                     (wmcsf_mask, createfilter2, [('out_file', 'mask_file')]),
                     (createfilter2, outputnode, [('out_files', 'comp_regressor')]),
                     ])
    # regress compcor and other noise components
    filter2 = Node(fsl.GLM(out_f_name='F_noise.nii.gz',
                           out_pf_name='pF_noise.nii.gz',
                           out_res_name='rest2anat_denoised.nii.gz',
                           demean=True),
                   name='filternoise')
    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(filter1, filter2, [('out_res', 'in_file')]),
                     (createfilter2, filter2, [('out_files', 'design')]),
                     (resample_brain, filter2, [('out_file', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF')])
                     ])
    # bandpass filter denoised file
    bandpass_filter = Node(fsl.TemporalFilter(out_file='rest_denoised_bandpassed.nii.gz'),
                           name='bandpass_filter')
    bandpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, bandpass_filter, [('highpass_sigma', 'highpass_sigma'),
                                                   ('lowpass_sigma', 'lowpass_sigma')]),
                     (filter2, bandpass_filter, [('out_res', 'in_file')]),
                     # FL added fullspectrum to outputnode
                     (filter2, outputnode, [('out_res', 'ts_fullspectrum')])
                     ])
    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    denoise.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                     (bandpass_filter, normalize_time, [('out_file', 'in_file')]),
                     (normalize_time, outputnode, [('out_file', 'normalized_file')])
                     ])
    return denoise
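extract_noise_components implements aCompCor and is imported from elsewhere. Below is a minimal sketch consistent with the node's inputs (realigned_file, mask_file, num_components, extra_regressors), again modeled on the nipype example workflows; details of the real helper may differ.

def extract_noise_components(realigned_file, mask_file, num_components=6,
                             extra_regressors=None):
    # Sketch only (aCompCor): take the top principal components of the
    # voxel time series inside the WM/CSF noise mask as nuisance regressors.
    import os
    import numpy as np
    import nibabel as nb
    from scipy.linalg import svd
    imgseries = nb.load(realigned_file)
    mask = nb.load(mask_file).get_fdata() > 0
    voxel_timecourses = imgseries.get_fdata()[mask]
    voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
    # demean and variance-normalize each voxel's time course before the SVD
    X = voxel_timecourses.T
    stdX = np.std(X, axis=0)
    stdX[stdX == 0] = 1.0
    X = (X - np.mean(X, axis=0)) / stdX
    u, _, _ = svd(X, full_matrices=False)
    components = u[:, :num_components]
    if extra_regressors:
        components = np.hstack((components, np.genfromtxt(extra_regressors)))
    out_files = os.path.join(os.getcwd(), 'noise_components.txt')
    np.savetxt(out_files, components, fmt='%.10f')
    return out_files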
Example No. 44
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir):
    '''
    Workflow to run background masking and then freesurfer recon-all
    on "lowres" MP2RAGE data
    '''

    # main workflow
    struct_preproc = Workflow(name='mp2rage_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution'][
        'crashdump_dir'] = struct_preproc.base_dir + "/crash_files"

    # select files
    templates = {
        'inv2': 'raw/mp2rage/inv2.nii.gz',
        't1map': 'raw/mp2rage/t1map.nii.gz',
        'uni': 'raw/mp2rage/uni.nii.gz'
    }
    selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    # mp2rage background masking
    background = Node(JistIntensityMp2rageMasking(outMasked=True,
                                                  outMasked2=True,
                                                  outSignal2=True),
                      name='background')

    # workflow to run freesurfer reconall

    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    recon_all.inputs.subjects_dir = freesurfer_dir
    recon_all.inputs.subject_id = sub_id(subject)

    #sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('outStripped', 'uni_stripped'),
                                            ('outMasked2', 'uni_masked'),
                                            ('outSignal2', 'background_mask'),
                                            ('outOriginal', 'uni_reoriented'),
                                            ('outMask', 'skullstrip_mask'),
                                            ('transform_Warped',
                                             'T1_brain2std')]),
                name='sink')

    # connections
    struct_preproc.connect([
        (selectfiles, background, [('inv2', 'inSecond'),
                                   ('t1map', 'inQuantitative'),
                                   ('uni', 'inT1weighted')]),
        (background, recon_all, [('outMasked2', 'T1_files')]),
        (background, sink, [('outMasked2', 'preprocessed.mp2rage.@uni_masked'),
                            ('outSignal2',
                             'preprocessed.mp2rage.@background_mask')]),
    ])

    #struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    return struct_preproc
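A hypothetical invocation of the factory above; the subject id and all paths are placeholders.

wf = create_structural(subject='sub001',
                       working_dir='/scratch/workingdir',
                       data_dir='/data/mp2rage/sub001',
                       freesurfer_dir='/data/freesurfer',
                       out_dir='/data/out/sub001')
wf.run()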