def create_reconall_pipeline(name='reconall'):
    # imports assumed from the surrounding module; create_skullstripped_recon_flow
    # is nipype's skull-stripped FreeSurfer recon workflow
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    from nipype.workflows.smri.freesurfer import create_skullstripped_recon_flow

    reconall = Workflow(name=name)

    #inputnode 
    inputnode=Node(util.IdentityInterface(fields=['anat', 
                                                  'fs_subjects_dir',
                                                  'fs_subject_id'
                                                  ]),
                   name='inputnode')
    
    outputnode=Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                   'fs_subject_id']),
                    name='outputnode')
    
    # run reconall
    recon_all = create_skullstripped_recon_flow()
    
    
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/','_')
    
    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'inputspec.subjects_dir'),
                                              ('anat', 'inputspec.T1_files'),
                                              (('fs_subject_id', sub_id), 'inputspec.subject_id')]),
                      (recon_all, outputnode, [('outputspec.subject_id', 'fs_subject_id'),
                                               ('outputspec.subjects_dir', 'fs_subjects_dir')])
                      ])
    
    
    return reconall
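
A minimal usage sketch (not from the original source; paths are hypothetical, and the input T1 should already be skull-stripped since the workflow wraps create_skullstripped_recon_flow):

wf = create_reconall_pipeline()
wf.base_dir = '/tmp/work'                                   # hypothetical working dir
wf.inputs.inputnode.anat = '/data/sub-01/T1w_brain.nii.gz'  # hypothetical input
wf.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'
wf.inputs.inputnode.fs_subject_id = 'sub-01'
wf.run()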
Example #2
def create_normalize_pipeline(name='normalize'):
    # imports assumed from the surrounding module; time_normalizer is a helper
    # function expected to be defined there
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util

    # workflow
    normalize = Workflow(name=name)
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'normalized_file']),
        name='outputnode')

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    normalize.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                       (inputnode, normalize_time, [('epi_coreg', 'in_file')]),
                       (normalize_time, outputnode, [('out_file', 'normalized_file')])
                       ])

    return normalize
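
A minimal usage sketch (not from the original; the path and TR are placeholders, and the time_normalizer helper must be defined):

wf = create_normalize_pipeline()
wf.inputs.inputnode.epi_coreg = '/data/sub-01/epi_coreg.nii.gz'  # hypothetical input
wf.inputs.inputnode.tr = 2.0                                     # repetition time in seconds
wf.run()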
Example #3
def create_smoothing_pipeline(name='smoothing'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI')
    # initiate workflow
    smoothing = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts_transformed',
                                                    'fwhm']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ts_smoothed']),
                      name='outputnode')

    # apply smoothing
    smooth = Node(fsl.Smooth(), name='smooth')

    smoothing.connect([
        (inputnode, smooth, [('ts_transformed', 'in_file'),
                             ('fwhm', 'fwhm')]),
        (smooth, outputnode, [('smoothed_file', 'ts_smoothed')])
    ])

    return smoothing
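
A minimal usage sketch (not from the original; the path and FWHM are placeholders):

wf = create_smoothing_pipeline()
wf.inputs.inputnode.ts_transformed = '/data/sub-01/func_mni.nii.gz'  # hypothetical input
wf.inputs.inputnode.fwhm = 6.0                                       # kernel FWHM in mm
wf.run()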
Example #4
def create_dcmconvert_pipeline(name='dcmconvert'):
    
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    from nipype.interfaces.dcmstack import DcmStack

    # workflow
    dcmconvert = Workflow(name=name)
    
    #inputnode 
    inputnode=Node(util.IdentityInterface(fields=['dicoms',
                                                  'filename']),
                   name='inputnode')
    
    # outputnode                                     
    outputnode=Node(util.IdentityInterface(fields=['nifti']),
                    name='outputnode')
    
    # conversion node
    converter = Node(DcmStack(embed_meta=True),
                     name='converter')
    
    # connections
    dcmconvert.connect([(inputnode, converter, [('dicoms', 'dicom_files'),
                                                ('filename','out_format')]),
                        (converter, outputnode, [('out_file','nifti')])])
    
    return dcmconvert
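
A minimal usage sketch (not from the original; the DICOM directory and output name are placeholders):

from glob import glob

wf = create_dcmconvert_pipeline()
wf.inputs.inputnode.dicoms = glob('/data/sub-01/dicoms/*.dcm')  # hypothetical DICOM files
wf.inputs.inputnode.filename = 'anat'                           # becomes DcmStack's out_format
wf.run()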
Example #5
def create_ants_registration_pipeline(name='ants_registration'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.ants as ants

    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow
    ants_registration = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['denoised_ts',
                                                    'ants_affine',
                                                    'ants_warp',
                                                    'ref']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_ts']),
                      name='outputnode')

    # merge affine and warp into a single transform list for the mni transform
    collect_transforms = Node(interface=util.Merge(2), name='collect_transforms')

    ants_reg = Node(ants.ApplyTransforms(input_image_type=3,
                                         dimension=3,
                                         interpolation='Linear'),
                    name='ants_reg')

    ants_registration.connect([
        (inputnode, ants_reg, [('denoised_ts', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, collect_transforms, [('ants_affine', 'in1')]),
        (inputnode, collect_transforms, [('ants_warp', 'in2')]),
        (collect_transforms, ants_reg, [('out', 'transforms')]),
        (ants_reg, outputnode, [('output_image', 'ants_reg_ts')])
    ])

    return ants_registration
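
A minimal usage sketch (not from the original; all paths are placeholders for transforms produced by a prior ANTs registration):

wf = create_ants_registration_pipeline()
wf.inputs.inputnode.denoised_ts = '/data/sub-01/denoised_ts.nii.gz'           # hypothetical
wf.inputs.inputnode.ants_affine = '/data/sub-01/transform0GenericAffine.mat'  # hypothetical
wf.inputs.inputnode.ants_warp = '/data/sub-01/transform1Warp.nii.gz'          # hypothetical
wf.inputs.inputnode.ref = '/data/MNI152_T1_2mm_brain.nii.gz'                  # hypothetical
wf.run()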
Example #6
def create_reconall_pipeline(name='reconall'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.freesurfer as fs

    reconall = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id'
                                                    ]),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')
    # run reconall
    recon_all = Node(fs.ReconAll(args='-autorecon2 -nuiterations 7 -no-isrunning -hippo-subfields'),
                     name="recon_all")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                              ('anat', 'T1_files'),
                                              (('fs_subject_id', sub_id), 'subject_id')]),
                      (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                               ('subjects_dir', 'fs_subjects_dir')])
                      ])
    return reconall
Example #7
    def __init__(self, ct_file_name, tmp_dir, vessel_seeds_mask_file_name=None,
                 chest_regions=None):
        # vessel_seeds_mask_file_name is referenced below, so it belongs in the
        # signature; os, pe (nipype.pipeline.engine), cip and CM are assumed to
        # come from the surrounding module's imports
        Workflow.__init__(self, 'VesselParticlesWorkflow')

        assert ct_file_name.rfind('.') != -1, "Unrecognized CT file name format"

        self._tmp_dir = tmp_dir
        # case id: file name without directory and extension (rfind returns -1
        # when there is no '/', so +1 correctly yields 0)
        self._cid = ct_file_name[ct_file_name.rfind('/') + 1:
                                 ct_file_name.rfind('.')]

        if ct_file_name.rfind('/') != -1:
            self._dir = ct_file_name[0:ct_file_name.rfind('/')]
        else:
            self._dir = '.'

        if vessel_seeds_mask_file_name is None:
            self._vessel_seeds_mask_file_name = \
              os.path.join(self._dir, self._cid + CM._vesselSeedsMask)
        else:
            self._vessel_seeds_mask_file_name = vessel_seeds_mask_file_name

        generate_partial_lung_label_map = \
          pe.Node(interface=cip.GeneratePartialLungLabelMap(),
                  name='generate_partial_lung_label_map')
        generate_partial_lung_label_map.inputs.ct = ct_file_name
        # further inputs were set here; the line is truncated in the source

        extract_chest_label_map = \
          pe.Node(interface=cip.ExtractChestLabelMap(),
                  name='extract_chest_label_map')
        # the outFileName assignment and remaining inputs are truncated in the source
Example #8
def create_slice_timing_pipeline(name='slicetiming'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI')
    # initiate workflow
    slicetiming = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts'
                                                    ]),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ts_slicetcorrected'
                                                     ]),
                      name='outputnode')


    # use FSL slicetiming (default ascending bottom to top)
    timer = Node(fsl.SliceTimer(), name='timer')
    timer.inputs.time_repetition = 2.0

    slicetiming.connect([
        (inputnode, timer, [
            ('ts', 'in_file')]
         ),
        (timer, outputnode, [('slice_time_corrected_file', 'ts_slicetcorrected')]
         )
    ])

    return slicetiming
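
A minimal usage sketch (not from the original; the path is a placeholder, and note that the node hard-codes a TR of 2.0 s):

wf = create_slice_timing_pipeline()
wf.inputs.inputnode.ts = '/data/sub-01/func.nii.gz'  # hypothetical input timeseries
wf.run()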
Example #9
def create_visualize_pipeline(name='visualize'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    # initiate workflow
    visualize = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts_transformed',
                                                    'mni_template']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['output_image']),
                      name='outputnode')

    # plot axial slices of the timeseries over the template edges
    slicer = Node(fsl.Slicer(sample_axial=6, image_width=750), name='slicer')

    visualize.connect([
        (inputnode, slicer, [('ts_transformed', 'in_file'),
                             ('mni_template', 'image_edges')]),
        (slicer, outputnode, [('out_file', 'output_image')])
    ])

    return visualize
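
A minimal usage sketch (not from the original; paths are placeholders):

wf = create_visualize_pipeline()
wf.inputs.inputnode.ts_transformed = '/data/sub-01/func_mni.nii.gz'    # hypothetical
wf.inputs.inputnode.mni_template = '/data/MNI152_T1_2mm_brain.nii.gz'  # hypothetical
wf.run()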
Example #10
def create_mgzconvert_pipeline(name='mgzconvert'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl

    # workflow
    mgzconvert = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['anat_head',
                                                     'anat_brain',
                                                     'anat_brain_mask',
                                                     'wmseg',
                                                     'wmedge']),
                      name='outputnode')
    # import files from freesurfer
    fs_import = Node(interface=nio.FreeSurferSource(),
                     name='fs_import')
    # convert Freesurfer T1 file to nifti
    head_convert = Node(fs.MRIConvert(out_type='niigz',
                                      out_file='T1.nii.gz'),
                        name='head_convert')
    # helper to pick the aparc+aseg volume from the freesurfer output list
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    # create brain by converting only freesurfer output
    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='brain.nii.gz'),
                         name='brain_convert')
    brain_binarize = Node(fsl.ImageMaths(op_string='-bin -fillh', out_file='T1_brain_mask.nii.gz'), name='brain_binarize')

    # cortical and cerebellar white matter volumes to construct wm edge
    # [lh cerebral wm, lh cerebellar wm, rh cerebral wm, rh cerebellar wm, brain stem]
    wmseg = Node(fs.Binarize(out_type='nii.gz',
                             match=[2, 7, 41, 46, 16],
                             binary_file='T1_brain_wmseg.nii.gz'),
                 name='wmseg')
    # make edge from wmseg to visualize coregistration quality
    edge = Node(fsl.ApplyMask(args='-edge -bin',
                              out_file='T1_brain_wmedge.nii.gz'),
                name='edge')
    # connections
    mgzconvert.connect([(inputnode, fs_import, [('fs_subjects_dir', 'subjects_dir'),
                                                ('fs_subject_id', 'subject_id')]),
                        (fs_import, head_convert, [('T1', 'in_file')]),
                        (fs_import, wmseg, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
                        (fs_import, brain_convert, [('brainmask', 'in_file')]),
                        (wmseg, edge, [('binary_file', 'in_file'),
                                       ('binary_file', 'mask_file')]),
                        (head_convert, outputnode, [('out_file', 'anat_head')]),
                        (brain_convert, outputnode, [('out_file', 'anat_brain')]),
                        (brain_convert, brain_binarize, [('out_file', 'in_file')]),
                        (brain_binarize, outputnode, [('out_file', 'anat_brain_mask')]),
                        (wmseg, outputnode, [('binary_file', 'wmseg')]),
                        (edge, outputnode, [('out_file', 'wmedge')])
                        ])

    return mgzconvert
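
A minimal usage sketch (not from the original; the subjects dir and id are placeholders):

wf = create_mgzconvert_pipeline()
wf.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'  # hypothetical SUBJECTS_DIR
wf.inputs.inputnode.fs_subject_id = 'sub-01'              # hypothetical subject
wf.run()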
Example #11
File: ants_ct.py  Project: fliem/LeiCA
def ants_ct_wf(subjects_id,
               preprocessed_data_dir,
               working_dir,
               ds_dir,
               template_dir,
               plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces.freesurfer.utils import ImageInfo



    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='ants_ct')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')



    #####################################
    # GET DATA
    #####################################
    # GET SUBJECT SPECIFIC STRUCTURAL DATA
    in_data_templates = {
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
    }

    in_data = Node(nio.SelectFiles(in_data_templates,
                                       base_directory=preprocessed_data_dir),
                       name="in_data")
    in_data.inputs.subject_id = subjects_id


    # GET NKI ANTs templates
    ants_templates_templates = {
        'brain_template': 'NKI/T_template.nii.gz',
        'brain_probability_mask': 'NKI/T_templateProbabilityMask.nii.gz',
        'segmentation_priors': 'NKI/Priors/*.nii.gz',
        't1_registration_template': 'NKI/T_template_BrainCerebellum.nii.gz'

    }

    ants_templates = Node(nio.SelectFiles(ants_templates_templates,
                                       base_directory=template_dir),
                       name="ants_templates")
Example #12
    def __init__(self, name, input_fields=None, output_fields=None, **kwargs):
        Workflow.__init__(self, name=name, **kwargs)

        # optionally expose identity nodes for the workflow's inputs and outputs
        if input_fields:
            self.input_node = pe.Node(name='input',
                                      interface=util.IdentityInterface(fields=input_fields))
        if output_fields:
            self.output_node = pe.Node(name='output',
                                       interface=util.IdentityInterface(fields=output_fields))
Example #13
def create_normalize_pipeline(name='normalize'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.ants as ants

    # workflow
    normalize = Workflow(name=name)
    
    # inputnode
    inputnode=Node(util.IdentityInterface(fields=['anat',
                                                  'standard']),
                   name='inputnode')
    
    # outputnode                                 
    outputnode=Node(util.IdentityInterface(fields=['anat2std_transforms',
                                                   'anat2std',
                                                   'std2anat_transforms',
                                                   'std2anat']),
                    name='outputnode')
    
    # normalization with ants
    antsreg= Node(ants.Registration(dimension=3,
                                    transforms=['Rigid','Affine','SyN'],
                                    metric=['MI','MI','CC'],
                                    metric_weight=[1,1,1],
                                    number_of_iterations=[[1000,500,250,100],[1000,500,250,100],[100,70,50,20]],
                                    convergence_threshold=[1e-6,1e-6,1e-6],
                                    convergence_window_size=[10,10,10],
                                    shrink_factors=[[8,4,2,1],[8,4,2,1],[8,4,2,1]],
                                    smoothing_sigmas=[[3,2,1,0],[3,2,1,0],[3,2,1,0]],
                                    sigma_units=['vox','vox','vox'],
                                    initial_moving_transform_com=1,
                                    transform_parameters=[(0.1,),(0.1,),(0.1,3.0,0.0)],
                                    sampling_strategy=['Regular', 'Regular', 'None'],
                                    sampling_percentage=[0.25,0.25,1],
                                    radius_or_number_of_bins=[32,32,4],
                                    num_threads=1,
                                    interpolation='Linear',
                                    winsorize_lower_quantile=0.005,
                                    winsorize_upper_quantile=0.995,
                                    collapse_output_transforms=True,
                                    output_inverse_warped_image=True,
                                    output_warped_image=True,
                                    use_histogram_matching=True,
                                    ),
                  name='antsreg')
       
    
    # connections
    normalize.connect([(inputnode, antsreg, [('anat', 'moving_image'),
                                             ('standard', 'fixed_image')]),
                       (antsreg, outputnode, [('forward_transforms', 'anat2std_transforms'),
                                              ('reverse_transforms', 'std2anat_transforms'),
                                              ('warped_image', 'anat2std'),
                                              ('inverse_warped_image', 'std2anat')])
                        ])
     
    return normalize
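
A minimal usage sketch (not from the original; paths are placeholders):

wf = create_normalize_pipeline()
wf.inputs.inputnode.anat = '/data/sub-01/T1_brain.nii.gz'          # hypothetical
wf.inputs.inputnode.standard = '/data/MNI152_T1_1mm_brain.nii.gz'  # hypothetical
wf.run()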
Example #14
    def create(self):  # , **kwargs):
        """ Create the nodes and connections for the workflow """
        # Preamble
        csvReader = CSVReader()
        csvReader.inputs.in_file = self.csv_file.default_value
        csvReader.inputs.header = self.hasHeader.default_value
        csvOut = csvReader.run()

        print(("=" * 80))
        print((csvOut.outputs.__dict__))
        print(("=" * 80))

        iters = OrderedDict()
        label = list(csvOut.outputs.__dict__.keys())[0]
        result = eval("csvOut.outputs.{0}".format(label))
        iters['tests'], iters['trains'] = subsample_crossValidationSet(result, self.sample_size.default_value)
        # Main event
        out_fields = ['T1', 'T2', 'Label', 'trainindex', 'testindex']
        inputsND = Node(interface=IdentityInterface(fields=out_fields),
                        run_without_submitting=True, name='inputs')
        inputsND.iterables = [('trainindex', iters['trains']),
                              ('testindex', iters['tests'])]
        if not self.hasHeader.default_value:
            inputsND.inputs.T1 = csvOut.outputs.column_0
            inputsND.inputs.Label = csvOut.outputs.column_1
            inputsND.inputs.T2 = csvOut.outputs.column_2
        else:
            inputsND.inputs.T1 = csvOut.outputs.__dict__['t1']
            inputsND.inputs.Label = csvOut.outputs.__dict__['label']
            inputsND.inputs.T2 = csvOut.outputs.__dict__['t2']
            pass  # TODO
        metaflow = Workflow(name='metaflow')
        metaflow.config['execution'] = {
            'plugin': 'Linear',
            'stop_on_first_crash': 'false',
            # stop_on_first_rerun stops at the first attempt to rerun, before
            # running, and before deleting previous results
            'stop_on_first_rerun': 'false',
            'hash_method': 'timestamp',
            'single_thread_matlab': 'true',  # Multi-core 2011a  multi-core for matrix multiplication.
            'remove_unnecessary_outputs': 'true',
            'use_relative_paths': 'false',  # relative paths should be on, require hash update when changed.
            'remove_node_directories': 'false',  # Experimental
            'local_hash_check': 'false'
        }

        metaflow.add_nodes([inputsND])
        """import pdb; pdb.set_trace()"""
        fusionflow = FusionLabelWorkflow()
        self.connect(
            [(metaflow, fusionflow, [('inputs.trainindex', 'trainT1s.index'), ('inputs.T1', 'trainT1s.inlist')]),
             (metaflow, fusionflow,
              [('inputs.trainindex', 'trainLabels.index'), ('inputs.Label', 'trainLabels.inlist')]),
             (metaflow, fusionflow, [('inputs.testindex', 'testT1s.index'), ('inputs.T1', 'testT1s.inlist')])
             ])
Example #15
def create_mp2rage_pipeline(name='mp2rage'):
    # imports assumed from the surrounding module; the masking and skullstrip
    # interfaces are nipype's MIPAV/JIST wrappers
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    from nipype.interfaces.mipav.developer import (JistIntensityMp2rageMasking,
                                                   MedicAlgorithmSPECTRE2010)

    # workflow
    mp2rage = Workflow(name=name)

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['inv2',
                                                    'uni',
                                                    't1map']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['uni_masked',
                                                     'background_mask',
                                                     'uni_stripped',
                                                     #'skullstrip_mask',
                                                     #'uni_reoriented'
                                                     ]),
                      name='outputnode')
    
    # remove background noise
    background = Node(JistIntensityMp2rageMasking(outMasked=True,
                                            outMasked2=True,
                                            outSignal2=True), 
                      name='background')
    
    # skullstrip
    strip = Node(MedicAlgorithmSPECTRE2010(outStripped=True,
                                           outMask=True,
                                           outOriginal=True,
                                           inOutput='true',
                                           inFind='true',
                                           inMMC=4
                                           ), 
                 name='strip')
    
    # connections
    mp2rage.connect([(inputnode, background, [('inv2', 'inSecond'),
                                              ('t1map', 'inQuantitative'),
                                              ('uni', 'inT1weighted')]),
                     (background, strip, [('outMasked2','inInput')]),
                     (background, outputnode, [('outMasked2','uni_masked'),
                                               ('outSignal2','background_mask')]),
                    (strip, outputnode, [('outStripped','uni_stripped'),
                                         #('outMask', 'skullstrip_mask'),
                                         #('outOriginal','uni_reoriented')
                                         ])
                     ])
    
    
    return mp2rage
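
A minimal usage sketch (not from the original; paths are placeholders for the three MP2RAGE images):

wf = create_mp2rage_pipeline()
wf.inputs.inputnode.inv2 = '/data/sub-01/inv2.nii.gz'    # hypothetical
wf.inputs.inputnode.uni = '/data/sub-01/uni.nii.gz'      # hypothetical
wf.inputs.inputnode.t1map = '/data/sub-01/t1map.nii.gz'  # hypothetical
wf.run()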
Example #16
    def __init__(self,name='PermutationAnalysis',session_dir='/export/data/mri/Neurometrics',n=2000,**kwargs):
        Workflow.__init__(self,name=name,**kwargs)

        self.permutation_workflows = [Analysis1('p{0}'.format(i),session_dir,permute=True) for i in range(n)]
            
        self.datasink = pe.Node(name = 'DataSink',interface = nio.DataSink())
        self.datasink.inputs.base_directory = session_dir
        self.datasink.inputs.container = 'ptest'

        for pw in self.permutation_workflows:
            self.connect(pw,'output.frame_scores',self.datasink,pw.name+'.frame')
            self.connect(pw,'output.block_scores',self.datasink,pw.name+'.block')
            self.connect(pw,'output.halfrun_scores',self.datasink,pw.name+'.halfrun')
            self.connect(pw,'output.run_scores',self.datasink,pw.name+'.run')
Example #17
def create_brainextract_pipeline(name='brainextract'):
    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    # workflow
    brainextract = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat', 'fraction']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['anat_brain', 'anat_brain_mask']),
                      name='outputnode')
    # use bet brain extraction
    bet = Node(interface=fsl.BET(mask=True),
               name='bet')

    # connections
    brainextract.connect([(inputnode, bet, [('anat', 'in_file'),
                                            ('fraction', 'frac')]),
                          (bet, outputnode, [('out_file', 'anat_brain')]),
                          (bet, outputnode, [('mask_file', 'anat_brain_mask')])
                          ])

    return brainextract
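
A minimal usage sketch (not from the original; the path is a placeholder):

wf = create_brainextract_pipeline()
wf.inputs.inputnode.anat = '/data/sub-01/T1w.nii.gz'  # hypothetical
wf.inputs.inputnode.fraction = 0.5                    # BET fractional intensity threshold
wf.run()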
Example #18
def smooth_data(name = 'func_smoothed'):
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    flow        = Workflow(name)

    inputnode   = Node(util.IdentityInterface(fields=['func_data']),
                       name = 'inputnode')

    outputnode  =  Node(util.IdentityInterface(fields=['func_smoothed']),
                       name = 'outputnode')

    smooth      = Node(interface=fsl.Smooth(), name='func_smooth_fwhm_4')
    smooth.inputs.fwhm                 = 4.0
    smooth.terminal_output             = 'file'

    flow.connect(inputnode, 'func_data'      , smooth      , 'in_file'    )
    flow.connect(smooth,    'smoothed_file'  , outputnode  , 'func_smoothed'   )


    return flow
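
A minimal usage sketch (not from the original; the path is a placeholder):

wf = smooth_data()
wf.inputs.inputnode.func_data = '/data/sub-01/func.nii.gz'  # hypothetical
wf.run()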
Example #19
def func_preprocess(name = 'func_preproc'):

    '''
    Method to preprocess functional data after warping to anatomical space.

    Accomplished after one-step distortion correction, motion correction and
    boundary-based linear registration to anatomical space.

    Procedure:
    # 1- skull strip
    # 2- normalize the image intensity values
    # 3- calculate the mean of the skull-stripped image
    # 4- create a brain mask from the normalized data
    '''

    # imports assumed from the surrounding module
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.afni import preprocess

    # Define Workflow
    flow        = Workflow(name=name)
    inputnode   = Node(util.IdentityInterface(fields=['func_in']),
                           name='inputnode')
    outputnode  = Node(util.IdentityInterface(fields=['func_preproc',
                                                      'func_preproc_mean',
                                                      'func_preproc_mask']),
                           name = 'outputnode')


    # 2- Normalize the image intensity values.
    norm                               = Node(interface = fsl.ImageMaths(),       name = 'func_normalized')
    norm.inputs.op_string              = '-ing 1000'
    # set these on .inputs (they were set on the Node itself and silently ignored)
    norm.inputs.out_data_type          = 'float'
    norm.inputs.output_type            = 'NIFTI'

    # 4- Create brain mask from Normalized data.
    mask                               = Node(interface = fsl.BET(),  name = 'func_preprocessed')
    mask.inputs.functional             = True
    mask.inputs.mask                   = True
    mask.inputs.frac                   = 0.5
    mask.inputs.vertical_gradient      = 0
    mask.inputs.threshold              = True

    # 3- Calculate Mean of Skull stripped image
    mean                          = Node(interface = preprocess.TStat(),     name = 'func_preprocessed_mean')
    mean.inputs.options           = '-mean'
    mean.inputs.outputtype        = 'NIFTI'


    flow.connect( inputnode  ,   'func_in'           ,   norm,        'in_file'     )
    flow.connect( norm       ,   'out_file'          ,   mask,        'in_file'     )
    flow.connect( norm       ,   'out_file'          ,   mean,        'in_file'     )
    flow.connect( mask       ,   'out_file'          ,   outputnode,  'func_preproc')
    flow.connect( mask       ,   'mask_file'         ,   outputnode,  'func_preproc_mask')
    flow.connect( mean       ,   'out_file'          ,   outputnode,  'func_preproc_mean')

    return flow
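
A minimal usage sketch (not from the original; the path is a placeholder for functional data already registered to anatomical space):

wf = func_preprocess()
wf.inputs.inputnode.func_in = '/data/sub-01/func2anat.nii.gz'  # hypothetical
wf.run()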
Example #20
    def make_neuromet1_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        #unidensource, return for every subject uni and den
        unidensource = Node(interface=IdentityInterface(
            fields=['uniden_prefix', 'uniden_suffix']),
                            name="unidensource")
        unidensource.iterables = [
            ('uniden_prefix', ['', 'derivatives/Siemens/']),
            ('uniden_suffix', ['T1w', 'desc-UNIDEN_MP2RAGE'])
        ]
        unidensource.synchronize = True

        split_sub_str = Node(Function(['subject_str'],
                                      ['subject_id', 'session_id'],
                                      self.split_subject_ses),
                             name='split_sub_str')

        info = dict(T1w=[[
            'uniden_prefix', 'subject_id', 'session_id', 'anat', 'subject_id',
            'session_id', 'uniden_suffix'
        ]])

        datasource = Node(interface=DataGrabber(infields=[
            'subject_id', 'session_id', 'uniden_prefix', 'uniden_suffix'
        ],
                                                outfields=['T1w']),
                          name='datasource')
        datasource.inputs.base_directory = self.bids_root
        datasource.inputs.template = '%ssub-NeuroMET%s/ses-0%s/%s/sub-NeuroMET%s_ses-0%s_%s.nii.gz'
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()
        segment = self.make_segment()
        mask = self.make_mask()

        neuromet = Workflow(name='NeuroMET', base_dir=self.temp_dir)
        neuromet.connect(infosource, 'subject_id', split_sub_str,
                         'subject_str')
        neuromet.connect(split_sub_str, 'subject_id', datasource, 'subject_id')
        neuromet.connect(split_sub_str, 'session_id', datasource, 'session_id')
        neuromet.connect(unidensource, 'uniden_prefix', datasource,
                         'uniden_prefix')
        neuromet.connect(unidensource, 'uniden_suffix', datasource,
                         'uniden_suffix')
        neuromet.connect(datasource, 'T1w', segment, 'ro.in_file')

        # neuromet.connect()
        neuromet.connect(segment, 'spm_tissues_split.gm', mask,
                         'sum_tissues1.in_file')
        neuromet.connect(segment, 'spm_tissues_split.wm', mask,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.csf', mask,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.gm', sink, '@gm')
        neuromet.connect(segment, 'spm_tissues_split.wm', sink, '@wm')
        neuromet.connect(segment, 'spm_tissues_split.csf', sink, '@csf')
        neuromet.connect(segment, 'seg.bias_corrected_images', sink,
                         '@biascorr')
        # neuromet.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink, '@img')
        neuromet.connect(mask, 'gen_mask.out_file', sink, '@mask')
        neuromet.connect(segment, 'ro.out_file', sink, '@ro')

        return neuromet
Example #21
def psacnn_workflow(input_file,
                    output_dir,
                    use_preprocess=True,
                    model_file=None,
                    contrast='t1w',
                    use_gpu=True,
                    gpu_id=0,
                    save_label_image=False,
                    save_prob_image=False,
                    patch_size=96,
                    batch_size=4,
                    sample_rate=20000):

    subprocess.call(['mkdir', '-p', output_dir])
    if not use_gpu:
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        os.environ["CUDA_VISIBLE_DEVICES"] = ""
        gpu_id = -1
        batch_size = 16
        sample_rate = 40000
    else:
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)
        batch_size = 4
        sample_rate = 20000

    if use_preprocess:
        preprocess_flow = Workflow(name='preprocess', base_dir=output_dir)

        conform = Node(MRIConvert(conform=True,
                                  out_type='niigz',
                                  out_file='conformed.nii.gz'),
                       name='conform')
        n4 = Node(N4BiasFieldCorrection(dimension=3,
                                        bspline_fitting_distance=300,
                                        shrink_factor=3,
                                        n_iterations=[50, 50, 30, 20],
                                        output_image='n4.nii.gz'),
                  name='n4')
        robex = Node(ROBEX(seed=1729, stripped_image='brain.nii.gz'),
                     name='robex')

        psacnn = Node(PSACNN(output_dir=output_dir,
                             contrast=contrast,
                             patch_size=patch_size,
                             batch_size=batch_size,
                             save_label_image=save_label_image,
                             save_prob_image=save_prob_image,
                             sample_rate=sample_rate),
                      name='psacnn')

        preprocess_flow.connect([
            (conform, n4, [('out_file', 'input_image')]),
            (n4, robex, [('output_image', 'input_image')]),
            (robex, psacnn, [('stripped_image', 'input_image')])
        ])

        preprocess_flow.write_graph(graph2use='orig')
        conform.inputs.in_file = input_file
        preprocess_flow.run('MultiProc', plugin_args={'n_procs': 16})
    else:

        psacnn = PSACNN(input_image=input_file,
                        output_dir=output_dir,
                        contrast=contrast,
                        patch_size=patch_size,
                        batch_size=batch_size,
                        save_label_image=save_label_image,
                        save_prob_image=save_prob_image,
                        sample_rate=sample_rate)
        # psacnn.inputs.input_image = input_file
        # psacnn.inputs.output_dir = output_dir
        # psacnn.inputs.contrast = contrast
        # psacnn.inputs.patch_size = patch_size
        # psacnn.inputs.batch_size = batch_size
        # psacnn.inputs.save_label_image = save_label_image
        # psacnn.inputs.save_prob_image = save_prob_image
        # psacnn.inputs.sample_rate = sample_rate

        psacnn.run()
Example #22
# (the start of this script, including the `subjects` list used below, is truncated in the source)
sessions = ['d6']

# directories
working_dir = '/nobackup/eminem2/schmidt/MMPIRS/preprocessing/working_dir'
#data_dir = '/nobackup/monaco1/schmidt/MMPIRS/preprocessed'
#data_dir = '/afs/cbs.mpg.de/projects/neu009_sequencing-plasticity/probands'
out_dir = '/nobackup/eminem2/schmidt/MMPIRS/preprocessing/'

# set fsl output type to nii.gz
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# volumes to remove from each timeseries
vol_to_remove = 5

# main workflow
preproc = Workflow(name='func_preproc')
preproc.base_dir = working_dir
preproc.config['execution'][
    'crashdump_dir'] = preproc.base_dir + "/crash_files"

# iterate over subjects
subject_infosource = Node(util.IdentityInterface(fields=['subjectlist']),
                          name='subject_infosource')
subject_infosource.iterables = [('subjectlist', subjects)]

# iterate over sessions
session_infosource = Node(util.IdentityInterface(fields=['session']),
                          name='session_infosource')
session_infosource.iterables = [('session', sessions)]

# select files
Example #23
# assign the path to the base directory:
l1datasink.inputs.base_directory = opj(path_root, 'l1pipeline')
# create a list of substitutions to adjust the file paths of datasink:
substitutions = [('_subject_id_', '')]
# assign the substitutions to the datasink command:
l1datasink.inputs.substitutions = substitutions
# determine whether to store output in parameterized form:
l1datasink.inputs.parameterization = True
# set expected thread and memory usage for the node:
l1datasink.interface.num_threads = 1
l1datasink.interface.mem_gb = 0.2
# ======================================================================
# DEFINE THE LEVEL 1 ANALYSIS SUB-WORKFLOW AND CONNECT THE NODES:
# ======================================================================
# initiation of the 1st-level analysis workflow:
l1analysis = Workflow(name='l1analysis')
# connect the 1st-level analysis components
l1analysis.connect(l1model, 'session_info', l1design, 'session_info')
l1analysis.connect(l1design, 'spm_mat_file', l1estimate, 'spm_mat_file')
l1analysis.connect(l1estimate, 'spm_mat_file', l1contrasts, 'spm_mat_file')
l1analysis.connect(l1estimate, 'beta_images', l1contrasts, 'beta_images')
l1analysis.connect(l1estimate, 'residual_image', l1contrasts, 'residual_image')
# ======================================================================
# DEFINE META-WORKFLOW PIPELINE:
# ======================================================================
# initiation of the 1st-level analysis workflow:
l1pipeline = Workflow(name='l1pipeline')
# stop execution of the workflow if an error is encountered:
l1pipeline.config = {
    'execution': {
        'stop_on_first_crash': True,
Example #24
def create_struct_preproc_pipeline(working_dir,
                                   freesurfer_dir,
                                   ds_dir,
                                   use_fs_brainmask,
                                   name='struct_preproc'):
    """

    """

    # initiate workflow
    struct_preproc_wf = Workflow(name=name)
    struct_preproc_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting',
                                              'rsfMRI_preprocessing')
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['t1w', 'subject_id']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        't1w_brain', 'struct_brain_mask', 'fast_partial_volume_files',
        'wm_mask', 'csf_mask', 'wm_mask_4_bbr', 'gm_mask'
    ]),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    # CREATE BRAIN MASK
    if use_fs_brainmask:
        # brainmask with fs
        fs_source = Node(interface=nio.FreeSurferSource(), name='fs_source')
        fs_source.inputs.subjects_dir = freesurfer_dir
        struct_preproc_wf.connect(inputnode, 'subject_id', fs_source,
                                  'subject_id')

        # get aparc+aseg from list
        def get_aparc_aseg(files):
            for name in files:
                if 'aparc+aseg' in name:
                    return name

        aseg = Node(fs.MRIConvert(out_type='niigz', out_file='aseg.nii.gz'),
                    name='aseg')
        struct_preproc_wf.connect(fs_source, ('aparc_aseg', get_aparc_aseg),
                                  aseg, 'in_file')

        fs_brainmask = Node(
            fs.Binarize(
                min=0.5,  #dilate=1,
                out_type='nii.gz'),
            name='fs_brainmask')
        struct_preproc_wf.connect(aseg, 'out_file', fs_brainmask, 'in_file')

        # fill holes in mask, smooth, rebinarize
        fillholes = Node(fsl.maths.MathsCommand(
            args='-fillh -s 3 -thr 0.1 -bin', out_file='T1_brain_mask.nii.gz'),
                         name='fillholes')

        struct_preproc_wf.connect(fs_brainmask, 'binary_file', fillholes,
                                  'in_file')

        fs_2_struct_mat = Node(util.Function(
            input_names=['moving_image', 'target_image'],
            output_names=['fsl_file'],
            function=tkregister2_fct),
                               name='fs_2_struct_mat')

        struct_preproc_wf.connect([(fs_source, fs_2_struct_mat,
                                    [('T1', 'moving_image'),
                                     ('rawavg', 'target_image')])])

        struct_brain_mask = Node(fsl.ApplyXfm(interp='nearestneighbour'),
                                 name='struct_brain_mask_fs')
        struct_preproc_wf.connect(fillholes, 'out_file', struct_brain_mask,
                                  'in_file')
        struct_preproc_wf.connect(inputnode, 't1w', struct_brain_mask,
                                  'reference')
        struct_preproc_wf.connect(fs_2_struct_mat, 'fsl_file',
                                  struct_brain_mask, 'in_matrix_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', outputnode,
                                  'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', ds,
                                  'struct_prep.struct_brain_mask')

        # multiply t1w with fs brain mask
        t1w_brain = Node(fsl.maths.BinaryMaths(operation='mul'),
                         name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', t1w_brain,
                                  'operand_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode,
                                  't1w_brain')
        struct_preproc_wf.connect(t1w_brain, 'out_file', ds,
                                  'struct_prep.t1w_brain')

    else:  # use bet
        t1w_brain = Node(fsl.BET(mask=True, outline=True, surfaces=True),
                         name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode,
                                  't1w_brain')

        def struct_brain_mask_bet_fct(in_file):
            return in_file

        struct_brain_mask = Node(util.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=struct_brain_mask_bet_fct),
                                 name='struct_brain_mask')
        struct_preproc_wf.connect(t1w_brain, 'mask_file', struct_brain_mask,
                                  'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', outputnode,
                                  'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', ds,
                                  'struct_prep.struct_brain_mask')

    # SEGMENTATION WITH FAST
    fast = Node(fsl.FAST(), name='fast')
    struct_preproc_wf.connect(t1w_brain, 'out_file', fast, 'in_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files', outputnode,
                              'fast_partial_volume_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files', ds,
                              'struct_prep.fast')

    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # pve0: CSF
    # pve1: GM
    # pve2: WM
    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')

    struct_preproc_wf.connect(fast,
                              ('partial_volume_files', selectindex, [0, 2]),
                              binarize_tissue, 'in_file')

    # OUTPUT  WM AND CSF MASKS FOR CPAC DENOISING
    struct_preproc_wf.connect([(binarize_tissue, outputnode,
                                [(('out_file', selectsingle, 0), 'csf_mask'),
                                 (('out_file', selectsingle, 1), 'wm_mask')])])

    # WRITE WM MASK WITH P > .5 FOR FSL BBR
    # use threshold of .5 like FSL's epi_reg script
    wm_mask_4_bbr = Node(fsl.ImageMaths(op_string='-thr 0.5 -bin'),
                         name='wm_mask_4_bbr')
    struct_preproc_wf.connect(fast, ('partial_volume_files', selectindex, [2]),
                              wm_mask_4_bbr, 'in_file')
    struct_preproc_wf.connect(wm_mask_4_bbr, 'out_file', outputnode,
                              'wm_mask_4_bbr')

    struct_preproc_wf.write_graph(dotfilename=struct_preproc_wf.name,
                                  graph2use='flat',
                                  format='pdf')

    return struct_preproc_wf
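
A minimal usage sketch (not from the original; all paths are placeholders, and use_fs_brainmask=False selects the BET branch so no FreeSurfer output is required):

wf = create_struct_preproc_pipeline(working_dir='/tmp/work',            # hypothetical
                                    freesurfer_dir='/data/freesurfer',  # hypothetical
                                    ds_dir='/data/sink',                # hypothetical
                                    use_fs_brainmask=False)
wf.inputs.inputnode.t1w = '/data/sub-01/T1w.nii.gz'  # hypothetical
wf.inputs.inputnode.subject_id = 'sub-01'
wf.run()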
Example #25
def make_w_mcmean(data_type='func'):
    """func or fmap"""

    n_in = Node(IdentityInterface(fields=[
        'epi',
        ]), name='input')
    n_out = Node(IdentityInterface(fields=[
        'corrected',
        'mean',
        'motion_parameters',
        ]), name='output')

    n_middle = Node(
        interface=Function(
            input_names=[
                'in_file',
                ],
            output_names=[
                'args',
            ],
            function=select_middle_volume,
            ),
        name='select_middle_volume')

    n_volreg = Node(interface=Volreg(), name='volreg')
    n_volreg.inputs.outputtype = 'NIFTI'

    n_mean = Node(interface=TStat(), name='mean')
    n_mean.inputs.args = '-mean'
    n_mean.inputs.outputtype = 'NIFTI_GZ'

    w = Workflow(name='mc_' + data_type)

    w.connect(n_in, 'epi', n_middle, 'in_file')
    w.connect(n_in, 'epi', n_volreg, 'in_file')
    w.connect(n_middle, 'args', n_volreg, 'args')
    w.connect(n_volreg, 'out_file', n_out, 'corrected')
    w.connect(n_volreg, 'out_file', n_mean, 'in_file')
    w.connect(n_volreg, 'oned_matrix_save', n_out, 'motion_parameters')
    w.connect(n_mean, 'out_file', n_out, 'mean')

    return w
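
A minimal usage sketch (not from the original; the path is a placeholder, and the select_middle_volume helper referenced above must be importable):

w = make_w_mcmean('func')
w.inputs.input.epi = '/data/sub-01/func.nii.gz'  # hypothetical; feeds the node named 'input'
w.run()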
Example #26
def create_workflow(unwarp_direction='y'):
    workflow = Workflow(name='func_unwarp')

    inputs = Node(
        IdentityInterface(fields=[
            # 'subject_id',
            # 'session_id',
            'funcs',
            'funcmasks',
            'fmap_phasediff',
            'fmap_magnitude',
            'fmap_mask',
        ]),
        name='in')

    outputs = Node(IdentityInterface(fields=[
        'funcs',
        'funcmasks',
    ]),
                   name='out')

    # --- --- --- --- --- --- --- Convert to radians --- --- --- --- --- ---

    # fslmaths $FUNCDIR/"$SUB"_B0_phase -div 100 -mul 3.141592653589793116
    #     -odt float $FUNCDIR/"$SUB"_B0_phase_rescaled

    # in_file --> out_file
    phase_radians = Node(fsl.ImageMaths(
        op_string='-mul 3.141592653589793116 -div 100',
        out_data_type='float',
        suffix='_radians',
    ),
                         name='phaseRadians')

    workflow.connect(inputs, 'fmap_phasediff', phase_radians, 'in_file')

    # --- --- --- --- --- --- --- Unwrap Fieldmap --- --- --- --- --- ---
    # --- Unwrap phase
    # prelude -p $FUNCDIR/"$SUB"_B0_phase_rescaled
    #         -a $FUNCDIR/"$SUB"_B0_magnitude
    #         -o $FUNCDIR/"$SUB"_fmri_B0_phase_rescaled_unwrapped
    #         -m $FUNCDIR/"$SUB"_B0_magnitude_brain_mask
    #  magnitude_file, phase_file [, mask_file] --> unwrapped_phase_file
    unwrap = MapNode(
        PRELUDE(),
        name='unwrap',
        iterfield=['mask_file'],
    )

    workflow.connect([
        (inputs, unwrap, [('fmap_magnitude', 'magnitude_file')]),
        (inputs, unwrap, [('fmap_mask', 'mask_file')]),
        (phase_radians, unwrap, [('out_file', 'phase_file')]),
    ])

    # --- --- --- --- --- --- --- Convert to Radians / Sec --- --- --- --- ---
    # fslmaths $FUNCDIR/"$SUB"_B0_phase_rescaled_unwrapped
    #          -mul 200 $FUNCDIR/"$SUB"_B0_phase_rescaled_unwrapped
    rescale = MapNode(
        fsl.ImageMaths(op_string='-mul 200'),
        name='rescale',
        iterfield=['in_file'],
    )

    workflow.connect(unwrap, 'unwrapped_phase_file', rescale, 'in_file')

    # --- --- --- --- --- --- --- Unmask fieldmap --- --- --- --- ---

    unmask_phase = MapNode(
        FUGUE(
            save_unmasked_fmap=True,
            unwarp_direction=unwarp_direction,
        ),
        name='unmask_phase',
        iterfield=['mask_file', 'fmap_in_file'],
    )

    workflow.connect(rescale, 'out_file', unmask_phase, 'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', unmask_phase, 'mask_file')

    # --- --- --- --- --- --- --- Undistort functionals --- --- --- --- ---
    # phasemap_in_file = phasediff
    # mask_file = mask
    # in_file = functional image
    # dwell_time = 0.0005585 s
    # unwarp_direction

    undistort = MapNode(
        FUGUE(
            dwell_time=0.0005585,
            # based on Process-NHP-MRI/Process_functional_data.md:
            asym_se_time=0.020,
            smooth3d=2.0,
            median_2dfilter=True,
            unwarp_direction=unwarp_direction,
        ),
        name='undistort',
        iterfield=['in_file', 'mask_file', 'fmap_in_file'],
    )

    workflow.connect(unmask_phase, 'fmap_out_file', undistort, 'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', undistort, 'mask_file')
    workflow.connect(inputs, 'funcs', undistort, 'in_file')

    undistort_masks = undistort.clone('undistort_masks')
    workflow.connect(unmask_phase, 'fmap_out_file', undistort_masks,
                     'fmap_in_file')
    workflow.connect(inputs, 'fmap_mask', undistort_masks, 'mask_file')
    workflow.connect(inputs, 'funcmasks', undistort_masks, 'in_file')

    workflow.connect(undistort, 'unwarped_file', outputs, 'funcs')

    workflow.connect(undistort_masks, 'unwarped_file', outputs, 'funcmasks')
    return workflow
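
A minimal usage sketch (not from the original; paths are placeholders). Because the input node is named 'in', a Python keyword, it is reached via get_node() rather than attribute access:

wf = create_workflow(unwarp_direction='y')
innode = wf.get_node('in')
innode.inputs.funcs = ['/data/sub-01/func.nii.gz']               # hypothetical
innode.inputs.funcmasks = ['/data/sub-01/func_mask.nii.gz']      # hypothetical
innode.inputs.fmap_phasediff = '/data/sub-01/fmap_phase.nii.gz'  # hypothetical
innode.inputs.fmap_magnitude = '/data/sub-01/fmap_mag.nii.gz'    # hypothetical
innode.inputs.fmap_mask = ['/data/sub-01/fmap_mask.nii.gz']      # hypothetical
wf.run()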
Example #27
def make_w_smooth(roi=''):
    w = Workflow('filt' + roi)

    n_in = Node(IdentityInterface(fields=[
        'func'
        ]), name='input')
    n_out = Node(IdentityInterface(fields=[
        'func'
        ]), name='output')

    n_t = Node(TStat(), 'tstat')
    n_t.inputs.args = '-mean'
    n_t.inputs.out_file = 'mean.nii.gz'

    n_mask = Node(Automask(), 'mask')
    n_mask.inputs.args = '-eclip'
    n_mask.inputs.clfrac = 0.4
    n_mask.inputs.out_file = 'mask.nii.gz'

    n_d = Node(Detrend(), 'detrend')
    n_d.inputs.out_file = 'detrended.nii.gz'

    n_smooth = Node(TSmooth(), 'smooth')
    n_smooth.inputs.adaptive = 5
    n_smooth.inputs.out_file = 'smooth.nii.gz'

    n_c = Node(Calc(), 'calc')
    n_c.inputs.args = '-datum float'
    n_c.inputs.expr = 'step(a)*(b+c)'
    n_c.inputs.out_file = f'filtered_{roi}.nii.gz'

    w.connect(n_in, 'func', n_t, 'in_file')
    w.connect(n_t, 'out_file', n_mask, 'in_file')
    w.connect(n_in, 'func', n_d, 'in_file')
    w.connect(n_in, ('func', afni_expr), n_d, 'args')
    w.connect(n_d, 'out_file', n_smooth, 'in_file')
    w.connect(n_mask, 'out_file', n_c, 'in_file_a')
    w.connect(n_t, 'out_file', n_c, 'in_file_b')
    w.connect(n_smooth, 'out_file', n_c, 'in_file_c')
    w.connect(n_c, 'out_file', n_out, 'func')

    return w
Example #28
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                      standard_brain):

    # main workflow
    struct_preproc = Workflow(name='mp2rage_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution'][
        'crashdump_dir'] = struct_preproc.base_dir + "/crash_files"

    # select files
    templates = {
        'inv2': 'nifti/mp2rage/inv2.nii.gz',
        't1map': 'nifti/mp2rage/t1map.nii.gz',
        'uni': 'nifti/mp2rage/uni.nii.gz'
    }
    selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    # workflow for mp2rage background masking
    mp2rage = create_mp2rage_pipeline()

    # workflow to run freesurfer reconall
    reconall = create_reconall_pipeline()
    reconall.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    reconall.inputs.inputnode.fs_subject_id = subject

    # workflow to get brain, head and wmseg from freesurfer and convert to nifti
    mgzconvert = create_mgzconvert_pipeline()

    # workflow to normalize anatomy to standard space
    normalize = create_normalize_pipeline()
    normalize.inputs.inputnode.standard = standard_brain

    #sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('outStripped', 'uni_stripped'),
                                            ('outMasked2', 'uni_masked'),
                                            ('outSignal2', 'background_mask'),
                                            ('outOriginal', 'uni_reoriented'),
                                            ('outMask', 'skullstrip_mask'),
                                            ('transform_Warped',
                                             'T1_brain2mni')]),
                name='sink')

    # connections
    struct_preproc.connect([
        (selectfiles, mp2rage, [('inv2', 'inputnode.inv2'),
                                ('t1map', 'inputnode.t1map'),
                                ('uni', 'inputnode.uni')]),
        (mp2rage, reconall, [('outputnode.uni_stripped', 'inputnode.anat')]),
        (reconall, mgzconvert,
         [('outputnode.fs_subject_id', 'inputnode.fs_subject_id'),
          ('outputnode.fs_subjects_dir', 'inputnode.fs_subjects_dir')]),
        (mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
        #(mp2rage, sink, [('outputnode.uni_masked', 'preprocessed.mp2rage.background_masking.@uni_masked'),
        #                 ('outputnode.background_mask', 'preprocessed.mp2rage.background_masking.@background_mask')
        #                 ]),
        (
            mgzconvert,
            sink,
            [
                ('outputnode.anat_head', 'preprocessed.anat.@head'),
                ('outputnode.anat_brain', 'preprocessed.anat.@brain'),
                ('outputnode.brain_mask', 'preprocessed.anat.@brain_mask'),
                ('outputnode.wmedge', 'preprocessed.anat.@wmedge'),
                #('outputnode.wmseg', 'preprocessed.mp2rage.brain_extraction.@wmseg')
            ]),
        (normalize, sink,
         [('outputnode.anat2std', 'preprocessed.anat.@anat2std'),
          ('outputnode.anat2std_transforms',
           'preprocessed.anat.transforms2mni.@anat2std_transforms'),
          ('outputnode.std2anat_transforms',
           'preprocessed.anat.transforms2mni.@std2anat_transforms')])
    ])
    #struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    struct_preproc.run()
Example #29
def run_workflow(session=None, csv_file=None, undist=True):
    # imports assumed from the surrounding module
    import os
    import pandas as pd
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.utility import IdentityInterface
    import nipype.interfaces.io as nio
    import nipype.interfaces.afni as afni
    #config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = 'derivatives/featpreproc/warp2nmt/highpassed_files'
    working_dir = 'workingdirs'

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'run_id',
        'refsubject_id',
    ]), name="infosource")

    if csv_file is not None:
        print('=== reading csv ===')
        # Read csv and use pandas to set-up image and ev-processing
        df = pd.read_csv(csv_file)
        # init lists
        sub_img = []; ses_img = []; run_img = []; ref_img = []

        # fill lists to iterate mapnodes
        for index, row in df.iterrows():
            for r in row.run.strip("[]").split(" "):
                sub_img.append(row.subject)
                ses_img.append(row.session)
                run_img.append(r)
                if 'refsubject' in df.columns:
                    if row.refsubject == 'nan':
                        # empty field
                        ref_img.append(row.subject)
                    else:
                        # non-empty field
                        ref_img.append(row.refsubject)
                else:
                    ref_img.append(row.subject)

        infosource.iterables = [
            ('subject_id', sub_img),
            ('session_id', ses_img),
            ('run_id', run_img),
            ('refsubject_id', ref_img),
        ]
        infosource.synchronize = True
    else:
        print("No csv-file specified. Don't know what data to process.")

    # use undistorted EPIs if requested (these need to be generated with the undistort workflow)
    if undist:
        func_flag = 'preproc_undistort'
    else:
        func_flag = 'preproc'
    
    # SelectFiles
    templates = {
        'image': 
        'derivatives/featpreproc/highpassed_files/'
        'sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}*run-{run_id}_bold_res-1x1x1_' + func_flag + '_mc_smooth_mask_gms_tempfilt_maths.nii.gz',

        'imagewarp': 
        'reference-vols/sub-{refsubject_id}/transforms/'
        'sub-{subject_id}_func2nmt_WARP.nii.gz',

        'ref_image': 
        'reference-vols/sub-{refsubject_id}/transforms/'
        'sub-{subject_id}_func2nmt_res-1x1x1.nii.gz',
    }

    inputfiles = Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), 
                        name="input_files")


    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('refsubject_id_', 'ref-'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('_Nwarp.nii.gz', '_NMTv2.nii.gz'),
        # remove subdirectories:
        ('highpassed_files/reg_func', 'highpassed_files'),
    ]  
       
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1/func'),
        (r'_ref-([a-zA-Z0-9]+)_run_id_[0-9][0-9]', r''),
    ]
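    # Illustrative (hypothetical) effect of the substitutions above: a
    # parameterized sink directory such as
    #   _refsubject_id_eddy_run_id_01_session_id_20170511_subject_id_eddy
    # first becomes _ref-eddy_run_id_01_ses-20170511_sub-eddy and is then
    # rewritten to sub-eddy/ses-20170511/func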


    # -------------------------------------------- Create Pipeline
    warp2nmt = Workflow(
        name='warp2nmt',
        base_dir=os.path.join(ds_root, working_dir))

    warp2nmt.connect([
        (infosource, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ('run_id', 'run_id'),
          ('refsubject_id', 'refsubject_id'),
          ])])
       
    nwarp = Node(afni.NwarpApply(out_file='%s_Nwarp.nii.gz'),name='nwarp')       
    warp2nmt.connect(inputfiles, 'image',
                        nwarp, 'in_file')
    warp2nmt.connect(inputfiles, 'imagewarp',
                        nwarp, 'warp')
    warp2nmt.connect(inputfiles, 'ref_image',
                        nwarp, 'master')
    warp2nmt.connect(nwarp, 'out_file',
                        outputfiles, 'reg_func')

    warp2nmt.stop_on_first_crash = False  # True
    warp2nmt.keep_inputs = True
    warp2nmt.remove_unnecessary_outputs = False
    warp2nmt.write_graph()
    warp2nmt.run()
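
# A minimal, hedged usage sketch (the csv path is hypothetical):
if __name__ == '__main__':
    run_workflow(csv_file='csv/warp2nmt_sessions.csv', undist=True)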
Example #30
# Transform the mean image. First to anatomical and then to the target
warpmean = Node(ApplyTransforms(args='--float',
                                input_image_type=3,
                                interpolation='Linear',
                                invert_transform_flags=[False, False],
                                num_threads=1,
                                reference_image=template,
                                terminal_output='file'),
                name='warpmean')

###
# Specify Normalization Workflow & Connect Nodes

# Initiation of the ANTS normalization workflow
normflow = Workflow(name='normflow')
normflow.base_dir = opj(experiment_dir, working_dir)

# Connect up ANTS normalization components
normflow.connect([
    (fssource, convert2nii, [('T1', 'in_file')]),
    (convert2nii, convert2itk, [('out_file', 'reference_file')]),
    (bbregister, convert2itk, [('out_fsl_file', 'transform_file')]),
    (convert2itk, merge, [('itk_transform', 'in2')]),
    (antsreg, merge, [('composite_transform', 'in1')]),
    (merge, warpmean, [('out', 'transforms')]),
    (merge, warpall, [('out', 'transforms')]),
])
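# Note: ApplyTransforms applies the transform list in reverse order, so with
# in1=composite_transform (anat->template) and in2=itk_transform (func->anat),
# the BBRegister affine is applied first, then the ANTs warp.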

###
# Input & Output Stream
Example #31
def create_workflow():
    workflow = Workflow(
        name='transform_manual_mask')

    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'refsubject_id',
        'ref_funcmask',
        'ref_func',
        'funcs',
    ]), name='in')

    # Find the transformation matrix func_ref -> func
    # First find transform from func to manualmask's ref func

    # first take the temporal median (FLIRT's behaviour has changed: it no longer
    # automatically takes the first volume when given 4D files)
    median_func = MapNode(
                    interface=fsl.maths.MedianImage(dimension="T"),
                    name='median_func',
                    iterfield=['in_file'],
                    )
    findtrans = MapNode(fsl.FLIRT(),
                        iterfield=['in_file'],
                        name='findtrans'
                        )

    # Invert the matrix transform
    invert = MapNode(fsl.ConvertXFM(invert_xfm=True),
                     name='invert',
                     iterfield=['in_file'],
                     )
    workflow.connect(findtrans, 'out_matrix_file',
                     invert, 'in_file')

    # Transform the manualmask to be aligned with func
    funcreg = MapNode(ApplyXFMRefName(),
                      name='funcreg',
                      iterfield=['in_matrix_file', 'reference'],
                      )


    workflow.connect(inputs, 'funcs',
                     median_func, 'in_file')

    workflow.connect(median_func, 'out_file',
                     findtrans, 'in_file')
    workflow.connect(inputs, 'ref_func',
                     findtrans, 'reference')

    workflow.connect(invert, 'out_file',
                     funcreg, 'in_matrix_file')

    workflow.connect(inputs, 'ref_func',
                     funcreg, 'in_file')
    workflow.connect(inputs, 'funcs',
                     funcreg, 'reference')
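    # Data flow: median(funcs) -> FLIRT onto ref_func -> invert the matrix ->
    # apply the inverse to ref_func, using each func as the reference grid.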

    
    return workflow
Example #32
def run_workflow():
    '''
    NOTE: only the create_workflow function is imported from this file;
    this run_workflow function is not used at all.
    Keep it here for debugging purposes.
    '''

    # ------------------ Specify variables
    subject_list = ['eddy']
    session_list = ['20170511']

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
    ]), name="infosource")

    infosource.iterables = [
        ('session_id', session_list),
        ('subject_id', subject_list),
    ]
    # SelectFiles
    templates = {
        'ref_func':
        'manual-masks/sub-{refsubject_id}/func/'
        'sub-{subject_id}_ref_func_res-1x1x1.nii.gz',

        'ref_funcmask':
        'manual-masks/sub-{refsubject_id}/func/'
        'sub-{subject_id}_ref_func_mask_res-1x1x1.nii.gz',

        'funcs':
        'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
            'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
            '.nii.gz',
    }

    data_dir = ds_root
    output_dir = 'transformed-manual-func-mask'

    inputfiles = Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(
        base_directory=ds_root,
        container=output_dir,
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('/mask/', '/'),
        ('_preproc_flirt_thresh.nii.gz', '_transformedmask.nii.gz'),
        #   ('/_findtrans0/', '/fmap/'),
        #   ('_magnitude1_res-1x1x1_manualmask_flirt',
        #    '_magnitude1_res-1x1x1_preproc'),
        # BIDS Extension Proposal: BEP003
        # ('_resample.nii.gz', '_res-1x1x1_preproc.nii.gz'),
        # remove subdirectories:
        # ('resampled-isotropic-1mm/isoxfm-1mm', 'resampled-isotropic-1mm'),
        # ('resampled-isotropic-1mm/mriconv-1mm', 'resampled-isotropic-1mm'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)', r'sub-\2/ses-\1'),
        (r'_maskthresh[0-9]*/', r'func/'),
    ]

    # -------------------------------------------- Wrapper workflow
    working_dir = 'workingdirs/transform_manual_func_mask'

    wrapper = Workflow(
        name='transform_manual_func_mask',
        base_dir=os.path.join(ds_root, working_dir))

    # -------------------------------------------- Create Pipeline
    workflow = create_workflow()

    wrapper.connect([(infosource, inputfiles,
                      [('subject_id', 'subject_id'),
                       ('session_id', 'session_id'),
                       ])])

    wrapper.connect(inputfiles, 'ref_funcmask',
                    workflow, 'in.ref_funcmask')
    wrapper.connect(inputfiles, 'funcs',
                    workflow, 'in.funcs')
    wrapper.connect(inputfiles, 'ref_func',
                    workflow, 'in.ref_func')

    wrapper.stop_on_first_crash = True
    wrapper.keep_inputs = True
    wrapper.remove_unnecessary_outputs = False
    wrapper.write_graph()
    wrapper.run()
Example #33
def create_denoise_pipeline(name='denoise'):

    # workflow
    denoise = Workflow(name='denoise')

    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=[
        'anat_brain', 'brain_mask', 'epi2anat_dat', 'unwarped_mean',
        'epi_coreg', 'moco_par', 'highpass_sigma', 'lowpass_sigma', 'tr'
    ]),
                     name='inputnode')

    outputnode = Node(interface=util.IdentityInterface(fields=[
        'wmcsf_mask', 'brain_mask_resamp', 'brain_mask2epi', 'combined_motion',
        'outlier_files', 'intensity_files', 'outlier_stats', 'outlier_plots',
        'mc_regressor', 'mc_F', 'mc_pF', 'comp_regressor', 'comp_F', 'comp_pF',
        'normalized_file'
    ]),
                      name='outputnode')

    # run fast to get tissue probability classes
    fast = Node(fsl.FAST(), name='fast')
    denoise.connect([(inputnode, fast, [('anat_brain', 'in_files')])])

    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # resample tissue classes
    resample_tissue = MapNode(afni.Resample(resample_mode='NN',
                                            outputtype='NIFTI_GZ'),
                              iterfield=['in_file'],
                              name='resample_tissue')

    denoise.connect([
        (inputnode, resample_tissue, [('epi_coreg', 'master')]),
        (fast, resample_tissue, [(('partial_volume_files', selectindex,
                                   [0, 2]), 'in_file')]),
    ])
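    # With FAST's default output ordering, partial_volume_files are
    # [csf, gm, wm], so selectindex(..., [0, 2]) keeps the CSF and WM maps.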

    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')

    denoise.connect([
        (resample_tissue, binarize_tissue, [('out_file', 'in_file')]),
    ])

    # combine tissue classes to noise mask
    wmcsf_mask = Node(fsl.BinaryMaths(operation='add',
                                      out_file='wmcsf_mask_lowres.nii.gz'),
                      name='wmcsf_mask')

    denoise.connect([(binarize_tissue, wmcsf_mask,
                      [(('out_file', selectsingle, 0), 'in_file'),
                       (('out_file', selectsingle, 1), 'operand_file')]),
                     (wmcsf_mask, outputnode, [('out_file', 'wmcsf_mask')])])

    # resample brain mask
    resample_brain = Node(afni.Resample(
        resample_mode='NN',
        outputtype='NIFTI_GZ',
        out_file='T1_brain_mask_lowres.nii.gz'),
                          name='resample_brain')

    denoise.connect([(inputnode, resample_brain, [('brain_mask', 'in_file'),
                                                  ('epi_coreg', 'master')]),
                     (resample_brain, outputnode, [('out_file',
                                                    'brain_mask_resamp')])])

    # project brain mask into original epi space for quality assessment
    brainmask2epi = Node(fs.ApplyVolTransform(
        interp='nearest',
        inverse=True,
        transformed_file='T1_brain_mask2epi.nii.gz',
    ),
                         name='brainmask2epi')

    denoise.connect([
        (inputnode, brainmask2epi, [('brain_mask', 'target_file'),
                                    ('epi2anat_dat', 'reg_file'),
                                    ('unwarped_mean', 'source_file')]),
        (brainmask2epi, outputnode, [('transformed_file', 'brain_mask2epi')])
    ])

    # perform artefact detection
    artefact = Node(ra.ArtifactDetect(save_plot=True,
                                      use_norm=True,
                                      parameter_source='FSL',
                                      mask_type='file',
                                      norm_threshold=1,
                                      zintensity_threshold=3,
                                      use_differences=[True, False]),
                    name='artefact')
    artefact.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([
        (inputnode, artefact, [('epi_coreg', 'realigned_files'),
                               ('moco_par', 'realignment_parameters')]),
        (resample_brain, artefact, [('out_file', 'mask_file')]),
        (artefact, outputnode, [('norm_files', 'combined_motion'),
                                ('outlier_files', 'outlier_files'),
                                ('intensity_files', 'intensity_files'),
                                ('statistic_files', 'outlier_stats'),
                                ('plot_files', 'outlier_plots')])
    ])

    # Compute motion regressors
    motreg = Node(util.Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors),
                  name='getmotionregress')
    motreg.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([(inputnode, motreg, [('moco_par', 'motion_params')])])

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(util.Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    createfilter1.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([
        (motreg, createfilter1, [('out_files', 'motion_params')]),
        (artefact, createfilter1,
         [  #('norm_files', 'comp_norm'),
             ('outlier_files', 'outliers')]),
        (createfilter1, outputnode, [('out_files', 'mc_regressor')])
    ])

    # regress out motion and art confounds
    filter1 = Node(fsl.GLM(out_f_name='F_mcart.nii.gz',
                           out_pf_name='pF_mcart.nii.gz',
                           out_res_name='rest_mc_denoised.nii.gz',
                           demean=True),
                   name='filtermotion')

    filter1.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([(inputnode, filter1, [('epi_coreg', 'in_file')]),
                     (createfilter1, filter1,
                      [(('out_files', list_to_filename), 'design')]),
                     (filter1, outputnode, [('out_f', 'mc_F'),
                                            ('out_pf', 'mc_pF')])])

    # create filter with compcor components
    createfilter2 = Node(util.Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                       output_names=['out_files'],
                                       function=extract_noise_components),
                         name='makecompcorfilter')
    createfilter2.inputs.num_components = 6
    createfilter2.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([
        (createfilter1, createfilter2, [(('out_files', list_to_filename),
                                         'extra_regressors')]),
        (filter1, createfilter2, [('out_res', 'realigned_file')]),
        (wmcsf_mask, createfilter2, [('out_file', 'mask_file')]),
        (createfilter2, outputnode, [('out_files', 'comp_regressor')]),
    ])

    # regress compcor and other noise components
    filter2 = Node(fsl.GLM(out_f_name='F_noise.nii.gz',
                           out_pf_name='pF_noise.nii.gz',
                           out_res_name='rest2anat_denoised.nii.gz',
                           demean=True),
                   name='filternoise')

    filter2.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([(filter1, filter2, [('out_res', 'in_file')]),
                     (createfilter2, filter2, [('out_files', 'design')]),
                     (resample_brain, filter2, [('out_file', 'mask')]),
                     (filter2, outputnode, [('out_f', 'comp_F'),
                                            ('out_pf', 'comp_pF')])])

    # bandpass filter denoised file
    bandpass_filter = Node(
        fsl.TemporalFilter(out_file='rest_denoised_bandpassed.nii.gz'),
        name='bandpass_filter')

    bandpass_filter.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([(inputnode, bandpass_filter,
                      [('highpass_sigma', 'highpass_sigma'),
                       ('lowpass_sigma', 'lowpass_sigma')]),
                     (filter2, bandpass_filter, [('out_res', 'in_file')])])

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')

    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}

    denoise.connect([
        (inputnode, normalize_time, [('tr', 'tr')]),
        (bandpass_filter, normalize_time, [('out_file', 'in_file')]),
        (normalize_time, outputnode, [('out_file', 'normalized_file')])
    ])

    return denoise
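
# A minimal, hedged usage sketch (values are illustrative; FSL's TemporalFilter
# takes sigmas in volumes, commonly computed as sigma = 1 / (2 * TR * cutoff_Hz)):
if __name__ == '__main__':
    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.tr = 2.0
    denoise.inputs.inputnode.highpass_sigma = 25.0  # ~0.01 Hz at TR=2s
    denoise.inputs.inputnode.lowpass_sigma = 2.5    # ~0.1 Hz at TR=2s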
Example #34
def make_workflow(n_fmap=10):
    n_in = Node(IdentityInterface(fields=[
        'func',
        'fmap',
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'func1',
        'func2',
        'mean',
        ]), name='output')

    w = Workflow('preproc')

    w_mc_func = make_w_mcmean('func')
    if n_fmap == 1:  # nipype cannot handle conditional nodes
        w_mc_fmap = identify_workflow()
    else:
        w_mc_fmap = make_w_mcmean('fmap')
    w_masking = make_w_masking()
    w_warp = make_w_warp()

    n_apply = Node(interface=NwarpApply(), name='warpapply')
    n_apply.inputs.out_file = 'preprocessed.nii'

    n_mean = Node(interface=TStat(), name='mean')
    n_mean.inputs.args = '-mean'
    n_mean.inputs.outputtype = 'NIFTI_GZ'

    n_roi1 = Node(ExtractROI(), 'split1')
    n_roi1.inputs.t_min = 0
    n_roi1.inputs.roi_file = 'preprocessed_1.nii.gz'
    n_roi2 = Node(ExtractROI(), 'split2')
    n_roi2.inputs.roi_file = 'preprocessed_2.nii.gz'

    w.connect(n_in, 'fmap', w_mc_fmap, 'input.epi')

    w.connect(w_mc_fmap, 'output.mean', w_masking, 'input.fmap')
    w.connect(n_in, 'func', w_masking, 'input.func')
    w.connect(w_masking, 'output.func', w_mc_func, 'input.epi')

    w.connect(w_masking, 'output.fmap', w_warp, 'input.fmap')
    w.connect(w_mc_func, 'output.mean', w_warp, 'input.func')
    w.connect(w_mc_func, 'output.motion_parameters', w_warp, 'input.motion_parameters')

    w.connect(w_warp, 'output.warping', n_apply, 'warp')
    w.connect(w_masking, 'output.func', n_apply, 'in_file')
    w.connect(w_mc_fmap, 'output.mean', n_apply, 'master')
    w.connect(n_apply, 'out_file', n_mean, 'in_file')

    w.connect(n_apply, 'out_file', n_roi1, 'in_file')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi1, 't_size')
    w.connect(n_apply, 'out_file', n_roi2, 'in_file')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi2, 't_min')
    w.connect(n_apply, ('out_file', _half_dynamics), n_roi2, 't_size')

    w.connect(n_mean, 'out_file', n_out, 'mean')
    w.connect(n_roi1, 'roi_file', n_out, 'func1')
    w.connect(n_roi2, 'roi_file', n_out, 'func2')

    return w
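
# Hedged usage note: with a single fieldmap volume the fieldmap branch is an
# identity workflow (nipype cannot express conditional nodes), e.g.:
#   w = make_workflow(n_fmap=1)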
Example #35
def main():

    #######################
    # Commandline Arguments
    #######################
    # list of subject identifiers
    task_name = "Training" if training else "Test"
    print(project_folder, subject_list, task_name, nb_prc)

    #############################################################
    # Extracting fMRI Params (Only works with Kamitani's Dataset)
    #############################################################
    TR = 3.0
    voxel_size = (3, 3, 3)
    number_of_slices = 50
    json_file1 = opj(project_folder,
                     "dataset/ds001246-download/task-imagery_bold.json")
    json_file2 = opj(project_folder,
                     "dataset/ds001246-download/task-perception_bold.json")

    with open(json_file1) as f:
        data = json.load(f)
    slice_timing1 = data['SliceTiming']

    with open(json_file2) as f:
        data = json.load(f)
    slice_timing2 = data['SliceTiming']

    sorted1 = np.argsort(slice_timing1)
    sorted2 = np.argsort(slice_timing2)
    # sanity check: both tasks should have the same slice-timing order
    print(np.all(sorted1 == sorted2))

    # +1 because SPM expects 1-based slice indices (np.argsort is 0-based)
    slice_order = list(sorted1 + 1)
    print("Slice order:", slice_order)

    ##########################
    # Creating essential nodes
    ##########################
    # Model Spec
    modelspec_node = Node(SpecifySPMModel(concatenate_runs=True,
                                          input_units='secs',
                                          output_units='secs',
                                          time_repetition=TR,
                                          high_pass_filter_cutoff=128),
                          name='modelspec')

    # Level1Design - Generates a SPM design matrix
    level1design_node = Node(Level1Design(bases={'hrf': {
        'derivs': [0, 0]
    }},
                                          timing_units='secs',
                                          interscan_interval=TR,
                                          model_serial_correlations='AR(1)',
                                          mask_threshold='-Inf'),
                             name="level1design")

    # EstimateModel - estimate the parameters of the model (GLM)
    level1estimate_node = Node(
        EstimateModel(estimation_method={'Classical': 1}),
        name="level1estimate")

    # Infosource - a function-free node to iterate over the list of subject names
    infosrc_subjects = Node(IdentityInterface(fields=['subject_id']),
                            name="infosrc_subjects")
    infosrc_subjects.iterables = [('subject_id', subject_list)]

    # SelectFiles - selects files based on template matching
    tsv_file = opj('dataset', 'ds001246-download', '{subject_id}',
                   'ses-p*' + task_name + '*', 'func',
                   '{subject_id}_ses-p*' + task_name + '*_task-*_events.tsv')
    reg_file = opj('preprocess', '_subject_id_{subject_id}',
                   '_session_id_ses-p*' + task_name + '*', 'Realign',
                   'rp_a{subject_id}_ses-p*' + task_name + '*_task-*_bold.txt')
    func_file = opj(
        'preprocess', '_subject_id_{subject_id}',
        '_session_id_ses-p*' + task_name + '*', 'Coregister',
        'rara{subject_id}_ses-p*' + task_name + '*_task-*_bold.nii')
    mask_file = opj('datasink', 'preprocessed_masks', '{subject_id}',
                    '{subject_id}_full_mask.nii')

    templates = {
        'tsv': tsv_file,
        'reg': reg_file,
        'func': func_file,
        'mask': mask_file
    }

    selectfiles = Node(SelectFiles(templates, base_directory=project_folder),
                       name="selectfiles")

    # Subject Info
    subject_info_node = Node(Function(
        input_names=['tsv_files'],
        output_names=['subject_info'],
        function=read_tsv_train if training else read_tsv_test),
                             name='subject_info')

    # Datasink - creates output folder for important outputs
    datasink_node = Node(DataSink(base_directory=project_folder,
                                  container='datasink'),
                         name="datasink")

    substitutions = [('_subject_id_', '')]
    datasink_node.inputs.substitutions = substitutions

    #####################
    # Create the workflow
    #####################
    wf_name = 'glm_train_nomod' if training else 'glm_test'
    glm = Workflow(name=wf_name)
    glm.base_dir = project_folder

    # connect infosource to selectfile
    glm.connect([(infosrc_subjects, selectfiles, [('subject_id', 'subject_id')
                                                  ])])
    glm.connect([(selectfiles, subject_info_node, [('tsv', 'tsv_files')])])

    # connect infos to modelspec
    glm.connect([(subject_info_node, modelspec_node, [('subject_info',
                                                       'subject_info')])])
    glm.connect([(selectfiles, modelspec_node, [('reg',
                                                 'realignment_parameters')])])
    glm.connect([(selectfiles, modelspec_node, [('func', 'functional_runs')])])

    # connect modelspec to level1design
    glm.connect([(modelspec_node, level1design_node, [('session_info',
                                                       'session_info')])])
    glm.connect([(selectfiles, level1design_node, [('mask', 'mask_image')])])

    # connect design to estimate
    glm.connect([(level1design_node, level1estimate_node, [('spm_mat_file',
                                                            'spm_mat_file')])])

    # keeping estimate files params
    glm.connect([(level1estimate_node, datasink_node,
                  [('mask_image', f'{wf_name}.@mask_img')])])
    glm.connect([(level1estimate_node, datasink_node,
                  [('beta_images', f'{wf_name}.@beta_imgs')])])
    glm.connect([(level1estimate_node, datasink_node,
                  [('residual_image', f'{wf_name}.@res_img')])])
    glm.connect([(level1estimate_node, datasink_node,
                  [('RPVimage', f'{wf_name}.@rpv_img')])])
    glm.connect([(level1estimate_node, datasink_node,
                  [('spm_mat_file', f'{wf_name}.@spm_mat_file')])])

    glm.write_graph(graph2use='flat', format='png', simple_form=True)
    #     from IPython.display import Image
    #     Image(filename=opj(glm.base_dir, wf_name, 'graph_detailed.png'))

    ##################
    # Run the workflow
    ##################
    glm.run('MultiProc', plugin_args={'n_procs': nb_prc})
Example #36
def init_mriqc(opts, retval):
    """Build the workflow enumerator"""

    from bids.grabbids import BIDSLayout
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow

    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow

    retval['workflow'] = None
    retval['plugin_settings'] = None

    # Build settings dict
    bids_dir = Path(opts.bids_dir).expanduser()
    output_dir = Path(opts.output_dir).expanduser()

    # Number of processes
    n_procs = opts.n_procs or cpu_count()

    settings = {
        'bids_dir': bids_dir.resolve(),
        'output_dir': output_dir.resolve(),
        'work_dir': opts.work_dir.expanduser().resolve(),
        'write_graph': opts.write_graph,
        'n_procs': n_procs,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.dsname:
        settings['dataset_name'] = opts.dsname

    log_dir = settings['output_dir'] / 'logs'

    # Create directories
    log_dir.mkdir(parents=True, exist_ok=True)
    settings['work_dir'].mkdir(parents=True, exist_ok=True)

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': str(log_dir), 'log_to_file': True},
        'execution': {
            'crashdump_dir': str(log_dir), 'crashfile_format': 'txt',
            'resource_monitor': opts.profile},
    })

    # Plugin configuration
    plugin_settings = {}
    if n_procs == 1:
        plugin_settings['plugin'] = 'Linear'

        if settings['ants_nthreads'] == 0:
            settings['ants_nthreads'] = 1
    else:
        plugin_settings['plugin'] = 'MultiProc'
        plugin_settings['plugin_args'] = {'n_procs': n_procs}
        if opts.mem_gb:
            plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

        if settings['ants_nthreads'] == 0:
            # always leave one extra thread for non-ANTs work;
            # don't use more than 8 threads - the speed-up is minimal
            settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)

    # Overwrite options if --use-plugin provided
    if opts.use_plugin and opts.use_plugin.exists():
        from yaml import safe_load
        with opts.use_plugin.open() as pfile:
            plugin_settings.update(safe_load(pfile))

    # Process data types
    modalities = opts.modalities

    layout = BIDSLayout(str(settings['bids_dir']),
                        exclude=['derivatives', 'sourcedata'])
    dataset = collect_bids_data(
        layout,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
        bids_type=modalities,
    )

    workflow = Workflow(name='workflow_enumerator')
    workflow.base_dir = settings['work_dir']

    wf_list = []
    subject_list = []
    for mod in modalities:
        if dataset[mod]:
            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))
            subject_list += dataset[mod]

    retval['subject_list'] = subject_list
    if not wf_list:
        retval['return_code'] = 1
        return retval

    workflow.add_nodes(wf_list)
    retval['plugin_settings'] = plugin_settings
    retval['workflow'] = workflow
    retval['return_code'] = 0
    return retval
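
# Hedged usage sketch of how the returned dict is typically consumed
# (plugin_settings holds 'plugin' and, for MultiProc, 'plugin_args'):
#   retval = {}
#   init_mriqc(opts, retval)
#   if retval['return_code'] == 0:
#       retval['workflow'].run(**retval['plugin_settings'])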
Example #37
def create_resting():

    # main workflow
    func_preproc = Workflow(name='resting')

    inputnode = Node(util.IdentityInterface(fields=[
        'subject_id', 'out_dir', 'freesurfer_dir', 'func', 'rs_mag', 'rs_ph',
        'anat_head', 'anat_brain', 'anat_brain_mask', 'wmseg', 'csfseg',
        'vol_to_remove', 'TR', 'highpass_freq', 'epi_resolution', 'echo_space',
        'te_diff', 'fwhm', 'pe_dir', 'composite_transform', 'standard_brain',
        'standard_downsampled'
    ]),
                     name='inputnode')

    #Use correct subject ID from long timepoint for bbregister
    def change_subject_id(subject):
        import re
        [subj, ses] = re.split("_", subject)

        new_subject_id = subject + '.long.' + subj
        return new_subject_id

    change_subject_id = Node(util.Function(input_names=["subject"],
                                           output_names=["new_subject_id"],
                                           function=change_subject_id),
                             name="change_subject_id")

    outputnode = Node(util.IdentityInterface(fields=[
        'brain', 'brainmask', 'anat2std_transforms', 'std2anat_transforms',
        'anat2std', 'anat_head', 'wmseg', 'csfseg', 'wmedge', 'subject_id'
    ]),
                      name='outputnode')

    ##PREPROCESSING FOR AROMA (Steps 1 - 7)
    def merge_if_list(in_file):
        if isinstance(in_file, list):
            import numpy as np
            import nibabel as nb
            import os
            from nipype.utils.filemanip import split_filename
            nii1 = nb.load(in_file[0])
            nii1d = nii1.get_fdata()
            nii2 = nb.load(in_file[1])
            nii2d = nii2.get_fdata()
            # concatenate the two runs along the time axis
            x = np.concatenate((nii1d, nii2d), axis=3)
            new_nii = nb.Nifti1Image(x, nii1.affine, nii1.header)
            new_nii.set_data_dtype(np.float32)
            _, base, _ = split_filename(in_file[0])
            nb.save(new_nii, base + "_merged.nii.gz")
            return os.path.abspath(base + "_merged.nii.gz")
        else:
            return in_file

    # if rsfmri is a list -> merge files, otherwise return the single file.
    merge_rs = Node(util.Function(input_names=['in_file'],
                                  output_names=["out_file"],
                                  function=merge_if_list),
                    name='merge_rs')

    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')

    # workflow for motion correction
    moco = create_moco_pipeline()

    # workflow for fieldmap correction and coregistration
    fmap_coreg = create_fmap_coreg_pipeline()

    # workflow for applying transformations to timeseries
    transform_ts = create_transform_pipeline()

    #mean intensity normalization
    meanintensnorm = Node(fsl.ImageMaths(op_string='-ing 10000'),
                          name='meanintensnorm')

    smoothing = create_smoothing_pipeline()

    # connections
    func_preproc.connect([
        (inputnode, merge_rs, [('func', 'in_file')]),
        (merge_rs, remove_vol, [('out_file', 'in_file')]),
        (inputnode, remove_vol, [('vol_to_remove', 't_min')]),
        (inputnode, moco, [('anat_brain_mask', 'inputnode.brainmask')]),
        (remove_vol, moco, [('out_file', 'inputnode.epi')]),
        (inputnode, change_subject_id, [('subject_id', 'subject')]),
        (change_subject_id, fmap_coreg, [('new_subject_id',
                                          'inputnode.fs_subject_id')]),
        (inputnode, fmap_coreg, [('rs_mag', 'inputnode.mag'),
                                 ('rs_ph', 'inputnode.phase'),
                                 ('freesurfer_dir',
                                  'inputnode.fs_subjects_dir'),
                                 ('echo_space', 'inputnode.echo_space'),
                                 ('te_diff', 'inputnode.te_diff'),
                                 ('pe_dir', 'inputnode.pe_dir'),
                                 ('anat_head', 'inputnode.anat_head'),
                                 ('anat_brain', 'inputnode.anat_brain')]),
        (moco, fmap_coreg, [('outputnode.epi_mean', 'inputnode.epi_mean')]),
        (remove_vol, transform_ts, [('out_file', 'inputnode.orig_ts')]),
        (inputnode, transform_ts, [('anat_head', 'inputnode.anat_head')]),
        (inputnode, transform_ts, [('anat_brain_mask', 'inputnode.brain_mask')
                                   ]),
        (inputnode, transform_ts, [('epi_resolution', 'inputnode.resolution')
                                   ]),
        (moco, transform_ts, [('outputnode.mat_moco', 'inputnode.mat_moco')]),
        (fmap_coreg, transform_ts, [('outputnode.fmap_fullwarp',
                                     'inputnode.fullwarp')]),
        (transform_ts, meanintensnorm, [('outputnode.trans_ts', 'in_file')]),
        (meanintensnorm, smoothing, [('out_file', 'inputnode.ts_transformed')
                                     ]),
        (inputnode, smoothing, [('fwhm', 'inputnode.fwhm')])
    ])

    ##CALCULATE TRANSFORM from anatomical to standard space with FSL tools
    # Anat > Standard
    # register the high-resolution anatomy to the standard template with a
    # non-linear transform (flirt serves as preparation for fnirt)

    #reorient brain to standard (because Freesurfer space can cause problems)
    reorient2std = Node(fsl.Reorient2Std(), name="reorient2std")

    reorient2std_rs = Node(fsl.Reorient2Std(), name="reorient2std_rs")
    reorient2std_mask = Node(fsl.Reorient2Std(), name="reorient2std_mask")

    flirt_prep = Node(fsl.FLIRT(cost_func='mutualinfo', interp='trilinear'),
                      name='flirt_prep')
    flirt_prep.inputs.dof = 12

    fnirt = Node(fsl.FNIRT(), name='fnirt')
    fnirt.inputs.field_file = True
    fnirt.inputs.fieldcoeff_file = True

    func_preproc.connect([
        (inputnode, reorient2std, [('anat_brain', 'in_file')]),
        (reorient2std, flirt_prep, [('out_file', 'in_file')]),
        #(inputnode, flirt_prep,  [('anat_brain', 'in_file')]),
        (inputnode, flirt_prep, [('standard_brain', 'reference')]),
        (flirt_prep, fnirt, [('out_matrix_file', 'affine_file')]),
        (reorient2std, fnirt, [('out_file', 'in_file')]),
        (inputnode, fnirt, [('standard_brain', 'ref_file')]),
    ])
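    # FLIRT's affine initializes FNIRT, following FSL's standard two-stage
    # (linear, then non-linear) registration approach.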

    def getcwd(subject_id):
        import os
        tmp = os.getcwd()
        # strip the trailing node directory name ('get_wd', 6 characters) and
        # point ICA-AROMA's output to a per-run 'ica_aroma/out' directory
        tmp = tmp[:-6]
        tmp = tmp + 'ica_aroma/out'
        return tmp

    get_wd = Node(util.Function(input_names=['subject_id'],
                                output_names=["d"],
                                function=getcwd),
                  name='get_wd')

    ica_aroma = Node(ICA_AROMA(), name="ica_aroma")
    ica_aroma.inputs.denoise_type = 'both'
    #ica_aroma.inputs.out_dir = os.getcwd()

    func_preproc.connect([
        (moco, ica_aroma, [('outputnode.par_moco', 'motion_parameters')]),
        (smoothing, reorient2std_rs, [('outputnode.ts_smoothed', 'in_file')]),
        (reorient2std_rs, ica_aroma, [('out_file', 'in_file')]),
        (fnirt, ica_aroma, [('field_file', 'fnirt_warp_file')]),
        (transform_ts, reorient2std_mask, [('outputnode.comb_mask_resamp',
                                            'in_file')]),
        (reorient2std_mask, ica_aroma, [('out_file', 'mask')]),
        (inputnode, get_wd, [('subject_id', 'subject_id')]),
        (get_wd, ica_aroma, [('d', 'out_dir')])
    ])

    ##POSTPROCESSING
    postprocess = create_denoise_pipeline()

    func_preproc.connect([
        # use the correctly oriented mask
        (reorient2std_mask, postprocess, [('out_file',
                                           'inputnode.brain_mask')]),
        # use the non-aggressively denoised file
        (ica_aroma, postprocess, [('nonaggr_denoised_file',
                                   'inputnode.epi_coreg')]),
        (inputnode, postprocess, [('TR', 'inputnode.tr')]),
        (inputnode, postprocess, [('highpass_freq', 'inputnode.highpass_freq')
                                  ]),
        (inputnode, postprocess, [('wmseg', 'inputnode.wmseg')]),
        (inputnode, postprocess, [('csfseg', 'inputnode.csfseg')]),
    ])

    #outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'par', 'rms', 'mean_epi', 'tsnr', 'stddev_file', 'realigned_ts',
        'fmap', 'unwarped_mean_epi2fmap', 'coregistered_epi2fmap',
        'fmap_fullwarp', 'epi2anat', 'epi2anat_mat', 'epi2anat_dat',
        'epi2anat_mincost', 'full_transform_ts', 'full_transform_mean',
        'resamp_t1', 'comb_mask_resamp', 'dvars_file', 'out_flirt_prep',
        'out_matrix_flirt_prep', 'out_warped', 'out_warp_field',
        'aggr_denoised_file', 'nonaggr_denoised_file', 'out_dir', 'wmcsf_mask',
        'combined_motion', 'comp_regressor', 'comp_F', 'comp_pF', 'out_betas',
        'ts_fullspectrum', 'ts_filtered'
    ]),
                      name='outputnode')

    # connections
    func_preproc.connect([
        (moco, outputnode,
         [  #('outputnode.epi_moco', 'realign.@realigned_ts'),
            ('outputnode.par_moco', 'par'),
            ('outputnode.rms_moco', 'rms'),
            ('outputnode.epi_moco', 'realigned_ts'),
            ('outputnode.epi_mean', 'mean_epi'),
            ('outputnode.tsnr_file', 'tsnr'),
            # fixed: the outputnode field is 'stddev_file', not 'stddev'
            ('outputnode.stddev_file', 'stddev_file'),
         ]),
        (fmap_coreg, outputnode,
         [('outputnode.fmap', 'fmap'),
          ('outputnode.unwarped_mean_epi2fmap', 'unwarped_mean_epi2fmap'),
          ('outputnode.epi2fmap', 'coregistered_epi2fmap'),
          ('outputnode.fmap_fullwarp', 'fmap_fullwarp'),
          ('outputnode.epi2anat', 'epi2anat'),
          ('outputnode.epi2anat_mat', 'epi2anat_mat'),
          ('outputnode.epi2anat_dat', 'epi2anat_dat'),
          ('outputnode.epi2anat_mincost', 'epi2anat_mincost')]),
        (transform_ts, outputnode,
         [('outputnode.trans_ts', 'full_transform_ts'),
          ('outputnode.trans_ts_mean', 'full_transform_mean'),
          ('outputnode.resamp_t1', 'resamp_t1'),
          ('outputnode.comb_mask_resamp', 'comb_mask_resamp'),
          ('outputnode.out_dvars', 'dvars_file')]),
        (flirt_prep, outputnode, [('out_file', 'out_flirt_prep'),
                                  ('out_matrix_file', 'out_matrix_flirt_prep')
                                  ]),
        (fnirt, outputnode, [('warped_file', 'out_warped'),
                             ('field_file', 'out_warp_field')]),
        (ica_aroma, outputnode, [('aggr_denoised_file', 'aggr_denoised_file'),
                                 ('nonaggr_denoised_file',
                                  'nonaggr_denoised_file'),
                                 ('out_dir', 'out_dir')]),
        (postprocess, outputnode,
         [('outputnode.wmcsf_mask', 'wmcsf_mask'),
          ('outputnode.combined_motion', 'combined_motion'),
          ('outputnode.comp_regressor', 'comp_regressor'),
          ('outputnode.comp_F', 'comp_F'), ('outputnode.comp_pF', 'comp_pF'),
          ('outputnode.out_betas', 'out_betas'),
          ('outputnode.ts_fullspectrum', 'ts_fullspectrum'),
          ('outputnode.ts_filtered', 'ts_filtered')])
    ])

    return func_preproc
Example #38
def make_w_masking():
    w_mask = Workflow('masking')

    n_in = Node(
        IdentityInterface(fields=[
            'T1w',
            'subject',  # without sub-
            'freesurfer2func',
            'func',
        ]),
        name='input')

    n_out = Node(IdentityInterface(fields=[
        'func',
    ]), name='output')

    n_fl = Node(FLIRT(), name='flirt')
    n_fl.inputs.output_type = 'NIFTI_GZ'
    n_fl.inputs.apply_xfm = True
    n_fl.inputs.interp = 'nearestneighbour'

    n_conv = Node(MRIConvert(), name='convert')
    n_conv.inputs.out_type = 'niigz'

    reconall = Node(ReconAll(), name='reconall')
    reconall.inputs.directive = 'all'
    reconall.inputs.subjects_dir = '/Fridge/R01_BAIR/freesurfer'

    w_mask.connect(n_in, 'T1w', reconall, 'T1_files')
    w_mask.connect(n_in, 'subject', reconall, 'subject_id')

    n_mul = Node(interface=BinaryMaths(), name='mul')
    n_mul.inputs.operation = 'mul'

    w_mask.connect(reconall, ('ribbon', select_ribbon), n_conv, 'in_file')
    w_mask.connect(n_conv, 'out_file', n_fl, 'in_file')
    w_mask.connect(n_in, 'func', n_fl, 'reference')
    w_mask.connect(n_in, 'freesurfer2func', n_fl, 'in_matrix_file')

    w_mask.connect(n_in, 'func', n_mul, 'in_file')
    w_mask.connect(n_fl, 'out_file', n_mul, 'operand_file')

    w_mask.connect(n_mul, 'out_file', n_out, 'func')
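    # The functional run is masked by voxel-wise multiplication with the
    # FreeSurfer ribbon image (selected by the external select_ribbon helper,
    # resampled into functional space via FLIRT).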

    return w_mask
Example #39
# In[2]:
experiment_dir = '/media/amr/Amr_4TB/Work/stimulation'

subject_list = [
    '003', '005', '008', '011', '130', '018', '019', '020', '059', '060',
    '062', '063', '066', '126', '127', '146'
]

# session_list = ['run001', 'run002', 'run003']

# frequency_list = ['10Hz', '20Hz', '40Hz']

output_dir = 'Stimulation_2nd_level_OutputDir_10Hz'
working_dir = 'Stimulation_2nd_level_WorkingDir_10Hz'

stimulation_2nd_level = Workflow(name='stimulation_2nd_level_10Hz')
stimulation_2nd_level.base_dir = opj(experiment_dir, working_dir)

#==========================================================================================================================================================
# In[3]:
infosource = Node(IdentityInterface(fields=['subject_id']), name="infosource")

infosource.iterables = [('subject_id', subject_list)]

#==========================================================================================================================================================
# In[4]:
# sub-001_task-MGT_run--02_bold.nii.gz, sub-001_task-MGT_run--02_sbref.nii.gz
#/preproc_img/run--04sub-119/smoothed_all_maths_filt_maths.nii.gz
# functional runs

template_brain = '/media/amr/Amr_4TB/Work/October_Acquistion/Anat_Template_Enhanced.nii.gz'
Example #40
    def create(self):  # , **kwargs):
        """ Create the nodes and connections for the workflow """

        # Preamble
        csvReader = CSVReader()
        csvReader.inputs.in_file = self.csv_file.default_value
        csvReader.inputs.header = self.hasHeader.default_value
        csvOut = csvReader.run()

        print(("=" * 80))
        print((csvOut.outputs.__dict__))
        print(("=" * 80))

        iters = OrderedDict()
        label = list(csvOut.outputs.__dict__.keys())[0]
        result = getattr(csvOut.outputs, label)
        iters["tests"], iters["trains"] = sample_crossvalidation_set(
            result, self.sample_size.default_value
        )
        # Main event
        out_fields = ["T1", "T2", "Label", "trainindex", "testindex"]
        inputsND = Node(
            interface=IdentityInterface(fields=out_fields),
            run_without_submitting=True,
            name="inputs",
        )
        inputsND.iterables = [
            ("trainindex", iters["trains"]),
            ("testindex", iters["tests"]),
        ]
        if not self.hasHeader.default_value:
            inputsND.inputs.T1 = csvOut.outputs.column_0
            inputsND.inputs.Label = csvOut.outputs.column_1
            inputsND.inputs.T2 = csvOut.outputs.column_2
        else:
            inputsND.inputs.T1 = csvOut.outputs.__dict__["t1"]
            inputsND.inputs.Label = csvOut.outputs.__dict__["label"]
            inputsND.inputs.T2 = csvOut.outputs.__dict__["t2"]
            pass  # TODO
        metaflow = Workflow(name="metaflow")
        metaflow.config["execution"] = {
            "plugin": "Linear",
            "stop_on_first_crash": "false",
            "stop_on_first_rerun": "false",
            # This stops at first attempt to rerun, before running, and before deleting previous results.
            "hash_method": "timestamp",
            "single_thread_matlab": "true",  # Multi-core 2011a  multi-core for matrix multiplication.
            "remove_unnecessary_outputs": "true",
            "use_relative_paths": "false",  # relative paths should be on, require hash update when changed.
            "remove_node_directories": "false",  # Experimental
            "local_hash_check": "false",
        }

        metaflow.add_nodes([inputsND])
        """import pdb; pdb.set_trace()"""
        fusionflow = FusionLabelWorkflow()
        self.connect(
            [
                (
                    metaflow,
                    fusionflow,
                    [
                        ("inputs.trainindex", "trainT1s.index"),
                        ("inputs.T1", "trainT1s.inlist"),
                    ],
                ),
                (
                    metaflow,
                    fusionflow,
                    [
                        ("inputs.trainindex", "trainLabels.index"),
                        ("inputs.Label", "trainLabels.inlist"),
                    ],
                ),
                (
                    metaflow,
                    fusionflow,
                    [
                        ("inputs.testindex", "testT1s.index"),
                        ("inputs.T1", "testT1s.inlist"),
                    ],
                ),
            ]
        )
Example #41
def Lesion_extractor(
    name='Lesion_Extractor',
    wf_name='Test',
    base_dir='/homes_unix/alaurent/',
    input_dir=None,
    subjects=None,
    main=None,
    acc=None,
    atlas='/homes_unix/alaurent/cbstools-public-master/atlases/brain-segmentation-prior3.0/brain-atlas-quant-3.0.8.txt'
):

    wf = Workflow(wf_name)
    wf.base_dir = base_dir

    #file = open(subjects,"r")
    #subjects = file.read().split("\n")
    #file.close()

    # Subject List
    subjectList = Node(IdentityInterface(fields=['subject_id'],
                                         mandatory_inputs=True),
                       name="subList")
    subjectList.iterables = ('subject_id', [
        sub for sub in subjects if sub != '' and sub != '\n'
    ])

    # T1w and FLAIR
    scanList = Node(DataGrabber(infields=['subject_id'],
                                outfields=['T1', 'FLAIR']),
                    name="scanList")
    scanList.inputs.base_directory = input_dir
    scanList.inputs.ignore_exception = False
    scanList.inputs.raise_on_empty = True
    scanList.inputs.sort_filelist = True
    #scanList.inputs.template = '%s/%s.nii'
    #scanList.inputs.template_args = {'T1': [['subject_id','T1*']],
    #                                 'FLAIR': [['subject_id','FLAIR*']]}
    scanList.inputs.template = '%s/anat/%s'
    scanList.inputs.template_args = {
        'T1': [['subject_id', '*_T1w.nii.gz']],
        'FLAIR': [['subject_id', '*_FLAIR.nii.gz']]
    }
    wf.connect(subjectList, "subject_id", scanList, "subject_id")

    #     # T1w and FLAIR
    #     dg = Node(DataGrabber(outfields=['T1', 'FLAIR']), name="T1wFLAIR")
    #     dg.inputs.base_directory = "/homes_unix/alaurent/LesionPipeline"
    #     dg.inputs.template = "%s/NIFTI/*.nii.gz"
    #     dg.inputs.template_args['T1']=[['7']]
    #     dg.inputs.template_args['FLAIR']=[['9']]
    #     dg.inputs.sort_filelist=True

    # Reorient Volume
    T1Conv = Node(Reorient2Std(), name="ReorientVolume")
    T1Conv.inputs.ignore_exception = False
    T1Conv.inputs.terminal_output = 'none'
    T1Conv.inputs.out_file = "T1_reoriented.nii.gz"
    wf.connect(scanList, "T1", T1Conv, "in_file")

    # Reorient Volume (2)
    T2flairConv = Node(Reorient2Std(), name="ReorientVolume2")
    T2flairConv.inputs.ignore_exception = False
    T2flairConv.inputs.terminal_output = 'none'
    T2flairConv.inputs.out_file = "FLAIR_reoriented.nii.gz"
    wf.connect(scanList, "FLAIR", T2flairConv, "in_file")

    # N3 Correction
    T1NUC = Node(N4BiasFieldCorrection(), name="N3Correction")
    T1NUC.inputs.dimension = 3
    T1NUC.inputs.environ = {'NSLOTS': '1'}
    T1NUC.inputs.ignore_exception = False
    T1NUC.inputs.num_threads = 1
    T1NUC.inputs.save_bias = False
    T1NUC.inputs.terminal_output = 'none'
    wf.connect(T1Conv, "out_file", T1NUC, "input_image")

    # N3 Correction (2)
    T2flairNUC = Node(N4BiasFieldCorrection(), name="N3Correction2")
    T2flairNUC.inputs.dimension = 3
    T2flairNUC.inputs.environ = {'NSLOTS': '1'}
    T2flairNUC.inputs.ignore_exception = False
    T2flairNUC.inputs.num_threads = 1
    T2flairNUC.inputs.save_bias = False
    T2flairNUC.inputs.terminal_output = 'none'
    wf.connect(T2flairConv, "out_file", T2flairNUC, "input_image")
    '''
    #####################
    ### PRE-NORMALIZE ###
    #####################
    Make sure there are no outlier values (negative or unusually high)
    to throw off the initialization steps.
    '''

    # Intensity Range Normalization
    getMaxT1NUC = Node(ImageStats(op_string='-r'), name="getMaxT1NUC")
    wf.connect(T1NUC, 'output_image', getMaxT1NUC, 'in_file')

    T1NUCirn = Node(AbcImageMaths(), name="IntensityNormalization")
    T1NUCirn.inputs.op_string = "-div"
    T1NUCirn.inputs.out_file = "normT1.nii.gz"
    wf.connect(T1NUC, 'output_image', T1NUCirn, 'in_file')
    wf.connect(getMaxT1NUC, ('out_stat', getElementFromList, 1), T1NUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2NUC = Node(ImageStats(op_string='-r'), name="getMaxT2")
    wf.connect(T2flairNUC, 'output_image', getMaxT2NUC, 'in_file')

    T2NUCirn = Node(AbcImageMaths(), name="IntensityNormalization2")
    T2NUCirn.inputs.op_string = "-div"
    T2NUCirn.inputs.out_file = "normT2.nii.gz"
    wf.connect(T2flairNUC, 'output_image', T2NUCirn, 'in_file')
    wf.connect(getMaxT2NUC, ('out_stat', getElementFromList, 1), T2NUCirn,
               "op_value")
    '''
    ########################
    #### COREGISTRATION ####
    ########################
    '''

    # Optimized Automated Registration
    T2flairCoreg = Node(FLIRT(), name="OptimizedAutomatedRegistration")
    T2flairCoreg.inputs.output_type = 'NIFTI_GZ'
    wf.connect(T2NUCirn, "out_file", T2flairCoreg, "in_file")
    wf.connect(T1NUCirn, "out_file", T2flairCoreg, "reference")
    '''    
    #########################
    #### SKULL-STRIPPING ####
    #########################
    '''

    # SPECTRE
    T1ss = Node(BET(), name="SPECTRE")
    T1ss.inputs.frac = 0.45  #0.4
    T1ss.inputs.mask = True
    T1ss.inputs.outline = True
    T1ss.inputs.robust = True
    wf.connect(T1NUCirn, "out_file", T1ss, "in_file")

    # Image Calculator
    T2ss = Node(ApplyMask(), name="ImageCalculator")
    wf.connect(T1ss, "mask_file", T2ss, "mask_file")
    wf.connect(T2flairCoreg, "out_file", T2ss, "in_file")
    '''
    ####################################
    #### 2nd LAYER OF N3 CORRECTION ####
    ####################################
    This time without the skull: there were significant inhomogeneities left over.
    '''

    # N3 Correction (3)
    T1ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction3")
    T1ssNUC.inputs.dimension = 3
    T1ssNUC.inputs.environ = {'NSLOTS': '1'}
    T1ssNUC.inputs.ignore_exception = False
    T1ssNUC.inputs.num_threads = 1
    T1ssNUC.inputs.save_bias = False
    T1ssNUC.inputs.terminal_output = 'none'
    wf.connect(T1ss, "out_file", T1ssNUC, "input_image")

    # N3 Correction (4)
    T2ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction4")
    T2ssNUC.inputs.dimension = 3
    T2ssNUC.inputs.environ = {'NSLOTS': '1'}
    T2ssNUC.inputs.ignore_exception = False
    T2ssNUC.inputs.num_threads = 1
    T2ssNUC.inputs.save_bias = False
    T2ssNUC.inputs.terminal_output = 'none'
    wf.connect(T2ss, "out_file", T2ssNUC, "input_image")
    '''
    ####################################
    ####    NORMALIZE FOR MGDM      ####
    ####################################
    This normalization is a bit aggressive: only useful to have a 
    cropped dynamic range into MGDM, but possibly harmful to further 
    processing, so the unprocessed images are passed to the subsequent steps.
    '''

    # Intensity Range Normalization
    getMaxT1ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT1ssNUC")
    wf.connect(T1ssNUC, 'output_image', getMaxT1ssNUC, 'in_file')

    T1ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization3")
    T1ssNUCirn.inputs.op_string = "-div"
    T1ssNUCirn.inputs.out_file = "normT1ss.nii.gz"
    wf.connect(T1ssNUC, 'output_image', T1ssNUCirn, 'in_file')
    wf.connect(getMaxT1ssNUC, ('out_stat', getElementFromList, 1), T1ssNUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT2ssNUC")
    wf.connect(T2ssNUC, 'output_image', getMaxT2ssNUC, 'in_file')

    T2ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization4")
    T2ssNUCirn.inputs.op_string = "-div"
    T2ssNUCirn.inputs.out_file = "normT2ss.nii.gz"
    wf.connect(T2ssNUC, 'output_image', T2ssNUCirn, 'in_file')
    wf.connect(getMaxT2ssNUC, ('out_stat', getElementFromList, 1), T2ssNUCirn,
               "op_value")
    '''
    ####################################
    ####      ESTIMATE CSF PV       ####
    ####################################
    Here we try to get a better handle on CSF voxels to help the segmentation step
    '''

    # Recursive Ridge Diffusion
    CSF_pv = Node(RecursiveRidgeDiffusion(), name='estimate_CSF_pv')
    CSF_pv.plugin_args = {'sbatch_args': '--mem 6000'}
    CSF_pv.inputs.ridge_intensities = "dark"
    CSF_pv.inputs.ridge_filter = "2D"
    CSF_pv.inputs.orientation = "undefined"
    CSF_pv.inputs.ang_factor = 1.0
    CSF_pv.inputs.min_scale = 0
    CSF_pv.inputs.max_scale = 3
    CSF_pv.inputs.propagation_model = "diffusion"
    CSF_pv.inputs.diffusion_factor = 0.5
    CSF_pv.inputs.similarity_scale = 0.1
    CSF_pv.inputs.neighborhood_size = 4
    CSF_pv.inputs.max_iter = 100
    CSF_pv.inputs.max_diff = 0.001
    CSF_pv.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, CSF_pv.name),
        CSF_pv, 'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', CSF_pv, 'input_image')
    '''
    ####################################
    ####            MGDM            ####
    ####################################
    '''

    # Multi-contrast Brain Segmentation
    MGDM = Node(MGDMSegmentation(), name='MGDM')
    MGDM.plugin_args = {'sbatch_args': '--mem 7000'}
    MGDM.inputs.contrast_type1 = "Mprage3T"
    MGDM.inputs.contrast_type2 = "FLAIR3T"
    MGDM.inputs.contrast_type3 = "PVDURA"
    MGDM.inputs.save_data = True
    MGDM.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, MGDM.name), MGDM,
        'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', MGDM, 'contrast_image1')
    wf.connect(T2ssNUCirn, 'out_file', MGDM, 'contrast_image2')
    wf.connect(CSF_pv, 'ridge_pv', MGDM, 'contrast_image3')

    # Enhance Region Contrast
    ERC = Node(EnhanceRegionContrast(), name='ERC')
    ERC.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC.inputs.enhanced_region = "crwm"
    ERC.inputs.contrast_background = "crgm"
    ERC.inputs.partial_voluming_distance = 2.0
    ERC.inputs.save_data = True
    ERC.inputs.atlas_file = atlas
    wf.connect(subjectList,
               ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC.name),
               ERC, 'output_dir')
    wf.connect(T1ssNUC, 'output_image', ERC, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC, 'levelset_boundary_image')

    # Enhance Region Contrast (2)
    ERC2 = Node(EnhanceRegionContrast(), name='ERC2')
    ERC2.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC2.inputs.enhanced_region = "crwm"
    ERC2.inputs.contrast_background = "crgm"
    ERC2.inputs.partial_voluming_distance = 2.0
    ERC2.inputs.save_data = True
    ERC2.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC2.name), ERC2,
        'output_dir')
    wf.connect(T2ssNUC, 'output_image', ERC2, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC2, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC2, 'levelset_boundary_image')

    # Define Multi-Region Priors
    DMRP = Node(DefineMultiRegionPriors(), name='DefineMultRegPriors')
    DMRP.plugin_args = {'sbatch_args': '--mem 6000'}
    #DMRP.inputs.defined_region = "ventricle-horns"
    #DMRP.inputs.definition_method = "closest-distance"
    DMRP.inputs.distance_offset = 3.0
    DMRP.inputs.save_data = True
    DMRP.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, DMRP.name), DMRP,
        'output_dir')
    wf.connect(MGDM, 'segmentation', DMRP, 'segmentation_image')
    wf.connect(MGDM, 'distance', DMRP, 'levelset_boundary_image')
    '''
    ###############################################
    ####      REMOVE VENTRICLE POSTERIOR       ####
    ###############################################
    Due to topology constraints, the ventricles are often not fully segmented:
    here we add back all ventricle voxels from the posterior probability
    (computed without the topology constraints).
    '''

    # Posterior label
    PostLabel = Node(Split(), name='PosteriorLabel')
    PostLabel.inputs.dimension = "t"
    wf.connect(MGDM, 'labels', PostLabel, 'in_file')

    # Posterior proba
    PostProba = Node(Split(), name='PosteriorProba')
    PostProba.inputs.dimension = "t"
    wf.connect(MGDM, 'memberships', PostProba, 'in_file')

    # Threshold binary mask : ventricle label part 1
    VentLabel1 = Node(Threshold(), name="VentricleLabel1")
    VentLabel1.inputs.thresh = 10.5
    VentLabel1.inputs.direction = "below"
    wf.connect(PostLabel, ("out_files", getFirstElement), VentLabel1,
               "in_file")

    # Threshold binary mask : ventricle label part 2
    VentLabel2 = Node(Threshold(), name="VentricleLabel2")
    VentLabel2.inputs.thresh = 13.5
    VentLabel2.inputs.direction = "above"
    wf.connect(VentLabel1, "out_file", VentLabel2, "in_file")

    # Image calculator : ventricle proba
    VentProba = Node(ImageMaths(), name="VentricleProba")
    VentProba.inputs.op_string = "-mul"
    VentProba.inputs.out_file = "ventproba.nii.gz"
    wf.connect(PostProba, ("out_files", getFirstElement), VentProba, "in_file")
    wf.connect(VentLabel2, "out_file", VentProba, "in_file2")
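
    # The three nodes above keep voxels whose best posterior label falls in the
    # open interval (10.5, 13.5), presumably the ventricle labels, and weight
    # them by the best posterior probability. Note that fslmaths -thr/-uthr
    # retain the surviving label values rather than binarising, so in numpy
    # terms this computes roughly:
    #
    #     vent = (best_label > 10.5) & (best_label < 13.5)
    #     ventproba = best_label * vent * best_proba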

    # Image calculator : remove inter-ventricular region
    RmInterVent = Node(ImageMaths(), name="RemoveInterVent")
    RmInterVent.inputs.op_string = "-sub"
    RmInterVent.inputs.out_file = "rmintervent.nii.gz"
    wf.connect(ERC, "region_pv", RmInterVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInterVent, "in_file2")

    # Image calculator : add horns
    AddHorns = Node(ImageMaths(), name="AddHorns")
    AddHorns.inputs.op_string = "-add"
    AddHorns.inputs.out_file = "rmvent.nii.gz"
    wf.connect(RmInterVent, "out_file", AddHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddHorns, "in_file2")

    # Image calculator : remove ventricles
    RmVent = Node(ImageMaths(), name="RemoveVentricles")
    RmVent.inputs.op_string = "-sub"
    RmVent.inputs.out_file = "rmvent.nii.gz"
    wf.connect(AddHorns, "out_file", RmVent, "in_file")
    wf.connect(VentProba, "out_file", RmVent, "in_file2")

    # Image calculator : remove internal capsule
    RmIC = Node(ImageMaths(), name="RemoveInternalCap")
    RmIC.inputs.op_string = "-sub"
    RmIC.inputs.out_file = "rmic.nii.gz"
    wf.connect(RmVent, "out_file", RmIC, "in_file")
    wf.connect(DMRP, "internal_capsule_pv", RmIC, "in_file2")

    # Intensity Range Normalization (3)
    getMaxRmIC = Node(ImageStats(op_string='-r'), name="getMaxRmIC")
    wf.connect(RmIC, 'out_file', getMaxRmIC, 'in_file')

    RmICirn = Node(AbcImageMaths(), name="IntensityNormalization5")
    RmICirn.inputs.op_string = "-div"
    RmICirn.inputs.out_file = "normRmIC.nii.gz"
    wf.connect(RmIC, 'out_file', RmICirn, 'in_file')
    wf.connect(getMaxRmIC, ('out_stat', getElementFromList, 1), RmICirn,
               "op_value")

    # Probability To Levelset : WM orientation
    WM_Orient = Node(ProbabilityToLevelset(), name='WM_Orientation')
    WM_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_Orient.name),
        WM_Orient, 'output_dir')
    wf.connect(RmICirn, 'out_file', WM_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in WM only
    WM_pvs = Node(RecursiveRidgeDiffusion(), name='PVS_in_WM')
    WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_pvs.inputs.ridge_intensities = "bright"
    WM_pvs.inputs.ridge_filter = "1D"
    WM_pvs.inputs.orientation = "orthogonal"
    WM_pvs.inputs.ang_factor = 1.0
    WM_pvs.inputs.min_scale = 0
    WM_pvs.inputs.max_scale = 3
    WM_pvs.inputs.propagation_model = "diffusion"
    WM_pvs.inputs.diffusion_factor = 1.0
    WM_pvs.inputs.similarity_scale = 1.0
    WM_pvs.inputs.neighborhood_size = 2
    WM_pvs.inputs.max_iter = 100
    WM_pvs.inputs.max_diff = 0.001
    WM_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_pvs.name),
        WM_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', WM_pvs, 'input_image')
    wf.connect(WM_Orient, 'levelset', WM_pvs, 'surface_levelset')
    wf.connect(RmICirn, 'out_file', WM_pvs, 'loc_prior')

    # Extract Lesions : extract WM PVS
    extract_WM_pvs = Node(LesionExtraction(), name='ExtractPVSfromWM')
    extract_WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WM_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WM_pvs.inputs.csf_boundary_partial_vol_dist = 3.0
    extract_WM_pvs.inputs.lesion_clust_dist = 1.0
    extract_WM_pvs.inputs.prob_min_thresh = 0.1
    extract_WM_pvs.inputs.prob_max_thresh = 0.33
    extract_WM_pvs.inputs.small_lesion_size = 4.0
    extract_WM_pvs.inputs.save_data = True
    extract_WM_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WM_pvs.name), extract_WM_pvs,
               'output_dir')
    wf.connect(WM_pvs, 'propagation', extract_WM_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WM_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WM_pvs, 'levelset_boundary_image')
    wf.connect(RmICirn, 'out_file', extract_WM_pvs, 'location_prior_image')
    '''
    2nd branch: PVS detection restricted to the internal capsule
    '''

    # Image calculator : internal capsule without ventricles
    ICwoVent = Node(ImageMaths(), name="ICWithoutVentricules")
    ICwoVent.inputs.op_string = "-sub"
    ICwoVent.inputs.out_file = "icwovent.nii.gz"
    wf.connect(DMRP, "internal_capsule_pv", ICwoVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", ICwoVent, "in_file2")

    # Image calculator : remove ventricles IC
    RmVentIC = Node(ImageMaths(), name="RmVentIC")
    RmVentIC.inputs.op_string = "-sub"
    RmVentIC.inputs.out_file = "RmVentIC.nii.gz"
    wf.connect(ICwoVent, "out_file", RmVentIC, "in_file")
    wf.connect(VentProba, "out_file", RmVentIC, "in_file2")

    # Intensity Range Normalization (4)
    getMaxRmVentIC = Node(ImageStats(op_string='-r'), name="getMaxRmVentIC")
    wf.connect(RmVentIC, 'out_file', getMaxRmVentIC, 'in_file')

    RmVentICirn = Node(AbcImageMaths(), name="IntensityNormalization6")
    RmVentICirn.inputs.op_string = "-div"
    RmVentICirn.inputs.out_file = "normRmVentIC.nii.gz"
    wf.connect(RmVentIC, 'out_file', RmVentICirn, 'in_file')
    wf.connect(getMaxRmVentIC, ('out_stat', getElementFromList, 1),
               RmVentICirn, "op_value")

    # Probability To Levelset : IC orientation
    IC_Orient = Node(ProbabilityToLevelset(), name='IC_Orientation')
    IC_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_Orient.name),
        IC_Orient, 'output_dir')
    wf.connect(RmVentICirn, 'out_file', IC_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in IC only
    IC_pvs = Node(RecursiveRidgeDiffusion(), name='RecursiveRidgeDiffusion2')
    IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_pvs.inputs.ridge_intensities = "bright"
    IC_pvs.inputs.ridge_filter = "1D"
    IC_pvs.inputs.orientation = "undefined"
    IC_pvs.inputs.ang_factor = 1.0
    IC_pvs.inputs.min_scale = 0
    IC_pvs.inputs.max_scale = 3
    IC_pvs.inputs.propagation_model = "diffusion"
    IC_pvs.inputs.diffusion_factor = 1.0
    IC_pvs.inputs.similarity_scale = 1.0
    IC_pvs.inputs.neighborhood_size = 2
    IC_pvs.inputs.max_iter = 100
    IC_pvs.inputs.max_diff = 0.001
    IC_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_pvs.name),
        IC_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', IC_pvs, 'input_image')
    wf.connect(IC_Orient, 'levelset', IC_pvs, 'surface_levelset')
    wf.connect(RmVentICirn, 'out_file', IC_pvs, 'loc_prior')

    # Extract Lesions : extract IC PVS
    extract_IC_pvs = Node(LesionExtraction(), name='ExtractPVSfromIC')
    extract_IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_IC_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_IC_pvs.inputs.csf_boundary_partial_vol_dist = 4.0
    extract_IC_pvs.inputs.lesion_clust_dist = 1.0
    extract_IC_pvs.inputs.prob_min_thresh = 0.25
    extract_IC_pvs.inputs.prob_max_thresh = 0.5
    extract_IC_pvs.inputs.small_lesion_size = 4.0
    extract_IC_pvs.inputs.save_data = True
    extract_IC_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_IC_pvs.name), extract_IC_pvs,
               'output_dir')
    wf.connect(IC_pvs, 'propagation', extract_IC_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_IC_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_IC_pvs, 'levelset_boundary_image')
    wf.connect(RmVentICirn, 'out_file', extract_IC_pvs, 'location_prior_image')
    '''
    3rd branch: white matter hyperintensities (WMH)
    '''

    # Image calculator : remove inter-ventricular region
    RmInter = Node(ImageMaths(), name="RemoveInterVentricules")
    RmInter.inputs.op_string = "-sub"
    RmInter.inputs.out_file = "rminter.nii.gz"
    wf.connect(ERC2, 'region_pv', RmInter, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInter, "in_file2")

    # Image calculator : add ventricular horns
    AddVentHorns = Node(ImageMaths(), name="AddVentHorns")
    AddVentHorns.inputs.op_string = "-add"
    AddVentHorns.inputs.out_file = "rminter.nii.gz"
    wf.connect(RmInter, 'out_file', AddVentHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddVentHorns, "in_file2")

    # Intensity Range Normalization (5)
    getMaxAddVentHorns = Node(ImageStats(op_string='-r'),
                              name="getMaxAddVentHorns")
    wf.connect(AddVentHorns, 'out_file', getMaxAddVentHorns, 'in_file')

    AddVentHornsirn = Node(AbcImageMaths(), name="IntensityNormalization7")
    AddVentHornsirn.inputs.op_string = "-div"
    AddVentHornsirn.inputs.out_file = "normAddVentHorns.nii.gz"
    wf.connect(AddVentHorns, 'out_file', AddVentHornsirn, 'in_file')
    wf.connect(getMaxAddVentHorns, ('out_stat', getElementFromList, 1),
               AddVentHornsirn, "op_value")

    # Extract Lesions : extract White Matter Hyperintensities
    extract_WMH = Node(LesionExtraction(), name='Extract_WMH')
    extract_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_WMH.inputs.lesion_clust_dist = 1.0
    extract_WMH.inputs.prob_min_thresh = 0.84
    extract_WMH.inputs.prob_max_thresh = 0.84
    extract_WMH.inputs.small_lesion_size = 4.0
    extract_WMH.inputs.save_data = True
    extract_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WMH.name), extract_WMH,
               'output_dir')
    wf.connect(ERC2, 'background_proba', extract_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_WMH2 = extract_WMH.clone(name='Extract_WMH2')
    # extract_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH2.name),extract_WMH2,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH2,'location_prior_image')
    #
    # extract_WMH3 = extract_WMH.clone(name='Extract_WMH3')
    # extract_WMH3.inputs.gm_boundary_partial_vol_dist = 3.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH3.name),extract_WMH3,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     FINDING SMALL WMHs     ####
    ####################################
    Small round WMHs near the cortex are often missed by the main algorithm,
    so a dedicated detector is added here to take care of them.
    '''

    # Recursive Ridge Diffusion : round WMH detection
    round_WMH = Node(RecursiveRidgeDiffusion(), name='round_WMH')
    round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    round_WMH.inputs.ridge_intensities = "bright"
    round_WMH.inputs.ridge_filter = "0D"
    round_WMH.inputs.orientation = "undefined"
    round_WMH.inputs.ang_factor = 1.0
    round_WMH.inputs.min_scale = 1
    round_WMH.inputs.max_scale = 4
    round_WMH.inputs.propagation_model = "none"
    round_WMH.inputs.diffusion_factor = 1.0
    round_WMH.inputs.similarity_scale = 0.1
    round_WMH.inputs.neighborhood_size = 4
    round_WMH.inputs.max_iter = 100
    round_WMH.inputs.max_diff = 0.001
    round_WMH.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, round_WMH.name),
        round_WMH, 'output_dir')
    wf.connect(ERC2, 'background_proba', round_WMH, 'input_image')
    wf.connect(AddVentHornsirn, 'out_file', round_WMH, 'loc_prior')

    # Extract Lesions : extract round WMH
    extract_round_WMH = Node(LesionExtraction(), name='Extract_round_WMH')
    extract_round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_round_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_round_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_round_WMH.inputs.lesion_clust_dist = 1.0
    extract_round_WMH.inputs.prob_min_thresh = 0.33
    extract_round_WMH.inputs.prob_max_thresh = 0.33
    extract_round_WMH.inputs.small_lesion_size = 6.0
    extract_round_WMH.inputs.save_data = True
    extract_round_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_round_WMH.name),
               extract_round_WMH, 'output_dir')
    wf.connect(round_WMH, 'ridge_pv', extract_round_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_round_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_round_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_round_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_round_WMH2 = extract_round_WMH.clone(name='Extract_round_WMH2')
    # extract_round_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH2.name),extract_round_WMH2,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH2,'location_prior_image')
    #
    # extract_round_WMH3 = extract_round_WMH.clone(name='Extract_round_WMH3')
    # extract_round_WMH3.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH3.name),extract_round_WMH3,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     COMBINE BOTH TYPES     ####
    ####################################
    Combine small round WMHs with regular WMHs before thresholding,
    plus PVS from the white matter and from the internal capsule.
    '''

    # Image calculator : WM + IC DVRS
    DVRS = Node(ImageMaths(), name="DVRS")
    DVRS.inputs.op_string = "-max"
    DVRS.inputs.out_file = "DVRS_map.nii.gz"
    wf.connect(extract_WM_pvs, 'lesion_score', DVRS, "in_file")
    wf.connect(extract_IC_pvs, "lesion_score", DVRS, "in_file2")

    # Image calculator : WMH + round
    WMH = Node(ImageMaths(), name="WMH")
    WMH.inputs.op_string = "-max"
    WMH.inputs.out_file = "WMH_map.nii.gz"
    wf.connect(extract_WMH, 'lesion_score', WMH, "in_file")
    wf.connect(extract_round_WMH, "lesion_score", WMH, "in_file2")

    #===========================================================================
    # WMH2 = Node(ImageMaths(), name="WMH2")
    # WMH2.inputs.op_string = "-max"
    # WMH2.inputs.out_file = "WMH2_map.nii.gz"
    # wf.connect(extract_WMH2,'lesion_score',WMH2,"in_file")
    # wf.connect(extract_round_WMH2,"lesion_score", WMH2, "in_file2")
    #
    # WMH3 = Node(ImageMaths(), name="WMH3")
    # WMH3.inputs.op_string = "-max"
    # WMH3.inputs.out_file = "WMH3_map.nii.gz"
    # wf.connect(extract_WMH3,'lesion_score',WMH3,"in_file")
    # wf.connect(extract_round_WMH3,"lesion_score", WMH3, "in_file2")
    #===========================================================================

    # Image calculator : multiply by boundary partial volume
    WMH_mul = Node(ImageMaths(), name="WMH_mul")
    WMH_mul.inputs.op_string = "-mul"
    WMH_mul.inputs.out_file = "final_mask.nii.gz"
    wf.connect(WMH, "out_file", WMH_mul, "in_file")
    wf.connect(MGDM, "distance", WMH_mul, "in_file2")

    #===========================================================================
    # WMH2_mul = Node(ImageMaths(), name="WMH2_mul")
    # WMH2_mul.inputs.op_string = "-mul"
    # WMH2_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH2,"out_file", WMH2_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH2_mul, "in_file2")
    #
    # WMH3_mul = Node(ImageMaths(), name="WMH3_mul")
    # WMH3_mul.inputs.op_string = "-mul"
    # WMH3_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH3,"out_file", WMH3_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH3_mul, "in_file2")
    #===========================================================================
    '''
    ##########################################
    ####      SEGMENTATION THRESHOLD      ####
    ##########################################
    A threshold of 0.5 is very conservative, because the final lesion score is the
    product of two probabilities: two probabilities of 0.7 each, for example, already
    give a combined score of 0.49. The threshold should therefore be tuned to a value
    between 0.25 and 0.5 to balance false negatives (dominant at 0.5) against false
    positives (dominant at low values).
    '''

    # Threshold binary mask :
    DVRS_mask = Node(Threshold(), name="DVRS_mask")
    DVRS_mask.inputs.thresh = 0.25
    DVRS_mask.inputs.direction = "below"
    wf.connect(DVRS, "out_file", DVRS_mask, "in_file")

    # Threshold binary mask : 025
    WMH1_025 = Node(Threshold(), name="WMH1_025")
    WMH1_025.inputs.thresh = 0.25
    WMH1_025.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_025, "in_file")

    #===========================================================================
    # WMH2_025 = Node(Threshold(), name="WMH2_025")
    # WMH2_025.inputs.thresh = 0.25
    # WMH2_025.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_025, "in_file")
    #
    # WMH3_025 = Node(Threshold(), name="WMH3_025")
    # WMH3_025.inputs.thresh = 0.25
    # WMH3_025.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_025, "in_file")
    #===========================================================================

    # Threshold binary mask : 050
    WMH1_050 = Node(Threshold(), name="WMH1_050")
    WMH1_050.inputs.thresh = 0.50
    WMH1_050.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_050, "in_file")

    #===========================================================================
    # WMH2_050 = Node(Threshold(), name="WMH2_050")
    # WMH2_050.inputs.thresh = 0.50
    # WMH2_050.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_050, "in_file")
    #
    # WMH3_050 = Node(Threshold(), name="WMH3_050")
    # WMH3_050.inputs.thresh = 0.50
    # WMH3_050.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_050, "in_file")
    #===========================================================================

    # Threshold binary mask : 075
    WMH1_075 = Node(Threshold(), name="WMH1_075")
    WMH1_075.inputs.thresh = 0.75
    WMH1_075.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_075, "in_file")

    #===========================================================================
    # WMH2_075 = Node(Threshold(), name="WMH2_075")
    # WMH2_075.inputs.thresh = 0.75
    # WMH2_075.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_075, "in_file")
    #
    # WMH3_075 = Node(Threshold(), name="WMH3_075")
    # WMH3_075.inputs.thresh = 0.75
    # WMH3_075.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_075, "in_file")
    #===========================================================================

    ## Outputs

    DVRS_Output = Node(IdentityInterface(fields=[
        'mask', 'region', 'lesion_size', 'lesion_proba', 'boundary', 'label',
        'score'
    ]),
                       name='DVRS_Output')
    wf.connect(DVRS_mask, 'out_file', DVRS_Output, 'mask')

    WMH_output = Node(IdentityInterface(fields=[
        'mask1025', 'mask1050', 'mask1075', 'mask2025', 'mask2050', 'mask2075',
        'mask3025', 'mask3050', 'mask3075'
    ]),
                      name='WMH_output')
    wf.connect(WMH1_025, 'out_file', WMH_output, 'mask1025')
    #wf.connect(WMH2_025,'out_file',WMH_output,'mask2025')
    #wf.connect(WMH3_025,'out_file',WMH_output,'mask3025')
    wf.connect(WMH1_050, 'out_file', WMH_output, 'mask1050')
    #wf.connect(WMH2_050,'out_file',WMH_output,'mask2050')
    #wf.connect(WMH3_050,'out_file',WMH_output,'mask3050')
    wf.connect(WMH1_075, 'out_file', WMH_output, 'mask1075')
    #wf.connect(WMH2_075,'out_file',WMH_output,'mask2075')
    #wf.connect(WMH3_075,'out_file',WMH_output,'mask3075')

    return wf
Example #42
subject_list = [
    '229', '230', '232', '233', '234', '235', '237', '242', '243', '244',
    '245', '252', '253', '255', '261', '262', '263', '264', '273', '274',
    '281', '282', '286', '287', '362', '363', '364', '365', '366', '236',
    '271', '272'
]

# subject_list = ['229', '230', '365', '274']

# subject_list = ['230', '365']

output_dir = 'Plus_Maze_output'
working_dir = 'Plus_Maze_workingdir'

Plus_Maze_workflow = Workflow(name='Plus_Maze_workflow')
Plus_Maze_workflow.base_dir = opj(experiment_dir, working_dir)

# -----------------------------------------------------------------------------------------------------
# In[3]:

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']), name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# -----------------------------------------------------------------------------------------------------
# In[4]:

templates = {'plus_maze': 'Data/{subject_id}/plus_maze_{subject_id}.avi'}

selectfiles = Node(SelectFiles(templates, base_directory=experiment_dir),
Example #43
File: example.py  Project: can-lab/IndNet
"""

__author__ = "Florian Krause"


import os
from glob import glob

from nipype.interfaces import io
from nipype.pipeline.engine import Node, Workflow

from indnet import create_indnet_workflow


# Set up a workflow
core_networks = Workflow(name='core_networks')
core_networks.base_dir = "/path/to/base_directory/"  # set working/output directory

# Create indnet node
indnet = create_indnet_workflow(hp_cutoff=100, smoothing=5, smm_threshold=0.66, binarise_threshold=0.5, melodic_seed=123456, aggr_aroma=False)
indnet.inputs.inputspec.anat_file = "/path/to/t1.nii"  # point to anatomical T1 scan (NiFTI file)
indnet.inputs.inputspec.func_file = "/path/to/rs.nii"  # point to functional resting state scan (NiFTI file)
TEMPL_DIR = os.path.abspath("./Functional_ROIs")  # point to FIND template directory
indnet.inputs.inputspec.templates = [
    os.path.join(TEMPL_DIR, 'anterior_Salience', "anterior_Salience.nii.gz"),
    os.path.join(TEMPL_DIR, 'Auditory', 'Auditory.nii.gz'),
    os.path.join(TEMPL_DIR, 'Basal_Ganglia', 'Basal_Ganglia.nii.gz'),
    os.path.join(TEMPL_DIR, 'dorsal_DMN', 'dDMN.nii.gz'),
    os.path.join(TEMPL_DIR, 'high_Visual', 'high_Visual.nii.gz'),
    os.path.join(TEMPL_DIR, 'Language', 'Language.nii.gz'),
    os.path.join(TEMPL_DIR, 'LECN', 'LECN.nii.gz'),
Example #44
def preprocessing_pipeline(cfg):
    import os

    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as freesurfer

    # LeiCA modules
    from utils import zip_and_save_running_scripts
    from preprocessing.rsfMRI_preprocessing import create_rsfMRI_preproc_pipeline
    from preprocessing.converter import create_converter_structural_pipeline, create_converter_functional_pipeline, \
        create_converter_diffusion_pipeline

    # INPUT PARAMETERS
    dicom_dir = cfg['dicom_dir']
    working_dir = cfg['working_dir']
    freesurfer_dir = cfg['freesurfer_dir']
    template_dir = cfg['template_dir']
    script_dir = cfg['script_dir']
    ds_dir = cfg['ds_dir']

    subject_id = cfg['subject_id']
    TR_list = cfg['TR_list']

    vols_to_drop = cfg['vols_to_drop']
    lp_cutoff_freq = cfg['lp_cutoff_freq']
    hp_cutoff_freq = cfg['hp_cutoff_freq']
    use_fs_brainmask = cfg['use_fs_brainmask']

    use_n_procs = cfg['use_n_procs']
    plugin_name = cfg['plugin_name']

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    wf = Workflow(name='LeiCA_resting')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 120
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']),
                           name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    #####################################
    # FETCH MRI DATA
    #####################################
    # GET LATERAL VENTRICLE MASK
    templates_atlases = {
        'lat_ventricle_mask_MNI':
        'cpac_image_resources/HarvardOxford-lateral-ventricles-thr25-2mm.nii.gz'
    }
    selectfiles_templates = Node(nio.SelectFiles(templates_atlases,
                                                 base_directory=template_dir),
                                 name="selectfiles_templates")

    if False:  # releases 1-6 with 01... format subject_id
        # GET FUNCTIONAL DATA
        templates_funct = {
            'funct_dicom': '{subject_id}/session_1/RfMRI_*_{TR_id}'
        }

        selectfiles_funct = Node(nio.SelectFiles(templates_funct,
                                                 base_directory=dicom_dir),
                                 name="selectfiles_funct")
        selectfiles_funct.inputs.subject_id = subject_id

        wf.connect(scan_infosource, 'TR_id', selectfiles_funct, 'TR_id')

        # GET STRUCTURAL DATA
        templates_struct = {
            't1w_dicom': '{subject_id}/anat',
            'dMRI_dicom': '{subject_id}/session_1/DTI_mx_137/*.dcm'
        }  # *.dcm for dMRI as Dcm2nii requires this

        selectfiles_struct = Node(nio.SelectFiles(templates_struct,
                                                  base_directory=dicom_dir),
                                  name="selectfiles_struct")
        selectfiles_struct.inputs.subject_id = subject_id

    else:  # starting with release 6: new folder structure
        templates_funct = {
            'funct_dicom': '*/{subject_id}/*_V2/REST_{TR_id}*/*.dcm'
        }

        selectfiles_funct = Node(nio.SelectFiles(templates_funct,
                                                 base_directory=dicom_dir),
                                 name="selectfiles_funct")
        selectfiles_funct.inputs.subject_id = subject_id

        wf.connect(scan_infosource, 'TR_id', selectfiles_funct, 'TR_id')

        # GET STRUCTURAL DATA
        templates_struct = {
            't1w_dicom': '*/{subject_id}/*_V2/MPRAGE_SIEMENS_DEFACED*/*.dcm',
            'dMRI_dicom': '*/{subject_id}/*_V2/DIFF_137_AP*/*.dcm'
        }  # *.dcm for dMRI as Dcm2nii requires this

        selectfiles_struct = Node(nio.SelectFiles(templates_struct,
                                                  base_directory=dicom_dir),
                                  name="selectfiles_struct")
        selectfiles_struct.inputs.subject_id = subject_id

    #####################################
    # COPY RUNNING SCRIPTS
    #####################################
    copy_scripts = Node(util.Function(input_names=['subject_id', 'script_dir'],
                                      output_names=['zip_file'],
                                      function=zip_and_save_running_scripts),
                        name='copy_scripts')
    copy_scripts.inputs.script_dir = script_dir
    copy_scripts.inputs.subject_id = subject_id
    wf.connect(copy_scripts, 'zip_file', ds, 'scripts')

    #####################################
    # CONVERT DICOMs
    #####################################
    # CONVERT STRUCT 2 NIFTI
    converter_struct = create_converter_structural_pipeline(
        working_dir, ds_dir, 'converter_struct')
    wf.connect(selectfiles_struct, 't1w_dicom', converter_struct,
               'inputnode.t1w_dicom')

    # CONVERT dMRI 2 NIFTI
    converter_dMRI = create_converter_diffusion_pipeline(
        working_dir, ds_dir, 'converter_dMRI')
    wf.connect(selectfiles_struct, 'dMRI_dicom', converter_dMRI,
               'inputnode.dMRI_dicom')

    # CONVERT FUNCT 2 NIFTI
    converter_funct = create_converter_functional_pipeline(
        working_dir, ds_dir, 'converter_funct')
    wf.connect(selectfiles_funct, 'funct_dicom', converter_funct,
               'inputnode.epi_dicom')
    wf.connect(scan_infosource, 'TR_id', converter_funct,
               'inputnode.out_format')

    #####################################
    # START RSFMRI PREPROCESSING ANALYSIS
    #####################################
    # rsfMRI PREPROCESSING
    rsfMRI_preproc = create_rsfMRI_preproc_pipeline(working_dir,
                                                    freesurfer_dir, ds_dir,
                                                    use_fs_brainmask,
                                                    'rsfMRI_preprocessing')
    rsfMRI_preproc.inputs.inputnode.vols_to_drop = vols_to_drop
    rsfMRI_preproc.inputs.inputnode.lp_cutoff_freq = lp_cutoff_freq
    rsfMRI_preproc.inputs.inputnode.hp_cutoff_freq = hp_cutoff_freq
    rsfMRI_preproc.inputs.inputnode.subject_id = subject_id

    wf.connect(converter_struct, 'outputnode.t1w', rsfMRI_preproc,
               'inputnode.t1w')
    wf.connect(converter_funct, 'outputnode.epi', rsfMRI_preproc,
               'inputnode.epi')
    wf.connect(converter_funct, 'outputnode.TR_ms', rsfMRI_preproc,
               'inputnode.TR_ms')
    wf.connect(selectfiles_templates, 'lat_ventricle_mask_MNI', rsfMRI_preproc,
               'inputnode.lat_ventricle_mask_MNI')

    #####################################
    # RUN WF
    #####################################
    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name)
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
Example #45
def make_w_warp():

    n_in = Node(IdentityInterface(fields=[
        'func',
        'motion_parameters',
        'fmap',
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'warping',
        ]), name='output')

    n_allineate = Node(interface=Allineate(), name='allineate')
    n_allineate.inputs.one_pass = True
    n_allineate.inputs.cost = 'hellinger'
    n_allineate.inputs.args = '-master BASE'
    n_allineate.inputs.warp_type = 'shift_rotate'
    n_allineate.inputs.outputtype = 'NIFTI'

    n_qwarp = Node(interface=Qwarp(), name='qwarp')
    n_qwarp.inputs.outputtype = 'NIFTI'
    n_qwarp.inputs.plusminus = True

    n_merge = Node(
        interface=Function(
            input_names=[
                'warp0',
                'warp1'
                ],
            output_names=[
                'nwarp',
            ],
            function=merge_warping,
            ),
        name='merge_warp')

    w = Workflow('warping')

    w.connect(n_in, 'fmap', n_allineate, 'in_file')
    w.connect(n_in, 'func', n_allineate, 'reference')

    w.connect(n_allineate, 'out_file', n_qwarp, 'in_file')
    w.connect(n_in, 'func', n_qwarp, 'base_file')

    w.connect(n_qwarp, 'base_warp', n_merge, 'warp0')
    w.connect(n_in, 'motion_parameters', n_merge, 'warp1')

    w.connect(n_merge, 'nwarp', n_out, 'warping')

    return w
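
# merge_warping is referenced by the Function node above but not shown in this
# example. A hypothetical stand-in, assuming the downstream AFNI tooling
# accepts a space-separated list of warp files to catenate (as 3dNwarpApply's
# -nwarp argument does):
def merge_warping(warp0, warp1):
    # catenate the nonlinear warp with the motion-parameter transform
    return '%s %s' % (warp0, warp1)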
Example #46
def create_hc_connec(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                     epi_resolution, standard_brain, standard_brain_resampled,
                     standard_brain_mask, standard_brain_mask_resampled,
                     fwhm_smoothing, side, TR, highpass, lowpass):
    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # main workflow
    hc_connec = Workflow(name='hc_connec_thr099_scrubbed')
    hc_connec.base_dir = working_dir
    hc_connec.config['execution'][
        'crashdump_dir'] = hc_connec.base_dir + "/crash_files"

    # select files
    templates = {
        #'rest_head': 'resting_state/denoise/rest_preprocessed_nativespace.nii.gz', #denoised and bandpass-filtered native space (2x2x2mm) image
        'rest2anat_scrubbed':
        'preprocessing/preprocessed/{subject}/scrubbed_interpolated/rest2anat_denoised_scrubbed_intep.nii.gz',  #denoised, scrubbed, interp, bp-filtered native space
        'ants_affine':
        'preprocessing/preprocessed/{subject}/structural/transforms2mni/transform0GenericAffine.mat',
        'ants_warp':
        'preprocessing/preprocessed/{subject}/structural/transforms2mni/transform1Warp.nii.gz',
        'scrubvols': 'quality_reports/poldrack_reports/{subject}/scrubvols.txt'
    }

    selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")
    selectfiles.inputs.subject = subject

    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.highpass_sigma = 1. / (2 * TR * highpass)
    denoise.inputs.inputnode.lowpass_sigma = 1. / (2 * TR * lowpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    denoise.inputs.inputnode.tr = TR
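
    # scrub_timepoints is not defined in this excerpt; a minimal, hypothetical
    # sketch of what it presumably does (drop the volumes flagged in
    # scrubvols.txt from the 4D series and write the result to working_dir):
    def scrub_timepoints(scrubvols, in_file, working_dir):
        import os
        import numpy as np
        import nibabel as nib
        img = nib.load(in_file)
        data = img.get_fdata()
        bad = np.loadtxt(scrubvols, dtype=int, ndmin=1)
        keep = np.setdiff1d(np.arange(data.shape[-1]), bad)
        filename_scrubbed_img = os.path.join(working_dir,
                                             'rest_scrubbed.nii.gz')
        nib.save(nib.Nifti1Image(data[..., keep], img.affine, img.header),
                 filename_scrubbed_img)
        return filename_scrubbed_img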

    #drop scrubbed volumes
    scrub_volumes = Node(util.Function(
        input_names=['scrubvols', 'in_file', 'working_dir'],
        output_names=['filename_scrubbed_img'],
        function=scrub_timepoints),
                         name='scrub_volumes')
    scrub_volumes.inputs.working_dir = working_dir

    #get T1 brainmask
    get_T1_brainmask = create_get_T1_brainmask()
    get_T1_brainmask.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    get_T1_brainmask.inputs.inputnode.fs_subject_id = subject

    #workflow to extract HC and transform into individual space
    transform_hc = create_transform_hc()
    transform_hc.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    transform_hc.inputs.inputnode.fs_subject_id = subject
    transform_hc.inputs.inputnode.resolution = 2
    transform_hc.inputs.inputnode.working_dir = working_dir

    #workflow to extract timeseries and correlate
    corr_ts = create_corr_ts()

    #workflow to tranform correlations to MNI space
    ants_registration = create_ants_registration_pipeline()
    ants_registration.inputs.inputnode.ref = standard_brain  # alternatively standard_brain_resampled (2x2x2mm brain) for RSV

    #
    smoothing = create_smoothing_pipeline()
    smoothing.inputs.inputnode.fwhm = fwhm_smoothing
    #sink to store files
    sink = Node(
        nio.DataSink(parameterization=True, base_directory=out_dir),
        #   substitutions=[('_binarize', 'binarize'), -> don't really seem to work and I don't know why.
        #                   #('_binarize', 'anterior_hc'),
        #                   ('_ants_reg1', 'posterior_hc'),
        #                   #('_ants_reg', 'anterior_hc'),
        #                   ('_smooth1', 'posterior_hc'),
        #                   ('_smooth0', 'anterior_hc'),
        #                   ('corr_Z_trans', 'corr_Z_MNI')],
        name='sink')

    sink.inputs.substitutions = [('_binarize0', 'posterior_hc'),
                                 ('_binarize1', 'anterior_hc'),
                                 ('_ants_reg0', 'posterior_hc'),
                                 ('_ants_reg1', 'anterior_hc'),
                                 ('_smooth0', 'posterior_hc'),
                                 ('_smooth1', 'anterior_hc'),
                                 ('_apply_FisherZ0', 'posterior_hc'),
                                 ('_apply_FisherZ1', 'anterior_hc')]

    # connections
    hc_connec.connect([
        #bandpass-filtering implemented after scrubbing and replacement!
        (selectfiles, scrub_volumes, [('scrubvols', 'scrubvols')]),
        (get_T1_brainmask, transform_hc, [('outputnode.T1',
                                           'inputnode.anat_head')]),
        (transform_hc, corr_ts, [('outputnode.hc_transformed_bin',
                                  'inputnode.hc_mask')]),
        (selectfiles, denoise, [('rest2anat_scrubbed',
                                 'inputnode.epi_denoised')]),
        (denoise, scrub_volumes, [('outputnode.normalized_file', 'in_file')]),
        (scrub_volumes, corr_ts, [('filename_scrubbed_img', 'inputnode.ts')]),
        (corr_ts, sink,
         [('outputnode.corrmap_z', 'hc_connectivity_thr099.scrubbed.' + side +
           '.corr.nativespace.@transformed')]),
        (corr_ts, ants_registration, [('outputnode.corrmap_z',
                                       'inputnode.corr_Z')]),
        (selectfiles, ants_registration, [('ants_affine',
                                           'inputnode.ants_affine')]),
        (selectfiles, ants_registration, [('ants_warp', 'inputnode.ants_warp')
                                          ]),
        (ants_registration, sink,
         [('outputnode.ants_reg_corr_Z',
           'hc_connectivity_thr099.scrubbed.' + side + '.corr.ants')]),
        (ants_registration, smoothing, [('outputnode.ants_reg_corr_Z',
                                         'inputnode.ts_transformed')]),
        (smoothing, sink,
         [('outputnode.ts_smoothed',
           'hc_connectivity_thr099.scrubbed.' + side + '.corr.smoothed')]),
    ])

    # Note: this workflow cannot run with the MultiProc plugin, because at one
    # point a hardcoded filename is written to disk.
    hc_connec.run()
Example #47
def make_w_masking():

    n_in = Node(IdentityInterface(fields=[
        'func',
        'fmap',  # mean
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'func',
        'fmap',  # mean
        ]), name='output')

    n_mask_func = Node(interface=Automask(), name='mask_func')
    n_mask_func.inputs.clfrac = 0.4
    n_mask_func.inputs.dilate = 4
    n_mask_func.inputs.args = '-nbhrs 15'
    n_mask_func.inputs.outputtype = 'NIFTI'

    n_mask_fmap = n_mask_func.clone('mask_fmap')

    n_mul = Node(interface=BinaryMaths(), name='mul')
    n_mul.inputs.operation = 'mul'

    n_masking = Node(interface=BinaryMaths(), name='masking')
    n_masking.inputs.operation = 'mul'

    n_masking_fmap = Node(interface=BinaryMaths(), name='masking_fmap')
    n_masking_fmap.inputs.operation = 'mul'

    w = Workflow('masking')

    w.connect(n_in, 'func', n_mask_func, 'in_file')
    w.connect(n_in, 'fmap', n_mask_fmap, 'in_file')
    w.connect(n_mask_fmap, 'out_file', n_mul, 'in_file')
    w.connect(n_mask_func, 'out_file', n_mul, 'operand_file')
    w.connect(n_in, 'func', n_masking, 'in_file')
    w.connect(n_mul, 'out_file', n_masking, 'operand_file')
    w.connect(n_masking, 'out_file', n_out, 'func')

    w.connect(n_in, 'fmap', n_masking_fmap, 'in_file')
    w.connect(n_mul, 'out_file', n_masking_fmap, 'operand_file')

    w.connect(n_masking_fmap, 'out_file', n_out, 'fmap')

    return w
def run_workflow():
    raise Exception("This code was not tested after refactoring to be used by "
                    "preprocessing_workflow.py.")
    config.enable_debug_mode()

    # ------------------ Specify variables
    ds_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    data_dir = ds_root
    output_dir = 'func_unwarp'
    working_dir = 'workingdirs/func_unwarp'

    subject_list = ['eddy']
    session_list = ['20170511']

    # ------------------ Input Files
    infosource = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
    ]),
                      name="infosource")

    infosource.iterables = [
        ('session_id', session_list),
        ('subject_id', subject_list),
    ]
    # SelectFiles
    templates = {
        'funcs':
        'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        'sub-{subject_id}_ses-{session_id}_'
        'task-*_bold_res-1x1x1_preproc.nii.gz',

        # Use *-roi for testing
        #    'task-curvetracing_run-01_bold_res-1x1x1_preproc-roi.nii.gz',
        'fmap_phasediff':
        'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
        'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
        '.nii.gz',
        'fmap_magnitude':
        'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
        'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
        '.nii.gz',
        'fmap_mask':
        'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        'sub-{subject_id}_ses-{session_id}_'
        'magnitude1_res-1x1x1_preproc.nii.gz',
    }
    inputfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                      name="input_files")

    # ------------------ Output Files
    # Datasink
    outputfiles = Node(nio.DataSink(base_directory=ds_root,
                                    container=output_dir,
                                    parameterization=True),
                       name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('/undistorted/', '/'),
        ('/undistorted_masks/', '/'),
        ('_unwarped.nii.gz', '.nii.gz'),
        ('phasediff_radians_unwrapped_mask', '_rec-unwrapped_phasediff'),
    ]
    outputfiles.inputs.regexp_substitutions = [
        (r'_fugue[0-9]+/', r'func/'), (r'_undistort_masks[0-9]+/', r'func/'),
        (r'_ses-([a-zA-Z0-9]*)_sub-([a-zA-Z0-9]*)', r'sub-\2/ses-\1')
    ]

    # -------------------------------------------- Create Pipeline

    workflow = Workflow(name='undistort',
                        base_dir=os.path.join(ds_root, working_dir))

    workflow.connect([(infosource, inputfiles, [('subject_id', 'subject_id'),
                                                ('session_id', 'session_id')])
                      ])

    undistort_flow = create_workflow()

    # Connect sub-workflow inputs
    workflow.connect([(inputfiles, undistort_flow, [
        ('subject_id', 'in.subject_id'),
        ('session_id', 'in.session_id'),
        ('fmap_phasediff', 'in.fmap_phasediff'),
        ('fmap_magnitude', 'in.fmap_magnitude'),
        ('fmap_mask', 'in.fmap_mask'),
    ]), (undistort_flow, outputfiles, [
        ('out.unwarped_file', 'undistorted'),
    ])])

    # Leftover connections from before the refactoring: the first duplicates
    # the connection above, and undistort_masks is no longer defined here.
    # workflow.connect(undistort_flow, 'unwarped_file', outputfiles,
    #                  'undistorted')
    # workflow.connect(undistort_masks, 'unwarped_file', outputfiles,
    #                  'undistorted_masks')

    workflow.stop_on_first_crash = True
    workflow.keep_inputs = True
    workflow.remove_unnecessary_outputs = False
    workflow.write_graph()
    workflow.run()
def create_transform_pipeline(name='transform_timeseries'):

    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # initiate workflow (using the name argument instead of a hardcoded string)
    transform_ts = Workflow(name=name)

    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['orig_ts', 'anat_head', 'mat_moco', 'fullwarp', 'resolution']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(
        fields=['trans_ts', 'trans_ts_mean', 'resamp_brain']),
                      name='outputnode')

    #resample anatomy
    resample = Node(fsl.FLIRT(datatype='float',
                              out_file='T1_resampled.nii.gz'),
                    name='resample_anat')
    transform_ts.connect([
        (inputnode, resample, [('anat_head', 'in_file'),
                               ('anat_head', 'reference'),
                               ('resolution', 'apply_isoxfm')]),
        (resample, outputnode, [('out_file', 'resamp_brain')])
    ])

    # split timeseries in single volumes
    split = Node(fsl.Split(dimension='t', out_base_name='timeseries'),
                 name='split')

    transform_ts.connect([(inputnode, split, [('orig_ts', 'in_file')])])

    # applymoco premat and fullwarpfield
    applywarp = MapNode(fsl.ApplyWarp(interp='spline',
                                      relwarp=True,
                                      out_file='rest2anat.nii.gz',
                                      datatype='float'),
                        iterfield=['in_file', 'premat'],
                        name='applywarp')

    transform_ts.connect([(split, applywarp, [('out_files', 'in_file')]),
                          (inputnode, applywarp, [('mat_moco', 'premat'),
                                                  ('fullwarp', 'field_file')]),
                          (resample, applywarp, [('out_file', 'ref_file')])])

    # re-concatenate volumes
    merge = Node(fsl.Merge(dimension='t', merged_file='rest2anat.nii.gz'),
                 name='merge')
    transform_ts.connect([(applywarp, merge, [('out_file', 'in_files')]),
                          (merge, outputnode, [('merged_file', 'trans_ts')])])

    # calculate new mean
    tmean = Node(fsl.maths.MeanImage(dimension='T',
                                     out_file='rest_mean2anat_lowres.nii.gz'),
                 name='tmean')

    transform_ts.connect([(merge, tmean, [('merged_file', 'in_file')]),
                          (tmean, outputnode, [('out_file', 'trans_ts_mean')])
                          ])

    return transform_ts
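
# A minimal usage sketch with hypothetical paths: mat_moco expects one MCFLIRT
# MAT_* file per volume and fullwarp a FNIRT-style warp field.
if __name__ == '__main__':
    from glob import glob
    transform = create_transform_pipeline()
    transform.base_dir = '/tmp/transform_wd'
    transform.inputs.inputnode.orig_ts = '/data/rest.nii.gz'
    transform.inputs.inputnode.anat_head = '/data/T1_head.nii.gz'
    transform.inputs.inputnode.mat_moco = sorted(glob('/data/moco.mat/MAT_*'))
    transform.inputs.inputnode.fullwarp = '/data/epi2anat_fullwarp.nii.gz'
    transform.inputs.inputnode.resolution = 3  # isotropic target resolution (mm)
    transform.run()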