Example #1
    def __init__(self, datasink, TR, num_vol):
        # specify input and output nodes
        self.datasink = datasink
        self.TR = TR
        self.num_vol = num_vol

        # specify nodes
        # SpecifyModel - Generates SPM-specific Model
        self.modelspec = pe.Node(interface=model.SpecifySPMModel(),
                                 name='model_specification')
        self.modelspec.inputs.input_units = 'secs'
        self.modelspec.inputs.output_units = 'secs'
        self.modelspec.inputs.time_repetition = self.TR
        self.modelspec.inputs.high_pass_filter_cutoff = 128
        subjectinfo = [
            Bunch(conditions=['None'],
                  onsets=[list(range(self.num_vol))],
                  durations=[[0.5]])
        ]
        self.modelspec.inputs.subject_info = subjectinfo

        # Level1Design - Generates an SPM design matrix
        self.level1design = pe.Node(interface=spm.Level1Design(),
                                    name='first_level_design')
        self.level1design.inputs.bases = {'hrf': {'derivs': [1, 1]}}
        self.level1design.inputs.interscan_interval = self.TR
        self.level1design.inputs.timing_units = 'secs'

        # EstimateModel - estimate the parameters of the model
        # method can be 'Classical', 'Bayesian' or 'Bayesian2'
        self.level1estimate = pe.Node(interface=spm.EstimateModel(),
                                      name="first_level_estimate")
        self.level1estimate.inputs.estimation_method = {'Classical': 1}

        self.threshold = pe.Node(interface=spm.Threshold(), name="threshold")
        self.threshold.inputs.contrast_index = 1

        # EstimateContrast - estimates contrasts
        self.contrast_estimate = pe.Node(interface=spm.EstimateContrast(),
                                         name="contrast_estimate")
        cont1 = ('active > rest', 'T', ['None'], [1])
        contrasts = [cont1]
        self.contrast_estimate.inputs.contrasts = contrasts

        # specify workflow instance
        self.workflow = pe.Workflow(name='first_level_analysis_workflow')

        # connect nodes
        self.workflow.connect([
            (self.modelspec, self.level1design, [('session_info',
                                                  'session_info')]),
            (self.level1design, self.level1estimate, [('spm_mat_file',
                                                       'spm_mat_file')]),
            (self.level1estimate, self.contrast_estimate,
             [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'),
              ('residual_image', 'residual_image')]),
            # (self.contrast_estimate, self.threshold, [('spm_mat_file', 'spm_mat_file'), ('spmT_images', 'stat_image')]),
            (self.contrast_estimate, self.datasink,
             [('con_images', 'contrast_img'), ('spmT_images', 'contrast_T')])
        ])
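
The snippet above begins inside a class body; assuming the enclosing class is called FirstLevelAnalysis (a stand-in name, not shown in the snippet) and takes the arguments from __init__, driving it might look like this sketch:

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

# 'FirstLevelAnalysis' is hypothetical -- the real class name is not shown above.
datasink = pe.Node(nio.DataSink(base_directory='/tmp/results'), name='datasink')
analysis = FirstLevelAnalysis(datasink=datasink, TR=2.0, num_vol=200)
analysis.workflow.run()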
Example #2
def build_pipeline(scans, vectors, names, contrasts, destdir,
                   explicitmask, analysis_name='analysis',
                   verbose=True):
    ''' Build a Nipype pipeline for a Multiple Regression analysis over a
    given set of parametric maps, using data from an Excel sheet as
    regressors (columns in 'names') and a given explicit mask.

    The whole analysis will be performed in the directory 'destdir'.'''

    print('Analysis name:', analysis_name)

    centering = [1] * len(names)

    if verbose:
        print('Scans (%s):' % len(scans), scans)
        print('Vectors (%s)' % len(vectors))
        print('Names (%s):' % len(names), names)
        print('Contrasts (%s):' % len(contrasts), contrasts)

    covariates = []
    for name, v, c in zip(names, vectors, centering):
        covariates.append(dict(name=name, centering=c, vector=v))

    model = spm.model.MultipleRegressionDesign(in_files=scans,
                                               user_covariates=covariates,
                                               explicit_mask_file=explicitmask,
                                               use_implicit_threshold=True)

    est = spm.EstimateModel(estimation_method={'Classical': 1})
    con = spm.EstimateContrast(contrasts=contrasts,
                               group_contrast=True)

    # Create the workflow
    a = pe.Workflow(name=analysis_name)
    a.base_dir = destdir

    n1 = pe.Node(model, name='modeldesign')
    n2 = pe.Node(est, name='estimatemodel')
    n3 = pe.Node(con, name='estimatecontrasts')

    a.connect([(n1, n2, [('spm_mat_file', 'spm_mat_file')]),
               (n2, n3, [('spm_mat_file', 'spm_mat_file'),
                         ('beta_images', 'beta_images'),
                         ('residual_image', 'residual_image')]), ])
    a.config['execution']['stop_on_first_rerun'] = True
    a.config['execution']['remove_unnecessary_outputs'] = False
    return a
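
A minimal sketch of calling build_pipeline; the paths, regressor values, and contrast below are hypothetical:

# One regressor ('age') over two scans; values are made up for illustration.
scans = ['/data/maps/sub-01_cbf.nii', '/data/maps/sub-02_cbf.nii']
vectors = [[23.0, 31.0]]
names = ['age']
contrasts = [('age+', 'T', ['age'], [1])]
wf = build_pipeline(scans, vectors, names, contrasts,
                    destdir='/tmp/analysis',
                    explicitmask='/data/masks/brainmask.nii')
wf.run()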
Example #3
specify_model.inputs.subject_info = [
    Bunch(conditions=['Task-Odd', 'Task-Even'],
          onsets=[list(range(15, 240, 60)),
                  list(range(45, 240, 60))],
          durations=[[15], [15]])
] * 4

level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
level1design.inputs.timing_units = 'secs'
level1design.inputs.interscan_interval = specify_model.inputs.time_repetition

level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}

contrastestimate = pe.Node(
    interface=spm.EstimateContrast(), name="contrastestimate")
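# Each T-contrast below is a tuple: (name, 'T', condition names, weights),
# with one weight per named condition.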
cont1 = ('Task>Baseline', 'T', ['Task-Odd', 'Task-Even'], [0.5, 0.5])
cont2 = ('Task-Odd>Task-Even', 'T', ['Task-Odd', 'Task-Even'], [1, -1])
contrastestimate.inputs.contrasts = [cont1, cont2]

modelling = pe.Workflow(name="modelling")
modelling.connect(specify_model, 'session_info', level1design, 'session_info')
modelling.connect(level1design, 'spm_mat_file', level1estimate, 'spm_mat_file')
modelling.connect(level1estimate, 'spm_mat_file', contrastestimate,
                  'spm_mat_file')
modelling.connect(level1estimate, 'beta_images', contrastestimate,
                  'beta_images')
modelling.connect(level1estimate, 'residual_image', contrastestimate,
                  'residual_image')
"""Having preprocessing and modelling workflows we need to connect them
together, add data grabbing facility and save the results. For this we will
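
A minimal sketch of that master workflow, assuming the preprocessing workflow is named preprocessing and exposes smoothed files from a node called smooth; the DataGrabber template setup is elided and all names are illustrative:

import nipype.interfaces.io as nio

datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                     outfields=['func']),
                     name='datasource')
datasink = pe.Node(nio.DataSink(base_directory='/tmp/results'), name='datasink')

master = pe.Workflow(name='master')
master.connect([
    (datasource, preprocessing, [('func', 'inputspec.func')]),
    (preprocessing, modelling, [('smooth.smoothed_files',
                                 'specify_model.functional_runs')]),
    (modelling, datasink, [('contrastestimate.spmT_images', 'stats')]),
])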
Example #4
import os

from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths(
    '/home/rj299/project/MATLAB/toolbox/spm12/')  # default SPM12 install path
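# If MATLAB itself has to be launched differently (e.g. headless), the same
# class exposes a default-command hook as well; an optional sketch:
MatlabCommand.set_default_matlab_cmd('matlab -nodesktop -nosplash')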

#%% Group analysis - based on SPM - consider the FSL randomise option as an alternative (sketched below; also in another script)
# OneSampleTTestDesign - creates one sample T-Test Design
onesamplettestdes = Node(spm.OneSampleTTestDesign(), name="onesampttestdes")

# EstimateModel - estimates the model
level2estimate = Node(spm.EstimateModel(estimation_method={'Classical': 1}),
                      name="level2estimate")

# EstimateContrast - estimates group contrast
level2conestimate = Node(spm.EstimateContrast(group_contrast=True),
                         name="level2conestimate")
cont1 = ['Group', 'T', ['mean'], [1]]
level2conestimate.inputs.contrasts = [cont1]
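
# The FSL randomise alternative mentioned above could look roughly like this;
# '/tmp/all_con_0001.nii.gz' is a placeholder for a 4D file created by merging
# the first-level con images across subjects.
import nipype.interfaces.fsl as fsl
randomise = Node(fsl.Randomise(in_file='/tmp/all_con_0001.nii.gz',
                               one_sample_group_mean=True,
                               tfce=True,
                               num_perm=5000),
                 name="randomise")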

# Which contrasts to use for the 2nd-level analysis
contrast_list = [
    'con_0001', 'con_0002', 'con_0003', 'con_0004', 'con_0005', 'con_0006',
    'con_0007', 'con_0008', 'con_0009', 'con_0010', 'con_0011', 'con_0012',
    'con_0013', 'con_0014'
]

subject_list = [
    2073, 2550, 2582, 2583, 2584, 2585, 2588, 2592, 2593, 2594, 2596, 2597,
    2598, 2599, 2600, 2624, 2650, 2651, 2652, 2653, 2654, 2655, 2656, 2657,
    2658, 2659, 2660, 2661, 2662, 2663, 2664, 2665, 2666
]

Example #5

wf = Workflow(name="l1run", base_dir="/media/Data/work")
wf.connect([
    (infosource, datasource, [('subject_id', 'subject_id')]),
    (datasource, gunzip, [('func', 'in_file')]),
    (gunzip, modelspec, [('out_file', 'functional_runs')]),
    (infosource, getsubjectinfo, [('subject_id', 'subject_id')]),
    (getsubjectinfo, modelspec, [('subject_info', 'subject_info')]),
])
wf.connect([(modelspec, level1design, [("session_info", "session_info")])])

##########################################################################

level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}

contrastestimate = pe.Node(interface=spm.EstimateContrast(),
                           name="contrastestimate")
contrastestimate.inputs.contrasts = contrasts
contrastestimate.overwrite = True
contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}}

########################################################################
#%% Connecting level1 estimation and contrasts
wf.connect([
    (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
                                        ('beta_images', 'beta_images'),
                                        ('residual_image', 'residual_image')]),
])

###############################################################
Example #6
def create_first_SPM(name='modelfit'):
    """First level task-fMRI modelling workflow
    
    Parameters
    ----------
    name : name of workflow. Default = 'modelfit'
    
    Inputs
    ------
    inputspec.session_info :
    inputspec.interscan_interval :
    inputspec.contrasts :
    inputspec.estimation_method :
    inputspec.bases :
    inputspec.mask :
    inputspec.model_serial_correlations :

    Outputs
    -------
    outputspec.RPVimage :
    outputspec.beta_images :
    outputspec.mask_image :
    outputspec.residual_image :
    outputspec.con_images :
    outputspec.ess_images :
    outputspec.spmF_images :
    outputspec.spmT_images :
    outputspec.spm_mat_file :
    
    Returns
    -------
    workflow : first-level workflow
    """
    import nipype.interfaces.spm as spm
    import nipype.interfaces.freesurfer as fs
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    modelfit = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'estimation_method',
        'bases', 'mask', 'model_serial_correlations'
    ]),
                        name='inputspec')

    level1design = pe.Node(interface=spm.Level1Design(timing_units='secs'),
                           name="create_level1_design")

    modelestimate = pe.Node(interface=spm.EstimateModel(),
                            name='estimate_model')

    conestimate = pe.Node(interface=spm.EstimateContrast(),
                          name='estimate_contrast')

    convert = pe.MapNode(interface=fs.MRIConvert(out_type='nii'),
                         name='convert',
                         iterfield=['in_file'])

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'RPVimage', 'beta_images', 'mask_image', 'residual_image',
        'con_images', 'ess_images', 'spmF_images', 'spmT_images',
        'spm_mat_file'
    ]),
                         name='outputspec')

    # Utility function

    pop_lambda = lambda x: x[0]

    # Setup the connections

    modelfit.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('bases', 'bases'),
          ('mask', 'mask_image'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, conestimate, [('contrasts', 'contrasts')]),
        (inputspec, modelestimate, [('estimation_method', 'estimation_method')
                                    ]),
        (level1design, modelestimate, [('spm_mat_file', 'spm_mat_file')]),
        (modelestimate, conestimate, [('beta_images', 'beta_images'),
                                      ('residual_image', 'residual_image'),
                                      ('spm_mat_file', 'spm_mat_file')]),
        (modelestimate, outputspec, [('RPVimage', 'RPVimage'),
                                     ('beta_images', 'beta_images'),
                                     ('mask_image', 'mask_image'),
                                     ('residual_image', 'residual_image')]),
        (conestimate, convert, [('con_images', 'in_file')]),
        (convert, outputspec, [('out_file', 'con_images')]),
        (conestimate, outputspec, [('ess_images', 'ess_images'),
                                   ('spmF_images', 'spmF_images'),
                                   ('spmT_images', 'spmT_images'),
                                   ('spm_mat_file', 'spm_mat_file')])
    ])

    return modelfit
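
A sketch of driving the returned workflow; the parameter values and contrast are illustrative, and session_info would normally be piped in from a SpecifyModel-type node:

modelfit = create_first_SPM(name='modelfit')
modelfit.inputs.inputspec.interscan_interval = 2.0
modelfit.inputs.inputspec.bases = {'hrf': {'derivs': [0, 0]}}
modelfit.inputs.inputspec.estimation_method = {'Classical': 1}
modelfit.inputs.inputspec.contrasts = [('task>rest', 'T', ['task'], [1])]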
  

"""
Define nodes
"""

#Node: OneSampleTTest - to perform an one sample t-test analysis on the volume
oneSampleTTestVolDes = pe.Node(interface=spm.OneSampleTTestDesign(),
                               name="oneSampleTTestVolDes")

#Node: EstimateModel - to estimate the model
l2estimate = pe.Node(interface=spm.EstimateModel(), name="l2estimate")
l2estimate.inputs.estimation_method = {'Classical' : 1}

#Node: EstimateContrast - to estimate the contrast (in this example just one)
l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="l2conestimate")
cont1 = ('Group','T', ['mean'],[1])
l2conestimate.inputs.contrasts = [cont1]
l2conestimate.inputs.group_contrast = True

#Node: Threshold - to threshold the estimated contrast
l2threshold = pe.Node(interface = spm.Threshold(), name="l2threshold")
l2threshold.inputs.contrast_index = 1
l2threshold.inputs.use_fwe_correction = False
l2threshold.inputs.use_topo_fdr = True
l2threshold.inputs.extent_threshold = 1  # cluster extent, in voxels
l2threshold.inputs.extent_fdr_p_threshold = 0.05  # cluster-level FDR p
# height (cluster-defining) threshold, given as -log10(p):
# 1.301 = 0.05; 2 = 0.01; 3 = 0.001
l2threshold.inputs.height_threshold = 3
Example #8
wfSPM.connect([
        (smooth, modelspec, [('smoothed_files', 'functional_runs')]),
        (runinfo, modelspec, [('info', 'subject_info'),
                              ('realign_file', 'realignment_parameters')]),
])
wfSPM.connect([(modelspec, level1design, [("session_info", "session_info")])])




##########################################################################

level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}

contrastestimate = pe.Node(
    interface=spm.EstimateContrast(), name="contrastestimate")
#contrastestimate.inputs.contrasts = contrasts
contrastestimate.overwrite = True
contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}}
contrastestimate.inputs.contrasts = contrasts


########################################################################
#%% Connecting level1 estimation and contrasts
wfSPM.connect([
    (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level1estimate, contrastestimate,
     [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'),
      ('residual_image', 'residual_image')]),
])
Example #9
def build_pipeline(model_def):

    # create pointers to needed values from
    # the model dictionary
    # TODO - this could be refactored
    TR = model_def['TR']
    subject_list = model_def['subject_list']
    JSON_MODEL_FILE = model_def['model_path']

    working_dir = model_def['working_dir']
    output_dir = model_def['output_dir']

    SUBJ_DIR = model_def['SUBJ_DIR']
    PROJECT_DIR = model_def['PROJECT_DIR']
    TASK_NAME = model_def['TaskName']
    RUNS = model_def['Runs']
    MODEL_NAME = model_def['ModelName']
    PROJECT_NAME = model_def['ProjectID']
    BASE_DIR = model_def['BaseDirectory']

    SERIAL_CORRELATIONS = model_def.get('SerialCorrelations') or "AR(1)"
    RESIDUALS = model_def.get('GenerateResiduals')

    # SpecifyModel - Generates SPM-specific Model

    modelspec = pe.Node(model.SpecifySPMModel(concatenate_runs=False,
                                              input_units='secs',
                                              output_units='secs',
                                              time_repetition=TR,
                                              high_pass_filter_cutoff=128),
                        name="modelspec")

    # #### Level 1 Design node
    #
    # ** TODO -- get the right matching template file for fmriprep **
    #
    # * ??do we need a different mask than:
    #
    #     `'/data00/tools/spm8/apriori/brainmask_th25.nii'`

    # Level1Design - Generates an SPM design matrix
    level1design = pe.Node(
        spm.Level1Design(
            bases={'hrf': {
                'derivs': [0, 0]
            }},
            timing_units='secs',
            interscan_interval=TR,
            # model_serial_correlations='AR(1)', # [none|AR(1)|FAST]',
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            model_serial_correlations=SERIAL_CORRELATIONS,

            # TODO - allow for specified masks
            mask_image=BRAIN_MASK_PATH,
            global_intensity_normalization='none'),
        name="level1design")

    # #### Estimate Model node
    # EstimateModel - estimate the parameters of the model
    level1estimate = pe.Node(
        spm.EstimateModel(
            estimation_method={'Classical': 1},
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            write_residuals=RESIDUALS),
        name="level1estimate")

    # #### Estimate Contrasts node
    # EstimateContrast - estimates contrasts
    conestimate = pe.Node(spm.EstimateContrast(), name="conestimate")

    # ## Setup pipeline workflow for level 1 model
    # Initiation of the 1st-level analysis workflow
    l1analysis = pe.Workflow(name='l1analysis')

    # Connect up the 1st-level analysis components
    l1analysis.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
        (level1estimate, conestimate, [('spm_mat_file', 'spm_mat_file'),
                                       ('beta_images', 'beta_images'),
                                       ('residual_image', 'residual_image')])
    ])

    # ## Set up nodes for file handling and subject selection
    # ### `getsubjectinfo` node
    #
    # * Use `get_subject_info()` function to generate spec data structure for first level model design matrix

    # Get Subject Info - get subject specific condition information
    getsubjectinfo = pe.Node(util.Function(
        input_names=['subject_id', 'model_path'],
        output_names=['subject_info', 'realign_params', 'condition_names'],
        function=get_subject_info),
                             name='getsubjectinfo')

    makecontrasts = pe.Node(util.Function(
        input_names=['subject_id', 'condition_names', 'model_path'],
        output_names=['contrasts'],
        function=make_contrast_list),
                            name='makecontrasts')

    ExcludeDummyScans = model_def.get('ExcludeDummyScans', 0)

    #if DEBUG:
    #    print(f'Excluding {ExcludeDummyScans} dummy scans.')

    trimdummyscans = pe.MapNode(Trim(begin_index=ExcludeDummyScans),
                                name='trimdummyscans',
                                iterfield=['in_file'])

    # ### `infosource` node
    #
    # * iterate over list of subject ids and generate subject ids and produce list of contrasts for subsequent nodes

    # Infosource - a function free node to iterate over the list of subject names
    infosource = pe.Node(util.IdentityInterface(
        fields=['subject_id', 'model_path', 'resolution', 'smoothing']),
                         name="infosource")

    fwhm_list = model_def.get('smoothing_list', [4, 6, 8])

    resolution_list = model_def.get('resolutions', ['low', 'medium', 'high'])

    infosource.iterables = [
        ('subject_id', subject_list),
        ('model_path', [JSON_MODEL_FILE] * len(subject_list)),
        ('resolution', resolution_list),
        ('smoothing', ['fwhm_{}'.format(s) for s in fwhm_list])
    ]

    # SelectFiles - to grab the data (alternativ to DataGrabber)

    ## TODO: here need to figure out how to incorporate the run number and task name in call
    templates = {
        'func':
        '{subject_id}/{resolution}/{smoothing}/sr{subject_id}_task-' +
        TASK_NAME + '_run-0*_*MNI*preproc*.nii'
    }

    selectfiles = pe.Node(nio.SelectFiles(
        templates,
        base_directory='{}/{}/derivatives/nipype/resampled_and_smoothed'.
        format(BASE_DIR, PROJECT_NAME)),
                          name="selectfiles")

    # ### Specify datasink node
    #
    # * copy files to keep from various working folders to output folder for model for subject

    # Datasink - creates output folder for important outputs
    datasink = pe.Node(
        nio.DataSink(
            base_directory=SUBJ_DIR,
            parameterization=True,
            #container=output_dir
        ),
        name="datasink")

    datasink.inputs.base_directory = output_dir

    # Use the following DataSink output substitutions
    substitutions = []
    subjFolders = [(
        '_model_path.*resolution_(low|medium|high)_smoothing_(fwhm_\\d{1,2})_subject_id_sub-.*/(.*)$',
        '\\1/\\2/\\3')]
    substitutions.extend(subjFolders)
    datasink.inputs.regexp_substitutions = substitutions

    # datasink connections

    datasink_in_outs = [('conestimate.spm_mat_file', '@spm'),
                        ('level1estimate.beta_images', '@betas'),
                        ('level1estimate.mask_image', '@mask'),
                        ('conestimate.spmT_images', '@spmT'),
                        ('conestimate.con_images', '@con'),
                        ('conestimate.spmF_images', '@spmF')]

    if model_def.get('GenerateResiduals'):
        datasink_in_outs.append(
            ('level1estimate.residual_images', '@residuals'))

    # ---------

    # ## Set up workflow for whole process

    pipeline = pe.Workflow(
        name='first_level_model_{}_{}'.format(TASK_NAME.upper(), MODEL_NAME))
    pipeline.base_dir = os.path.join(SUBJ_DIR, working_dir)

    pipeline.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('resolution', 'resolution'),
                                   ('smoothing', 'smoothing')]),
        (infosource, getsubjectinfo, [('subject_id', 'subject_id'),
                                      ('model_path', 'model_path')]),
        (infosource, makecontrasts, [('subject_id', 'subject_id'),
                                     ('model_path', 'model_path')]),
        (getsubjectinfo, makecontrasts, [('condition_names', 'condition_names')
                                         ]),
        (getsubjectinfo, l1analysis,
         [('subject_info', 'modelspec.subject_info'),
          ('realign_params', 'modelspec.realignment_parameters')]),
        (makecontrasts, l1analysis, [('contrasts', 'conestimate.contrasts')]),

        #                  (selectfiles, l1analysis, [('func',
        #                                          'modelspec.functional_runs')]),
        (selectfiles, trimdummyscans, [('func', 'in_file')]),
        (trimdummyscans, l1analysis, [('out_file', 'modelspec.functional_runs')
                                      ]),
        (infosource, datasink, [('subject_id', 'container')]),
        (l1analysis, datasink, datasink_in_outs)
    ])

    return pipeline
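
For reference, a minimal model_def dictionary covering the keys build_pipeline reads might look like this; every value is illustrative:

model_def = {
    'TR': 2.0,
    'subject_list': ['sub-001', 'sub-002'],
    'model_path': '/data/models/task_model.json',
    'working_dir': 'working',
    'output_dir': '/data/output',
    'SUBJ_DIR': '/data/subjects',
    'PROJECT_DIR': '/data/project',
    'TaskName': 'stopsignal',
    'Runs': [1, 2],
    'ModelName': 'model01',
    'ProjectID': 'proj01',
    'BaseDirectory': '/data',
    'SerialCorrelations': 'AR(1)',    # or 'none' / 'FAST'
    'GenerateResiduals': False,
    'ExcludeDummyScans': 4,
}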
Example #10
    def test_clone_trait(self):
        """ Method to test trait clone from string description.
        """
        # First build the trait description from the nipype traits, then
        # instantiate the trait
        to_test_fields = {
            "timing_units":
            "traits.Enum(('secs', 'scans'))",
            "bases": ("traits.Dict(traits.Enum(('hrf', 'fourier', "
                      "'fourier_han', 'gamma', 'fir')), traits.Any())"),
            "mask_image":
            "traits.File(Undefined)",
            "microtime_onset":
            "traits.Float()",
            "mask_threshold": ("traits.Either(traits.Enum(('-Inf',)), "
                               "traits.Float())")
        }
        i = spm.Level1Design()
        for field, result in six.iteritems(to_test_fields):

            # Test to build the trait expression
            trait = i.inputs.trait(field)
            expression = build_expression(trait)
            self.assertEqual(expression, result)

            # Try to clone the trait
            trait = eval_trait(expression)()
            self.assertEqual(build_expression(trait), result)

        to_test_fields = {
            "contrasts":
            ("traits.List(traits.Either(traits.Tuple(traits.Str(), "
             "traits.Enum(('T',)), traits.List(traits.Str()), "
             "traits.List(traits.Float())), traits.Tuple(traits.Str(), "
             "traits.Enum(('T',)), traits.List(traits.Str()), "
             "traits.List(traits.Float()), traits.List(traits.Float())), "
             "traits.Tuple(traits.Str(), traits.Enum(('F',)), "
             "traits.List(traits.Either(traits.Tuple(traits.Str(), "
             "traits.Enum(('T',)), traits.List(traits.Str()), "
             "traits.List(traits.Float())), traits.Tuple(traits.Str(), "
             "traits.Enum(('T',)), traits.List(traits.Str()), "
             "traits.List(traits.Float()), traits.List(traits.Float())"
             "))))))"),
            "use_derivs":
            "traits.Bool()"
        }
        i = spm.EstimateContrast()
        for field, result in six.iteritems(to_test_fields):

            # Test to build the trait expression
            trait = i.inputs.trait(field)
            expression = build_expression(trait)
            self.assertEqual(expression, result)

            # Try to clone the trait
            trait = eval_trait(expression)()
            self.assertEqual(build_expression(trait), result)

        # Test to clone some traits
        trait_description = ["Float", "Int"]
        handler = clone_trait(trait_description)
        trait = handler.as_ctrait()
        self.assertEqual(trait_description, trait_ids(trait))
Example #11
File: fmri_spm.py Project: yzw0041/nipype
level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
level1design.inputs.timing_units = modelspec.inputs.output_units
level1design.inputs.interscan_interval = modelspec.inputs.time_repetition
level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
parameters of the model.
"""

level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}
"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
first level contrasts specified in a few steps above.
"""

contrastestimate = pe.Node(
    interface=spm.EstimateContrast(), name="contrastestimate")
contrastestimate.inputs.contrasts = contrasts
contrastestimate.overwrite = True
contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}}
"""
Setup the pipeline
------------------

The nodes created above do not describe the flow of data. They merely
describe the parameters used for each function. In this section we
setup the connections between the nodes such that appropriate outputs
from nodes are piped into appropriate inputs of other nodes.

Use the :class:`nipype.pipeline.engine.Workflow` to create a
graph-based execution pipeline for first level analysis. The config
option tells the pipeline engine to use `workdir` as the disk
location for intermediate results while the processes run.

Example #12

def create_model_fit_pipeline(high_pass_filter_cutoff=128,
                              nipy=False,
                              ar1=True,
                              name="model",
                              save_residuals=False):
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'outlier_files', "realignment_parameters", "functional_runs", "mask",
        'conditions', 'onsets', 'durations', 'TR', 'contrasts', 'units',
        'sparse'
    ]),
                        name="inputnode")

    modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec")
    if high_pass_filter_cutoff:
        modelspec.inputs.high_pass_filter_cutoff = high_pass_filter_cutoff

    create_subject_info = pe.Node(interface=util.Function(
        input_names=['conditions', 'onsets', 'durations'],
        output_names=['subject_info'],
        function=create_subject_inf),
                                  name="create_subject_info")

    modelspec.inputs.concatenate_runs = True
    #modelspec.inputs.input_units             = units
    modelspec.inputs.output_units = "secs"
    #modelspec.inputs.time_repetition         = tr
    #modelspec.inputs.subject_info = subjectinfo

    model_pipeline = pe.Workflow(name=name)

    model_pipeline.connect([
        (inputnode, create_subject_info, [('conditions', 'conditions'),
                                          ('onsets', 'onsets'),
                                          ('durations', 'durations')]),
        (inputnode, modelspec, [('realignment_parameters',
                                 'realignment_parameters'),
                                ('functional_runs', 'functional_runs'),
                                ('outlier_files', 'outlier_files'),
                                ('units', 'input_units'),
                                ('TR', 'time_repetition')]),
        (create_subject_info, modelspec, [('subject_info', 'subject_info')]),
    ])

    if nipy:
        model_estimate = pe.Node(interface=FitGLM(), name="level1estimate")
        model_estimate.inputs.normalize_design_matrix = True
        model_estimate.inputs.save_residuals = save_residuals
        if ar1:
            model_estimate.inputs.model = "ar1"
            model_estimate.inputs.method = "kalman"
        else:
            model_estimate.inputs.model = "spherical"
            model_estimate.inputs.method = "ols"

        # TR, mask and contrasts are supplied through the inputnode
        model_pipeline.connect([
            (modelspec, model_estimate, [('session_info', 'session_info')]),
            (inputnode, model_estimate, [('mask', 'mask'), ('TR', 'TR')])
        ])

        contrast_estimate = pe.Node(interface=EstimateContrast(),
                                    name="contrastestimate")
        model_pipeline.connect([
            (model_estimate, contrast_estimate,
             [("beta", "beta"), ("nvbeta", "nvbeta"), ("s2", "s2"),
              ("dof", "dof"), ("axis", "axis"), ("constants", "constants"),
              ("reg_names", "reg_names")]),
            (inputnode, contrast_estimate, [('mask', 'mask'),
                                            ('contrasts', 'contrasts')]),
        ])
    else:
        level1design = pe.Node(interface=spm.Level1Design(),
                               name="level1design")
        level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
        if ar1:
            level1design.inputs.model_serial_correlations = "AR(1)"
        else:
            level1design.inputs.model_serial_correlations = "none"

        level1design.inputs.timing_units = modelspec.inputs.output_units

        #level1design.inputs.interscan_interval = modelspec.inputs.time_repetition
        #        if sparse:
        #            level1design.inputs.microtime_resolution = n_slices*2
        #        else:
        #            level1design.inputs.microtime_resolution = n_slices
        #level1design.inputs.microtime_onset = ref_slice

        microtime_resolution = pe.Node(interface=util.Function(
            input_names=['volume', 'sparse'],
            output_names=['microtime_resolution'],
            function=_get_microtime_resolution),
                                       name="microtime_resolution")

        level1estimate = pe.Node(interface=spm.EstimateModel(),
                                 name="level1estimate")
        level1estimate.inputs.estimation_method = {'Classical': 1}

        contrastestimate = pe.Node(interface=spm.EstimateContrast(),
                                   name="contrastestimate")
        #contrastestimate.inputs.contrasts = contrasts

        threshold = pe.MapNode(interface=spm.Threshold(),
                               name="threshold",
                               iterfield=['contrast_index', 'stat_image'])
        #threshold.inputs.contrast_index = range(1,len(contrasts)+1)

        threshold_topo_ggmm = neuroutils.CreateTopoFDRwithGGMM(
            "threshold_topo_ggmm")
        #threshold_topo_ggmm.inputs.inputnode.contrast_index = range(1,len(contrasts)+1)

        model_pipeline.connect([
            (modelspec, level1design, [('session_info', 'session_info')]),
            (inputnode, level1design, [('mask', 'mask_image'),
                                       ('TR', 'interscan_interval'),
                                       (("functional_runs", get_ref_slice),
                                        "microtime_onset")]),
            (inputnode, microtime_resolution, [("functional_runs", "volume"),
                                               ("sparse", "sparse")]),
            (microtime_resolution, level1design, [("microtime_resolution",
                                                   "microtime_resolution")]),
            (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
            (inputnode, contrastestimate, [('contrasts', 'contrasts')]),
            (level1estimate, contrastestimate,
             [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'),
              ('residual_image', 'residual_image')]),
            (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'),
                                           ('spmT_images', 'stat_image')]),
            (inputnode, threshold, [(('contrasts', _get_contrast_index),
                                     'contrast_index')]),
            (level1estimate, threshold_topo_ggmm, [('mask_image',
                                                    'inputnode.mask_file')]),
            (contrastestimate, threshold_topo_ggmm,
             [('spm_mat_file', 'inputnode.spm_mat_file'),
              ('spmT_images', 'inputnode.stat_image')]),
            (inputnode, threshold_topo_ggmm,
             [(('contrasts', _get_contrast_index), 'inputnode.contrast_index')
              ]),
        ])

    return model_pipeline

Example #13

def create_2lvl(do_one_sample, name="group", mask=None):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.spm as spm
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wk = pe.Workflow(name=name)

    inputspec = pe.Node(niu.IdentityInterface(fields=[
        'copes', 'estimation_method', 'template', "contrasts",
        "include_intercept", "regressors", "p_thresh", "height_thresh",
        'min_cluster_size'
    ]),
                        name='inputspec')

    if do_one_sample:
        model = pe.Node(spm.OneSampleTTestDesign(), name='onesample')
    else:
        model = pe.Node(spm.MultipleRegressionDesign(), name='l2model')
        wk.connect(inputspec, 'regressors', model, "user_covariates")
        wk.connect(inputspec, 'include_intercept', model, 'include_intercept')

    est_model = pe.Node(spm.EstimateModel(), name='estimate_model')
    wk.connect(inputspec, 'copes', model, 'in_files')
    wk.connect(inputspec, 'estimation_method', est_model, 'estimation_method')
    wk.connect(model, 'spm_mat_file', est_model, 'spm_mat_file')

    if mask is None:
        bet = pe.Node(fsl.BET(mask=True, frac=0.3, output_type='NIFTI'),
                      name="template_brainmask")
        wk.connect(inputspec, 'template', bet, 'in_file')
        wk.connect(bet, 'mask_file', model, 'explicit_mask_file')

    else:
        wk.connect(inputspec, 'template', model, 'explicit_mask_file')

    est_cont = pe.Node(spm.EstimateContrast(group_contrast=True),
                       name='estimate_contrast')

    wk.connect(inputspec, 'contrasts', est_cont, "contrasts")
    wk.connect(est_model, 'spm_mat_file', est_cont, "spm_mat_file")
    wk.connect(est_model, 'residual_image', est_cont, "residual_image")
    wk.connect(est_model, 'beta_images', est_cont, "beta_images")

    thresh = pe.MapNode(spm.Threshold(use_fwe_correction=False,
                                      use_topo_fdr=True,
                                      height_threshold_type='p-value'),
                        name='fdr',
                        iterfield=['stat_image', 'contrast_index'])
    wk.connect(est_cont, 'spm_mat_file', thresh, 'spm_mat_file')
    wk.connect(est_cont, 'spmT_images', thresh, 'stat_image')
    wk.connect(inputspec, 'min_cluster_size', thresh, 'extent_threshold')
    count = lambda x: list(range(1, len(x) + 1))

    wk.connect(inputspec, ('contrasts', count), thresh, 'contrast_index')
    wk.connect(inputspec, 'p_thresh', thresh, 'extent_fdr_p_threshold')
    wk.connect(inputspec, 'height_thresh', thresh, 'height_threshold')

    outputspec = pe.Node(niu.IdentityInterface(fields=[
        'RPVimage', 'beta_images', 'mask_image', 'residual_image',
        'con_images', 'ess_images', 'spmF_images', 'spmT_images',
        'spm_mat_file', 'pre_topo_fdr_map', 'thresholded_map'
    ]),
                         name='outputspec')

    wk.connect(est_model, 'RPVimage', outputspec, 'RPVimage')
    wk.connect(est_model, 'beta_images', outputspec, 'beta_images')
    wk.connect(est_model, 'mask_image', outputspec, 'mask_image')
    wk.connect(est_model, 'residual_image', outputspec, 'residual_image')
    wk.connect(est_cont, 'con_images', outputspec, 'con_images')
    wk.connect(est_cont, 'ess_images', outputspec, 'ess_images')
    wk.connect(est_cont, 'spmF_images', outputspec, 'spmF_images')
    wk.connect(est_cont, 'spmT_images', outputspec, 'spmT_images')
    wk.connect(est_cont, 'spm_mat_file', outputspec, 'spm_mat_file')
    wk.connect(thresh, 'pre_topo_fdr_map', outputspec, 'pre_topo_fdr_map')
    wk.connect(thresh, 'thresholded_map', outputspec, 'thresholded_map')
    return wk
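
A sketch of driving create_2lvl for a one-sample test; all paths and values are placeholders:

wk = create_2lvl(do_one_sample=True, name='group_onesample')
wk.inputs.inputspec.copes = ['/data/l1/sub-01_con_0001.nii',
                             '/data/l1/sub-02_con_0001.nii']
wk.inputs.inputspec.estimation_method = {'Classical': 1}
wk.inputs.inputspec.template = '/data/templates/MNI152_T1_2mm.nii.gz'
wk.inputs.inputspec.contrasts = [('group mean', 'T', ['mean'], [1])]
wk.inputs.inputspec.p_thresh = 0.05
wk.inputs.inputspec.height_thresh = 0.001
wk.inputs.inputspec.min_cluster_size = 10
wk.run()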
Example #14
con14 = ['ver_neg', 'T', condnames, [0, 0, 0, 0, -1, 0]]
con15 = ['chk_neg', 'T', condnames, [0, 0, 0, 0, 0, -1]]
con16 = ['faces_neg', 'T', condnames, [-1 / 3, -1 / 3, -1 / 3, 0, 0, 0]]
con17 = ['fill_neg', 'T', condnames, [0, 0, 0, -1 / 3, -1 / 3, -1 / 3]]
con18 = [
    'faceNfill_neg', 'T', condnames,
    [-1 / 6, -1 / 6, -1 / 6, -1 / 6, -1 / 6, -1 / 6]
]

#contrast_list=[con1,con2,con3,con4,con5,con6,con7,con8,con9]
contrast_list = [
    con1, con2, con3, con4, con5, con6, con7, con8, con9, con10, con11, con12,
    con13, con14, con15, con16, con17, con18
]

conestimate = Node(spm.EstimateContrast(), name='contrast_estimate')
conestimate.inputs.contrasts = contrast_list

substitutions = [('_subject_id_', '')]
sink = Node(DataSink(), name='sink')
sink.inputs.substitutions = substitutions
sink.inputs.base_directory = out_dir

preproc = Workflow(name='preproc')
preproc.base_dir = work_dir
preproc.connect([
    (infosource, select_files, [('subject_id', 'subject_id')]),

    #Convert files to nii
    (select_files, anat_stack, [('anat', 'dicom_files')]),
    (select_files, epi_s1_stack, [('epi_s1', 'dicom_files')]),
Example #15
File: first_level.py Project: cnlab/muri
# bases and timing_units assumed to match the parallel node in Example #9
level1design = pe.Node(
    spm.Level1Design(
        bases={'hrf': {'derivs': [0, 0]}},
        timing_units='secs',
        interscan_interval=TR,
        model_serial_correlations='none',  #'AR(1)',
        mask_image='/data00/tools/spm8/apriori/brainmask_th25.nii',
        global_intensity_normalization='none'),
    name="level1design")

# #### Estimate Model node

# EstimateModel - estimate the parameters of the model
level1estimate = pe.Node(spm.EstimateModel(estimation_method={'Classical': 1}),
                         name="level1estimate")

# #### Estimate Contrasts node

# EstimateContrast - estimates contrasts
conestimate = pe.Node(spm.EstimateContrast(), name="conestimate")

# ## Setup pipeline workflow for level 1 model

# Initiation of the 1st-level analysis workflow
l1analysis = pe.Workflow(name='l1analysis')

# Connect up the 1st-level analysis components
l1analysis.connect([
    (modelspec, level1design, [('session_info', 'session_info')]),
    (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level1estimate, conestimate, [('spm_mat_file', 'spm_mat_file'),
                                   ('beta_images', 'beta_images'),
                                   ('residual_image', 'residual_image')])
])
Example #16
:class:`nipype.interfaces.spm.Level1Design`.
"""

level1design = pe.Node(spm.Level1Design(), name="level1design")
level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
parameters of the model.
"""

level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}
"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
first level contrasts specified in a few steps above.
"""

contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate")
"""Use :class: `nipype.interfaces.utility.Select` to select each contrast for
reporting.
"""

selectcontrast = pe.Node(niu.Select(), name="selectcontrast")
"""Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of
the contrast estimate and a background image into one volume.
"""

overlaystats = pe.Node(fsl.Overlay(), name="overlaystats")
overlaystats.inputs.stat_thresh = (3, 10)
overlaystats.inputs.show_negative_stats = True
overlaystats.inputs.auto_thresh_bg = True
"""Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid
statistical volumes for a report of the first-level results.
Example #17
"""

level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
parameters of the model.
"""

level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical': 1}

threshold = pe.Node(interface=spm.Threshold(), name="threshold")
"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
first level contrasts specified in a few steps above.
"""

contrastestimate = pe.Node(interface=spm.EstimateContrast(),
                           name="contrastestimate")


def pickfirst(l):
    return l[0]


l1analysis.connect([
    (modelspec, level1design, [('session_info', 'session_info')]),
    (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
                                        ('beta_images', 'beta_images'),
                                        ('residual_image', 'residual_image')]),
    (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'),
                                   (('spmT_images', pickfirst), 'stat_image')]),
])

Example #18

    out = level1design.run()

    #shutil.move(out.outputs.spm_mat_file,output_dir+'S'+str(subj)+'/by_category')

    print "- Estimate",
    sys.stdout.flush()

    level1estimate = spm.EstimateModel()
    level1estimate.inputs.estimation_method = {'Classical': 1}
    level1estimate.inputs.spm_mat_file = output_dir + 'S' + str(
        subj) + '/by_category/SPM.mat'

    out = level1estimate.run()

    print "- Contrast",
    level1contrast = spm.EstimateContrast()
    level1contrast.inputs.spm_mat_file = out.outputs.spm_mat_file
    level1contrast.inputs.beta_images = out.outputs.beta_images
    level1contrast.inputs.residual_image = out.outputs.residual_image
    cont1 = ('family>party', 'T', ['family', 'party'], [1, -1])
    cont2 = ('family>sex', 'T', ['family', 'sex'], [1, -1])
    cont3 = ('family>work', 'T', ['family', 'work'], [1, -1])
    cont4 = ('party>sex', 'T', ['party', 'sex'], [1, -1])
    cont5 = ('party>work', 'T', ['party', 'work'], [1, -1])
    cont6 = ('sex>work', 'T', ['sex', 'work'], [1, -1])
    contrasts = [cont1, cont2, cont3, cont4, cont5, cont6]
    level1contrast.inputs.contrasts = contrasts

    out = level1contrast.run()

    print "- %f" % (time.time() - starttime)