Example #1
def model_fitting(source_img, prepped_img, subject_info, task):
    taskdir = os.path.join(outputdir, task)
    if not os.path.exists(taskdir):
        os.mkdir(taskdir)

    # skull strip the preprocessed BOLD
    bet = fsl.BET()
    bet.inputs.in_file = prepped_img
    bet.inputs.frac = 0.7
    bet.inputs.functional = True
    bet.inputs.out_file = os.path.join(taskdir, task + "_input_functional_bet.nii.gz")
    bet_res = bet.run()
    bettedinput = bet_res.outputs.out_file

    task_vs_baseline = [task + " vs baseline", 'T', [task], [1]]  # set up contrasts
    contrasts = [task_vs_baseline]

    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)  # generate the model fitting workflow
    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")  # generate design info
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")  # generate fsf file
    modelgen = pe.MapNode(  # generate .mat file
        interface=fsl.FEATModel(),
        name='modelgen',
        iterfield=['fsf_file', 'ev_files'])
    feat = pe.Node(  # feat statistics
        interface=fsl.FEAT(),
        name='feat')

    # put it all together
    modelfit.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'), ('ev_files', 'ev_files')]),
        (level1design, feat, [('fsf_files', 'fsf_file')])])

    # define inputs to workflow
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.functional_runs = bettedinput
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    modelspec.inputs.subject_info = subject_info

    level1design.inputs.interscan_interval = source_img.entities['RepetitionTime']
    level1design.inputs.bases = {'gamma': {'gammasigma': 3, 'gammadelay': 6, 'derivs': True}}
    level1design.inputs.contrasts = contrasts
    level1design.inputs.model_serial_correlations = True

    # Run the model-fitting pipeline. Main outputs are a feat directory (w/ functional img) and a design.mat file
    res = modelfit.run()

    # outputs: look the FEAT node up by name rather than by position in the graph
    feat_node = next(node for node in res.nodes if node.name == 'feat')
    feat_dir = feat_node.result.outputs.feat_dir
    thresh_img = os.path.join(feat_dir, "thresh_zstat1.nii.gz")

    return thresh_img
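
The contrast above uses nipype's list form [name, stat_type, condition_names, weights]. As a hedged sketch, richer T- and F-contrasts look like this in that form (condition names here are hypothetical, not taken from this example):

# nipype contrast specification: [name, 'T', [conditions...], [weights...]]
cond_names = ["faces", "houses"]                       # hypothetical conditions
faces_gt_houses = ["faces > houses", "T", cond_names, [1, -1]]
task_mean = ["task mean", "T", cond_names, [0.5, 0.5]]
# an F-contrast wraps a list of T-contrasts: [name, 'F', [tcon1, tcon2, ...]]
any_task = ["any task", "F", [faces_gt_houses, task_mean]]
contrasts = [faces_gt_houses, task_mean, any_task]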
Example #2
def init_taskbased_wf(
    workdir=None,
    feature=None,
    condition_files=None,
    condition_units=None,
    memcalc=MemoryCalculator(),
):
    """
    create workflow to calculate a first level glm for task functional data
    """
    if feature is not None:
        name = f"{formatlikebids(feature.name)}_wf"
    else:
        name = "taskbased_wf"
    workflow = pe.Workflow(name=name)

    #
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "tags",
                "vals",
                "metadata",
                "bold",
                "mask",
                "repetition_time",
                "confounds_selected",
                "condition_names",
                "condition_files",
                "condition_units",
            ]
        ),
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]), name="outputnode")

    if feature is not None:
        inputnode.inputs.condition_names = feature.conditions

    if condition_files is not None:
        inputnode.inputs.condition_files = condition_files

    if condition_units is not None:
        inputnode.inputs.condition_units = condition_units

    #
    make_resultdicts_a = pe.Node(
        MakeResultdicts(tagkeys=["feature"], imagekeys=["design_matrix", "contrast_matrix"]),
        name="make_resultdicts_a",
    )
    if feature is not None:
        make_resultdicts_a.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_a, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_a, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_a, "metadata")
    make_resultdicts_b = pe.Node(
        MakeResultdicts(
            tagkeys=["feature", "taskcontrast"],
            imagekeys=["effect", "variance", "z", "dof", "mask"],
            metadatakeys=["sources"],
        ),
        name="make_resultdicts_b",
    )
    if feature is not None:
        make_resultdicts_b.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_b, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_b, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_b, "metadata")
    workflow.connect(inputnode, "mask", make_resultdicts_b, "mask")

    workflow.connect(make_resultdicts_b, "resultdicts", outputnode, "resultdicts")

    #
    merge_resultdicts = pe.Node(niu.Merge(2), name="merge_resultdicts")
    workflow.connect(make_resultdicts_a, "resultdicts", merge_resultdicts, "in1")
    workflow.connect(make_resultdicts_b, "resultdicts", merge_resultdicts, "in2")
    resultdict_datasink = pe.Node(
        ResultdictDatasink(base_directory=workdir), name="resultdict_datasink"
    )
    workflow.connect(merge_resultdicts, "out", resultdict_datasink, "indicts")

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(ParseConditionFile(), name="parseconditionfile")
    workflow.connect(inputnode, "condition_names", parseconditionfile, "condition_names")
    workflow.connect(inputnode, "condition_files", parseconditionfile, "in_any")

    fillna = pe.Node(FillNA(), name="fillna")
    workflow.connect(inputnode, "confounds_selected", fillna, "in_tsv")

    # first level model specification
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    if hasattr(feature, "high_pass_filter_cutoff"):
        modelspec.inputs.high_pass_filter_cutoff = feature.high_pass_filter_cutoff
    else:
        modelspec.inputs.high_pass_filter_cutoff = np.inf
    workflow.connect(inputnode, "bold", modelspec, "functional_runs")
    workflow.connect(inputnode, "condition_units", modelspec, "input_units")
    workflow.connect(inputnode, "repetition_time", modelspec, "time_repetition")
    workflow.connect(fillna, "out_no_header", modelspec, "realignment_parameters")
    workflow.connect(parseconditionfile, "subject_info", modelspec, "subject_info")

    # transform contrasts dictionary to nipype list data structure
    contrasts = []
    if feature is not None:
        condition_names = feature.conditions
        for contrast in feature.contrasts:
            contrast_values = [contrast["values"].get(c, 0.0) for c in condition_names]
            contrasts.append(
                [contrast["name"], contrast["type"].upper(), condition_names, contrast_values]
            )
    contrast_names = list(map(firststr, contrasts))
    make_resultdicts_b.inputs.taskcontrast = contrast_names

    # generate design from first level specification
    level1design = pe.Node(
        fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {"derivs": False}},
        ),
        name="level1design",
    )
    workflow.connect(inputnode, "repetition_time", level1design, "interscan_interval")
    workflow.connect(modelspec, "session_info", level1design, "session_info")

    # generate required input files for FILMGLS from design
    modelgen = pe.Node(fsl.FEATModel(), name="modelgen")
    workflow.connect([(level1design, modelgen, [(("fsf_files", firststr), "fsf_file")])])
    workflow.connect([(level1design, modelgen, [(("ev_files", ravel), "ev_files")])])

    # calculate range of image values to determine cutoff value
    stats = pe.Node(fsl.ImageStats(op_string="-R"), name="stats")
    workflow.connect(inputnode, "bold", stats, "in_file")
    cutoff = pe.Node(
        niu.Function(input_names=["obj"], output_names=["min_val"], function=firstfloat),
        name="cutoff",
    )
    workflow.connect(stats, "out_stat", cutoff, "obj")

    # actually estimate the first level model
    modelestimate = pe.Node(
        fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name="modelestimate"
    )
    workflow.connect(inputnode, "bold", modelestimate, "in_file")
    workflow.connect(cutoff, "min_val", modelestimate, "threshold")
    workflow.connect(modelgen, "design_file", modelestimate, "design_file")
    workflow.connect(modelgen, "con_file", modelestimate, "tcon_file")

    # make dof volume
    makedofvolume = pe.Node(
        MakeDofVolume(), name="makedofvolume"
    )
    workflow.connect(modelestimate, "copes", makedofvolume, "copes")
    workflow.connect(modelestimate, "dof_file", makedofvolume, "dof_file")

    workflow.connect(modelestimate, "copes", make_resultdicts_b, "effect")
    workflow.connect(modelestimate, "varcopes", make_resultdicts_b, "variance")
    workflow.connect(modelestimate, "zstats", make_resultdicts_b, "z")
    workflow.connect(makedofvolume, "out_file", make_resultdicts_b, "dof")

    #
    mergecolumnnames = pe.Node(niu.Merge(2), name="mergecolumnnames")
    mergecolumnnames.inputs.in1 = condition_names
    workflow.connect(fillna, "column_names", mergecolumnnames, "in2")

    design_unvest = pe.Node(Unvest(), name="design_unvest")
    workflow.connect(modelgen, "design_file", design_unvest, "in_vest")

    design_tsv = pe.Node(MergeColumns(1), name="design_tsv")
    workflow.connect(design_unvest, "out_no_header", design_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", design_tsv, "column_names1")

    contrast_unvest = pe.Node(Unvest(), name="contrast_unvest")
    workflow.connect(modelgen, "con_file", contrast_unvest, "in_vest")

    contrast_tsv = pe.Node(MergeColumns(1), name="contrast_tsv")
    contrast_tsv.inputs.row_index = contrast_names
    workflow.connect(contrast_unvest, "out_no_header", contrast_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", contrast_tsv, "column_names1")

    workflow.connect(design_tsv, "out_with_header", make_resultdicts_a, "design_matrix")
    workflow.connect(contrast_tsv, "out_with_header", make_resultdicts_a, "contrast_matrix")

    return workflow
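
This workflow relies on small helpers (firststr, firstfloat, ravel) defined elsewhere in its source package. Plausible sketches, inferred only from how they are used above (the package's real implementations may differ):

def firststr(obj):
    # return the first string found in a possibly nested iterable (sketch)
    if isinstance(obj, str):
        return obj
    if hasattr(obj, "__iter__"):
        for item in obj:
            found = firststr(item)
            if found is not None:
                return found
    return None

def firstfloat(obj):
    # return the first number in a possibly nested iterable (sketch);
    # used above to turn the [min, max] output of fslstats -R into a threshold
    if isinstance(obj, (int, float)):
        return float(obj)
    if hasattr(obj, "__iter__") and not isinstance(obj, str):
        for item in obj:
            found = firstfloat(item)
            if found is not None:
                return found
    return None

def ravel(obj):
    # flatten arbitrarily nested lists, leaving strings whole (sketch)
    if isinstance(obj, str) or not hasattr(obj, "__iter__"):
        return [obj]
    out = []
    for item in obj:
        out.extend(ravel(item))
    return out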
Example #3
def modelfit_fsl(wf_name='modelfit'):
    """

    Fit 1st level GLM using FSL routines

    Usage (TODO)

    modelfit.inputs.inputspec.fwhm = 12
    modelfit.inputs.inputspec.brain_mask = ['/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz', '/opt/shared2/nipype-test/testblock/example_func_brain_mask.nii.gz']

    modelfit.inputs.inputspec.input_units = 'secs'
    modelfit.inputs.inputspec.in_file = ['/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz', '/opt/shared2/nipype-test/testblock/mc_data_brain.nii.gz']
    modelfit.inputs.inputspec.TR = 2
    modelfit.inputs.inputspec.high_pass_filter_cutoff = 100 #sigma in TR
    modelfit.inputs.inputspec.event_files = ['/opt/shared2/nipype-test/testblock/a']

    cont1 = ['whisker', 'T', ['a', 'a'], [1.0, 0.0]]
    cont2 = ['-whisker', 'T', ['a', 'a'], [-1.0, 0.0]]
    cont3 = ['Task','F', [cont1, cont2]]
    contrasts = [cont1]

    modelfit.inputs.inputspec.contrasts = contrasts #TODO: change condition names

    modelfit.inputs.inputspec.bases_function = {'dgamma': {'derivs':  True}}
    modelfit.inputs.inputspec.model_serial_correlations = True


    #modelfit.write_graph('graph.dot');
    modelfit.write_graph('graph.dot', graph2use='colored');
    x=modelfit.run()
    #x=modelfit.run(plugin='MultiProc', plugin_args={'n_procs': 8})

    server.serve_content(modelfit)
    """

    modelfit = pe.Workflow(name=wf_name)
    """
        Set up a node to define all inputs required for the modelfit workflow

    """

    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=[
            'in_file', 'ev_file', 'confounders', 'contrasts',
            'high_pass_filter_cutoff', 'fwhm', 'interscan_interval', 'TR',
            'input_units', 'bases_function', 'model_serial_correlations',
            'brain_mask'
        ],
        mandatory_inputs=True),
                        name='inputspec')

    #TODO: eliminate brain mask

    #inputnode.iterables=[('high_pass_filter_cutoff', [30, 60, 90, 120, 500])]
    """
        Set up a node to define outputs for the modelfit workflow

    """

    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['zstats', 'zfstats', 'copes', 'varcopes'],
        mandatory_inputs=True),
                         name='outputspec')

    # collect subject info

    getsubjectinfo = pe.MapNode(util.Function(
        input_names=['ev_file', 'confounders'],
        output_names=['subject_info'],
        function=get_subject_info),
                                name='getsubjectinfo',
                                iterfield=['confounders'])

    # nipype.algorithms.modelgen.SpecifyModel to generate design information.

    modelspec = pe.MapNode(interface=model.SpecifyModel(),
                           name="modelspec",
                           iterfield=['subject_info'])

    # smooth #TODO: move into preproc pipeline

    smooth = preproc.create_susan_smooth("smooth")
    #smooth.get_node( "smooth").iterables=[('fwhm', [6., 8., 10., 12., 14., 16.])]

    toSigma = pe.Node(interface=util.Function(
        input_names=['high_pass_filter_cutoff', 'TR'],
        output_names=['high_pass_filter_opstring'],
        function=highpass_operand),
                      name='toSigma')

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt',
                                                   op_string=''),
                          iterfield=['in_file'],
                          name='highpass')

    # Use nipype.interfaces.fsl.Level1Design to generate a run specific fsf file for analysis

    level1design = pe.MapNode(interface=fsl.Level1Design(),
                              name="level1design",
                              iterfield='session_info')

    # Use nipype.interfaces.fsl.FEATModel to generate a run specific mat file for use by FILMGLS

    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])

    # Use nipype.interfaces.fsl.FILMGLS to estimate a model specified by a mat file and a functional run

    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5,
                              threshold=200),
        name='modelestimate',
        iterfield=['in_file', 'design_file'])

    # Use nipype.interfaces.fsl.ContrastMgr to generate contrast estimates

    conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                             name='conestimate',
                             iterfield=[
                                 'param_estimates', 'sigmasquareds',
                                 'corrections', 'dof_file', 'tcon_file'
                             ])

    modelfit.connect([
        (
            inputnode,
            smooth,
            [
                ('in_file', 'inputnode.in_files'),
                ('fwhm', 'inputnode.fwhm'),  # in iterable
                ('brain_mask', 'inputnode.mask_file')
            ]),
        (smooth, highpass, [('outputnode.smoothed_files', 'in_file')]),
        (inputnode, toSigma, [('high_pass_filter_cutoff',
                               'high_pass_filter_cutoff')]),
        (inputnode, toSigma, [('TR', 'TR')]),
        (toSigma, highpass, [('high_pass_filter_opstring', 'op_string')]),
        (inputnode, getsubjectinfo, [('ev_file', 'ev_file'),
                                     ('confounders', 'confounders')]),
        (getsubjectinfo, modelspec, [('subject_info', 'subject_info')]),
        (highpass, modelspec, [('out_file', 'functional_runs')]),
        (highpass, modelestimate, [('out_file', 'in_file')]),
        (inputnode, modelspec, [
            ('input_units', 'input_units'),
            ('TR', 'time_repetition'),
            ('high_pass_filter_cutoff', 'high_pass_filter_cutoff'),
        ]),
        (inputnode, level1design, [('TR', 'interscan_interval'),
                                   ('model_serial_correlations',
                                    'model_serial_correlations'),
                                   ('bases_function', 'bases'),
                                   ('contrasts', 'contrasts')]),
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate, [('param_estimates', 'param_estimates'),
                                      ('sigmasquareds', 'sigmasquareds'),
                                      ('corrections', 'corrections'),
                                      ('dof_file', 'dof_file')]),
        (conestimate, outputnode, [('zstats', 'zstats'),
                                   ('zfstats', 'zfstats'), ('copes', 'copes'),
                                   ('varcopes', 'varcopes')])
    ])

    return modelfit
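
Two helpers used above, highpass_operand and get_subject_info, are not shown in this example. Hedged sketches of what they plausibly look like; the conversion below follows the usual FEAT convention (sigma in volumes = cutoff in seconds / (2 * TR)), and the real functions may differ:

def highpass_operand(high_pass_filter_cutoff, TR):
    # build the fslmaths -bptf operand; -1 disables the low-pass side
    return "-bptf %.10f -1" % (high_pass_filter_cutoff / (2.0 * TR))

def get_subject_info(ev_file, confounders):
    # sketch: build a Bunch for SpecifyModel from a 3-column EV file
    # plus a text table of confound regressors
    import numpy as np
    from nipype.interfaces.base import Bunch

    ev = np.atleast_2d(np.loadtxt(ev_file))
    conf = np.atleast_2d(np.loadtxt(confounders))
    return Bunch(
        conditions=['task'],
        onsets=[list(ev[:, 0])],
        durations=[list(ev[:, 1])],
        amplitudes=[list(ev[:, 2])],
        regressor_names=['conf%d' % i for i in range(conf.shape[1])],
        regressors=[list(col) for col in conf.T],
    )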
Example #4
def create_modelfit_workflow(name='modelfit'):
    """Create an FSL individual modelfitting workflow

    Example
    -------

    >>> modelfit = create_modelfit_workflow()
    >>> modelfit.base_dir = '.'
    >>> info = dict()
    >>> modelfit.inputs.inputspec.session_info = info
    >>> modelfit.inputs.inputspec.interscan_interval = 3.
    >>> modelfit.inputs.inputspec.film_threshold = 1000
    >>> modelfit.run() #doctest: +SKIP

    Inputs::

         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.functional_data : functional runs to fit
         inputspec.bases : basis functions for the HRF
         inputspec.model_serial_correlations : whether to prewhiten (FILM)

    Outputs::

         outputspec.copes : contrast parameter estimates
         outputspec.varcopes : variances of the contrast estimates
         outputspec.dof_file : degrees of freedom
         outputspec.pfiles : p-value images converted from the z-stats
         outputspec.parameter_estimates : parameter estimates from FILMGLS
    """

    modelfit = pe.Workflow(name=name)

    """
    Create the nodes
    """

    inputspec = pe.Node(util.IdentityInterface(fields=['session_info',
                                                       'interscan_interval',
                                                       'contrasts',
                                                       'film_threshold',
                                                       'functional_data',
                                                       'bases',
                                                       'model_serial_correlations']),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(), name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                     mask_size=5),
                               name='modelestimate',
                               iterfield=['design_file', 'in_file'])
    conestimate = pe.MapNode(interface=fsl.ContrastMgr(), name='conestimate',
                             iterfield=['tcon_file', 'param_estimates',
                                        'sigmasquareds', 'corrections',
                                        'dof_file'])
    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         name='ztop',
                         iterfield=['in_file'])
    outputspec = pe.Node(util.IdentityInterface(fields=['copes', 'varcopes',
                                                        'dof_file', 'pfiles',
                                                        'parameter_estimates']),
                         name='outputspec')

    """
    Utility function
    """

    pop_lambda = lambda x: x[0]

    """
    Setup the connections
    """

    modelfit.connect([
        (inputspec, level1design, [('interscan_interval', 'interscan_interval'),
                                   ('session_info', 'session_info'),
                                   ('contrasts', 'contrasts'),
                                   ('bases', 'bases'),
                                   ('model_serial_correlations',
                                    'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate, [('param_estimates', 'param_estimates'),
                                    ('sigmasquareds', 'sigmasquareds'),
                                    ('corrections', 'corrections'),
                                    ('dof_file', 'dof_file')]),
        (conestimate, ztopval, [(('zstats', pop_lambda), 'in_file')]),
        (ztopval, outputspec, [('out_file', 'pfiles')]),
        (modelestimate, outputspec, [('param_estimates', 'parameter_estimates'),
                                     ('dof_file', 'dof_file')]),
        (conestimate, outputspec, [('copes', 'copes'),
                                   ('varcopes', 'varcopes')]),
        ])
    return modelfit
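
Note: fsl.ContrastMgr wraps FSL's contrast_mgr tool, which was dropped from FSL around release 5.0.7, so this workflow only runs against older FSL installs. On current FSL the same contrast estimates are usually obtained by passing the con_file straight to FILMGLS as tcon_file, as Examples #2, #10 and #11 do.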
Example #5
def init_taskbased_wf(analysis=None, memcalc=MemoryCalculator()):
    """
    create workflow to calculate a first level glm for task functional data
    """

    assert isinstance(analysis, Analysis)
    assert isinstance(analysis.tags, Tags)

    # make bold file variant specification
    boldfilefields = ["bold_file"]
    varianttupls = [("space", analysis.tags.space)]
    if analysis.tags.grand_mean_scaled is not None:
        assert isinstance(analysis.tags.grand_mean_scaled, GrandMeanScaledTag)
        varianttupls.append(analysis.tags.grand_mean_scaled.as_tupl())
    if analysis.tags.band_pass_filtered is not None:
        assert isinstance(analysis.tags.band_pass_filtered,
                          BandPassFilteredTag)
        assert analysis.tags.band_pass_filtered.type == "gaussian"
        varianttupls.append(analysis.tags.band_pass_filtered.as_tupl())
    if analysis.tags.confounds_removed is not None:
        assert isinstance(analysis.tags.confounds_removed, ConfoundsRemovedTag)
        confounds_removed_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" in name)
        varianttupls.append(("confounds_removed", confounds_removed_names))
        confounds_extract_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" not in name)
        if len(confounds_extract_names) > 0:
            boldfilefields.append("confounds_file")
            varianttupls.append(("confounds_extract", confounds_extract_names))
    if analysis.tags.smoothed is not None:
        assert isinstance(analysis.tags.smoothed, SmoothedTag)
        varianttupls.append(analysis.tags.smoothed.as_tupl())
    variantdict = dict(varianttupls)

    boldfilevariant = (tuple(boldfilefields), tuple(varianttupls))

    assert analysis.name is not None
    workflow = pe.Workflow(name=analysis.name)

    # inputs are the bold file, the mask file and the confounds file
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            *boldfilefields, "mask_file", "condition_files", "metadata"
        ]),
        name="inputnode",
    )

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(
        interface=ParseConditionFile(),
        name="parseconditionfile",
    )
    workflow.connect(inputnode, "condition_files", parseconditionfile,
                     "in_any")

    def get_repetition_time(dic):
        return dic.get("RepetitionTime")

    # first level model specification
    modelspec = pe.Node(
        interface=model.SpecifyModel(input_units="secs"),
        name="modelspec",
    )
    workflow.connect([
        (
            inputnode,
            modelspec,
            [
                ("bold_file", "functional_runs"),
                (("metadata", get_repetition_time), "time_repetition"),
            ],
        ),
        (parseconditionfile, modelspec, [("subject_info", "subject_info")]),
    ])
    if "band_pass_filtered" in variantdict:
        modelspec.inputs.high_pass_filter_cutoff = float(
            analysis.tags.band_pass_filtered.high)
    if "confounds_extract" in variantdict:
        workflow.connect([(inputnode, modelspec,
                           [("confounds_file", "realignment_parameters")])])

    # transform contrasts dictionary to nipype list data structure
    contrasts = [[
        contrast.name,
        contrast.type.upper(), *map(list, zip(*contrast.values.items()))
    ] for contrast in analysis.contrasts]
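
    # For illustration (values hypothetical): a contrast object with
    #   name="faces>houses", type="t", values={"faces": 1.0, "houses": -1.0}
    # becomes ["faces>houses", "T", ["faces", "houses"], [1.0, -1.0]]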

    # generate design from first level specification
    level1design = pe.Node(
        interface=fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {
                "derivs": False
            }},
        ),
        name="level1design",
    )
    workflow.connect([
        (
            inputnode,
            level1design,
            [(("metadata", get_repetition_time), "interscan_interval")],
        ),
        (modelspec, level1design, [("session_info", "session_info")]),
    ])

    # generate required input files for FILMGLS from design
    modelgen = pe.Node(interface=fsl.FEATModel(),
                       name="modelgen")
    workflow.connect([(
        level1design,
        modelgen,
        [("fsf_files", "fsf_file"), ("ev_files", "ev_files")],
    )])

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    boldfilecutoff = pe.Node(interface=fsl.ImageStats(op_string="-R"),
                             name="boldfilecutoff")
    workflow.connect([(inputnode, boldfilecutoff, [("bold_file", "in_file")])])

    # actually estimate the first level model
    modelestimate = pe.Node(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name="modelestimate",
    )
    workflow.connect([
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        (boldfilecutoff, modelestimate, [(("out_stat", firstfloat),
                                          "threshold")]),
        (
            modelgen,
            modelestimate,
            [("design_file", "design_file"), ("con_file", "tcon_file")],
        ),
    ])

    # make dof volume
    makedofvolume = pe.MapNode(
        interface=MakeDofVolume(),
        iterfield=["dof_file", "cope_file"],
        name="makedofvolume",
    )
    workflow.connect([
        (
            modelestimate,
            makedofvolume,
            [(("copes", first), "cope_file"), ("dof_file", "dof_file")],
        ),
    ])

    outputnode = pe.Node(
        interface=MakeResultdicts(keys=[
            "firstlevelanalysisname",
            "firstlevelfeaturename",
            "cope",
            "varcope",
            "zstat",
            "dof_file",
            "mask_file",
        ]),
        name="outputnode",
    )
    outputnode.inputs.firstlevelanalysisname = analysis.name
    outputnode.inputs.firstlevelfeaturename = list(map(first, contrasts))
    workflow.connect([
        (inputnode, outputnode, [("metadata", "basedict"),
                                 ("mask_file", "mask_file")]),
        (
            modelestimate,
            outputnode,
            [
                (("copes", ravel), "cope"),
                (("varcopes", ravel), "varcope"),
                (("zstats", ravel), "zstat"),
            ],
        ),
        (makedofvolume, outputnode, [("out_file", "dof_file")]),
    ])

    return workflow, (boldfilevariant, )
Example #6
###########
#
# SETTING UP THE FIRST LEVEL ANALYSIS NODES
#
###########

# model specification
modelspec = Node(modelgen.SpecifyModel(subject_info=subject_info,
                                       input_units='secs',
                                       time_repetition=TR,
                                       high_pass_filter_cutoff=100),
                 name="modelspec")

# first-level design
level1design = Node(fsl.Level1Design(bases={'dgamma':{'derivs': True}},
                                     interscan_interval=TR,
                                     model_serial_correlations=True,
                                     contrasts=contrast_list),
                    name="level1design")

# creating all the other files necessary to run the model
modelgen = Node(fsl.FEATModel(),
                name='modelgen')

# then running through FEAT
feat = Node(fsl.FEAT(),
            name="feat")

# creating datasink to collect outputs
datasink = Node(DataSink(base_directory=outDir),
                name='datasink')
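
This snippet assumes several names defined earlier in its script. A hypothetical setup consistent with how they are used (values are illustrative only):

from nipype import Node
from nipype.interfaces.base import Bunch
from nipype.interfaces.io import DataSink
import nipype.algorithms.modelgen as modelgen
import nipype.interfaces.fsl as fsl

TR = 2.0                                   # repetition time in seconds (assumed)
outDir = '/tmp/firstlevel_out'             # datasink directory (assumed)
subject_info = [Bunch(conditions=['task'],
                      onsets=[[10.0, 40.0, 70.0]],
                      durations=[[15.0, 15.0, 15.0]])]
contrast_list = [('task > baseline', 'T', ['task'], [1.0])]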
Example #7
def create_first(name='modelfit'):
    """First level task-fMRI modelling workflow
    
    Parameters
    ----------
    name : name of workflow. Default = 'modelfit'
    
    Inputs
    ------
    inputspec.session_info :
    inputspec.interscan_interval :
    inputspec.contrasts :
    inputspec.film_threshold :
    inputspec.functional_data :
    inputspec.bases :
    inputspec.model_serial_correlations :
    
    Outputs
    -------
    outputspec.copes :
    outputspec.varcopes :
    outputspec.dof_file :
    outputspec.pfiles :
    outputspec.parameter_estimates :
    outputspec.zstats :
    outputspec.tstats :
    outputspec.design_image :
    outputspec.design_file :
    outputspec.design_cov :
    
    Returns
    -------
    workflow : first-level workflow
    """
    import nipype.interfaces.fsl as fsl  # fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    modelfit = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')

    level1design = pe.Node(interface=fsl.Level1Design(),
                           name="create_level1_design")

    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='generate_model',
                          iterfield=['fsf_file', 'ev_files'])

    modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                     mask_size=5),
                               name='estimate_model',
                               iterfield=['design_file', 'in_file'])

    conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                             name='estimate_contrast',
                             iterfield=[
                                 'tcon_file', 'param_estimates',
                                 'sigmasquareds', 'corrections', 'dof_file'
                             ])

    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         name='z2pval',
                         iterfield=['in_file'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'parameter_estimates',
        'zstats', 'tstats', 'design_image', 'design_file', 'design_cov',
        'sigmasquareds'
    ]),
                         name='outputspec')

    # Utility function

    pop_lambda = lambda x: x[0]

    # Setup the connections

    modelfit.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate, [('param_estimates', 'param_estimates'),
                                      ('sigmasquareds', 'sigmasquareds'),
                                      ('corrections', 'corrections'),
                                      ('dof_file', 'dof_file')]),
        (conestimate, ztopval, [(('zstats', pop_lambda), 'in_file')]),
        (ztopval, outputspec, [('out_file', 'pfiles')]),
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file'),
                                     ('sigmasquareds', 'sigmasquareds')]),
        (conestimate, outputspec, [('copes', 'copes'),
                                   ('varcopes', 'varcopes'),
                                   ('tstats', 'tstats'), ('zstats', 'zstats')])
    ])
    modelfit.connect(modelgen, 'design_image', outputspec, 'design_image')
    modelfit.connect(modelgen, 'design_file', outputspec, 'design_file')
    modelfit.connect(modelgen, 'design_cov', outputspec, 'design_cov')
    return modelfit
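
A minimal, hedged usage sketch (values are illustrative; session_info, contrasts and functional_data would come from SpecifyModel and the experiment design):

wf = create_first(name='modelfit')
wf.base_dir = '.'
wf.inputs.inputspec.interscan_interval = 2.0
wf.inputs.inputspec.film_threshold = 1000
wf.inputs.inputspec.bases = {'dgamma': {'derivs': False}}
wf.inputs.inputspec.model_serial_correlations = True
# supply session_info, contrasts and functional_data, then: wf.run()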
Example #8
contrasts = [cont1, cont2, cont3, cont4, cont5]


#%%
l1_spec = pe.Node(SpecifyModel(
    parameter_source='FSL',
    input_units='secs',
    high_pass_filter_cutoff=120,
    time_repetition = tr,
), name='l1_spec')

# l1_model creates a first-level model design
l1_model = pe.Node(fsl.Level1Design(
    bases={'dgamma': {'derivs': True}},
    model_serial_correlations=True,
    interscan_interval = tr,
    contrasts=contrasts
    # orthogonalization=orthogonality,
), name='l1_model')

# feat_spec generates an fsf model specification file
feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')

# feat_fit actually runs FEAT
feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=5)

## instead of FEAT
#modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
#                                                 mask_size=5,
#                                                 threshold=1000),
#                                                 name='modelestimate',
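
This fragment assumes tr and the contrasts cont1 through cont5 were defined earlier in the script. A hypothetical setup consistent with their use (condition names invented for illustration):

tr = 2.0                                   # repetition time in seconds (assumed)
conds = ['congruent', 'incongruent']       # hypothetical condition names
cont1 = ('incon > con', 'T', conds, [-1, 1])
cont2 = ('con > incon', 'T', conds, [1, -1])
cont3 = ('task mean', 'T', conds, [0.5, 0.5])
cont4 = ('congruent', 'T', conds, [1, 0])
cont5 = ('incongruent', 'T', conds, [0, 1])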
Example #9
            # the snippet begins mid-definition; this opening is an assumed reconstruction
            target_modelspec = MapNode(
                interface=model.SpecifyModel(),
                name="Target_ModelSpec",
                iterfield=[
                    'event_files', 'functional_runs', 'realignment_parameters'
                ])

            analysis.connect(data, 'ev_target', target_modelspec,
                             'event_files')
            analysis.connect(question_trimmer, ('roi_file', nest_list),
                             target_modelspec, 'functional_runs')
            analysis.connect(mp_trimmer, ('trimmed', nest_list),
                             target_modelspec, 'realignment_parameters')

            target_level1design = MapNode(
                interface=fsl.Level1Design(bases={'dgamma': {
                    'derivs': False
                }},
                                           interscan_interval=.400,
                                           model_serial_correlations=True),
                name="Target_level1design",
                iterfield=['session_info', 'contrasts'])
            analysis.connect(target_modelspec, 'session_info',
                             target_level1design, 'session_info')
            analysis.connect(data, 'target_contrasts', target_level1design,
                             'contrasts')

            target_FEATModel = MapNode(interface=fsl.model.FEATModel(),
                                       name='Target_FEATModel',
                                       iterfield=['ev_files', 'fsf_file'])
            analysis.connect(target_level1design, ('ev_files', unnest_list),
                             target_FEATModel, 'ev_files')
            analysis.connect(target_level1design, ('fsf_files', unnest_list),
                             target_FEATModel, 'fsf_file')
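
The nest_list and unnest_list helpers are not shown in this fragment. Plausible sketches matching how they are used with the MapNode connections above (assumptions, not the original code):

def nest_list(x):
    # wrap a single value in a list so a MapNode can iterate over it (assumed)
    return [x]

def unnest_list(x):
    # collapse one level of nesting from MapNode outputs (assumed)
    out = []
    for item in x:
        if isinstance(item, list):
            out.extend(item)
        else:
            out.append(item)
    return out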
Example #10
def L1PIPE(): 

	# ---1) Import modules
	import nipype.interfaces.fsl as fsl 
	import nipype.pipeline.engine as pe
	import nipype.algorithms.modelgen as model
	import glob
	from nipype import Function
	import matplotlib
	import nipype.interfaces.utility as util
	import os


	#--- 2) Specify model node
	specify_model = pe.Node(interface=model.SpecifyModel(), name="SPECIFY_MODEL")
	specify_model.inputs.input_units = 'secs'

	runs = input('Please drag in the pre-processed functional data\n')
	runs2 = runs.strip('\'"')

	NIFTIDIR = os.path.split(runs2)[0]

	specify_model.inputs.functional_runs = [runs2]
	specify_model.inputs.time_repetition = float(input('Enter the TR (s)\n'))
	specify_model.inputs.high_pass_filter_cutoff = float(input('Enter the high-pass filter cutoff (s)\n'))
	EVENTFILES = input('Please drag in the directory of 3-column event files\n')
	EVENTFILES2 = EVENTFILES.strip('\'"')
	EVENTFILESLIST = glob.glob(EVENTFILES2 + '/*')
	specify_model.inputs.event_files = sorted(EVENTFILESLIST)


	#--- 3) Level 1 design node.
	Designer=pe.Node(interface=fsl.Level1Design(),name='DESIGN')
	Designer.inputs.interscan_interval = float(specify_model.inputs.time_repetition)
	Designer.inputs.bases = {'dgamma':{'derivs': False}}
	Designer.inputs.model_serial_correlations = False

	#--- 4) Make some contrasts
	cont1=('Task', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
	cont2=('Up', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, 0, 0, 1, 0, 0, 1])
	cont3=('SC', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, 1, 1, 0, 0, 0, 0])
	cont4=('UpvSC', 'T', ['B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001', 'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001', 'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001', 'B1UPNEUT.RUN001'], [0, 0, 0, 0, -1, -1, 1, 0, 0, 1])
	Designer.inputs.contrasts=[cont1, cont2, cont3, cont4]

	#--- 5) FSL model node
	Model=pe.Node(interface=fsl.FEATModel(),name='FEATMODEL')

	#--- 6) FILM GSL node
	fgls=pe.Node(interface=fsl.FILMGLS(),name='FILM_GLS')
	fgls.inputs.in_file=runs2

	#--- 7) outputnode for the design image (gets binned otherwise)
	outputnode = pe.Node(interface=util.IdentityInterface(fields=['im','cope','varcope','dof','resid','params','sigmas']),name='outputnode')


	#--- 8)  Plotting node
	def plot(in_file):
		from nilearn import plotting
		import matplotlib.pyplot as plt
		plotting.plot_stat_map(stat_map_img=in_file, display_mode='z', cut_coords=10, threshold=0.)
		plt.show()



	plotter=pe.MapNode(Function(input_names=['in_file'],output_names='display',function=plot),iterfield=['in_file'],name='PLOTTER')

	workflow = pe.Workflow(name='L1PIPE')
	

	workflow.connect(specify_model,'session_info',Designer,'session_info')
	workflow.connect(Designer,'fsf_files',Model,'fsf_file')
	workflow.connect(Designer,'ev_files',Model,'ev_files')
	workflow.connect(Model,'design_file',fgls,'design_file')
	workflow.connect(Model,'con_file',fgls,'tcon_file')
	workflow.connect(Model,'design_image',outputnode,'im')
	
	# Feed the z stats to the plotter.
	workflow.connect(fgls,'zstats',plotter,'in_file')
	workflow.connect(fgls,'copes',outputnode,'cope')
	workflow.connect(fgls,'varcopes',outputnode,'varcope')
	workflow.connect(fgls,'dof_file',outputnode,'dof')
	workflow.connect(fgls,'residual4d',outputnode,'resid')
	workflow.connect(fgls,'param_estimates',outputnode,'params')
	workflow.connect(fgls,'sigmasquareds',outputnode,'sigmas')
	




	workflow.base_dir = NIFTIDIR
	workflow.write_graph(graph2use='exec')
	workflow.run()
Example #11
def init_glm_wf(conditions,
                contrasts,
                repetition_time,
                use_mov_pars,
                name="glm"):
    """
    create workflow to calculate a first level glm for task functional data

    :param conditions: dictionary of conditions with onsets and durations 
        by condition names
    :param contrasts: dictionary of contrasts by names
    :param repetition_time: repetition time
    :param use_mov_pars: if true, regress out movement parameters when 
        calculating the glm
    :param name: workflow name (Default value = "glm")

    """
    workflow = pe.Workflow(name=name)

    # inputs are the bold file, the mask file and the confounds file
    # that contains the movement parameters
    inputnode = pe.Node(niu.IdentityInterface(
        fields=["bold_file", "mask_file", "confounds_file"]),
                        name="inputnode")

    # transform (unordered) conditions dictionary into three (ordered) lists

    names = list(conditions.keys())
    onsets = [conditions[k]["onsets"] for k in names]
    durations = [conditions[k]["durations"] for k in names]

    # first level model specification
    modelspec = pe.Node(interface=model.SpecifyModel(
        input_units="secs",
        high_pass_filter_cutoff=128.,
        time_repetition=repetition_time,
        subject_info=Bunch(conditions=names,
                           onsets=onsets,
                           durations=durations)),
                        name="modelspec")

    # transform contrasts dictionary to nipype list data structure
    contrasts_ = [[k, "T"] +
                  [list(i) for i in zip(*[(n, val) for n, val in v.items()])]
                  for k, v in contrasts.items()]

    connames = [k[0] for k in contrasts_]

    # outputs are cope, varcope and zstat for each contrast and a dof_file
    outputnode = pe.Node(niu.IdentityInterface(fields=sum(
        [["%s_img" % conname,
          "%s_varcope" % conname,
          "%s_zstat" % conname] for conname in connames], []) + ["dof_file"]),
                         name="outputnode")

    outputnode._interface.names = connames

    # generate design from first level specification
    level1design = pe.Node(interface=fsl.Level1Design(
        contrasts=contrasts_,
        interscan_interval=repetition_time,
        model_serial_correlations=True,
        bases={"dgamma": {
            "derivs": False
        }}),
                           name="level1design")

    # generate required input files for FILMGLS from design
    modelgen = pe.Node(interface=fsl.FEATModel(),
                       name="modelgen")

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    stats = pe.Node(interface=fsl.ImageStats(op_string="-R"), name="stats")

    # actually estimate the first level model
    modelestimate = pe.Node(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                  mask_size=5),
                            name="modelestimate")

    # mask regression outputs
    maskimgs = pe.MapNode(interface=fsl.ApplyMask(),
                          name="maskimgs",
                          iterfield=["in_file"])
    maskvarcopes = pe.MapNode(interface=fsl.ApplyMask(),
                              name="maskvarcopes",
                              iterfield=["in_file"])
    maskzstats = pe.MapNode(interface=fsl.ApplyMask(),
                            name="maskzstats",
                            iterfield=["in_file"])

    # split regression outputs by name
    splitimgs = pe.Node(interface=niu.Split(splits=[1
                                                    for conname in connames]),
                        name="splitimgs")
    splitvarcopes = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitvarcopes")
    splitzstats = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitzstats")

    # pass movement parameters to glm model specification if requested
    c = [("bold_file", "functional_runs")]
    if use_mov_pars:
        c.append(("confounds_file", "realignment_parameters"))

    workflow.connect([
        (inputnode, modelspec, c),
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        (modelspec, level1design, [("session_info", "session_info")]),
        (level1design, modelgen, [("fsf_files", "fsf_file"),
                                  ("ev_files", "ev_files")]),
        (inputnode, stats, [("bold_file", "in_file")]),
        (stats, modelestimate, [(("out_stat", get_float), "threshold")]),
        (modelgen, modelestimate, [("design_file", "design_file"),
                                   ("con_file", "tcon_file")]),
        (inputnode, maskimgs, [("mask_file", "mask_file")]),
        (inputnode, maskvarcopes, [("mask_file", "mask_file")]),
        (inputnode, maskzstats, [("mask_file", "mask_file")]),
        (modelestimate, maskimgs, [
            (("copes", flatten), "in_file"),
        ]),
        (modelestimate, maskvarcopes, [
            (("varcopes", flatten), "in_file"),
        ]),
        (modelestimate, maskzstats, [
            (("zstats", flatten), "in_file"),
        ]),
        (modelestimate, outputnode, [("dof_file", "dof_file")]),
        (maskimgs, splitimgs, [
            ("out_file", "inlist"),
        ]),
        (maskvarcopes, splitvarcopes, [
            ("out_file", "inlist"),
        ]),
        (maskzstats, splitzstats, [
            ("out_file", "inlist"),
        ]),
    ])

    # connect outputs named for the contrasts
    for i, conname in enumerate(connames):
        workflow.connect(splitimgs, "out%i" % (i + 1), outputnode,
                         "%s_img" % conname)
        workflow.connect(splitvarcopes, "out%i" % (i + 1), outputnode,
                         "%s_varcope" % conname)
        workflow.connect(splitzstats, "out%i" % (i + 1), outputnode,
                         "%s_zstat" % conname)

    return workflow, connames
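
The get_float and flatten helpers used above are defined elsewhere. Hedged sketches consistent with their use (fslstats -R yields [min, max], and FILMGLS outputs may arrive as nested lists):

def get_float(obj):
    # take the first element of fslstats -R output as the threshold (sketch)
    if isinstance(obj, (list, tuple)):
        return float(obj[0])
    return float(obj)

def flatten(obj):
    # recursively flatten nested lists, leaving strings whole (sketch)
    if isinstance(obj, str):
        return [obj]
    out = []
    for item in obj:
        out.extend(flatten(item))
    return out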
Example #12
#Wraps command **fslmaths**
NodeHash_17446a20 = pe.MapNode(interface = fsl.MeanImage(), name = 'NodeName_17446a20', iterfield = ['in_file'])
NodeHash_17446a20.inputs.dimension = 'T'

#Wraps command **fslmaths**
NodeHash_b5a5810 = pe.MapNode(interface = fsl.BinaryMaths(), name = 'NodeName_b5a5810', iterfield = ['in_file', 'operand_file'])
NodeHash_b5a5810.inputs.operation = 'add'

#Makes a model specification compatible with spm/fsl designers.
NodeHash_1e7a3420 = pe.MapNode(interface = modelgen.SpecifyModel(), name = 'NodeName_1e7a3420', iterfield = ['functional_runs', 'subject_info'])
NodeHash_1e7a3420.inputs.high_pass_filter_cutoff = 0
NodeHash_1e7a3420.inputs.input_units = 'secs'
NodeHash_1e7a3420.inputs.time_repetition = 2.0

#Generate FEAT specific files
NodeHash_9bb0d40 = pe.MapNode(interface = fsl.Level1Design(), name = 'NodeName_9bb0d40', iterfield = ['session_info'])
NodeHash_9bb0d40.inputs.bases = {'dgamma':{'derivs': False}}
NodeHash_9bb0d40.inputs.contrasts = [('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1])]
NodeHash_9bb0d40.inputs.interscan_interval = 2.0
NodeHash_9bb0d40.inputs.model_serial_correlations = True

#Wraps command **feat_model**
NodeHash_6b33f50 = pe.MapNode(interface = fsl.FEATModel(), name = 'NodeName_6b33f50', iterfield = ['ev_files', 'fsf_file'])

#Wraps command **film_gls**
NodeHash_2762fb60 = pe.MapNode(interface = fsl.FILMGLS(), name = 'NodeName_2762fb60', iterfield = ['design_file', 'in_file', 'tcon_file'])

#Wraps command **fslmaths**
NodeHash_2df82970 = pe.MapNode(interface = fsl.MeanImage(), name = 'NodeName_2df82970', iterfield = ['in_file'])
NodeHash_2df82970.inputs.dimension = 'T'
Example #13
datasink.inputs.container = 'results/'

# Model Specs

#Use nipype.algorithms.modelgen.SpecifyModel to generate design information.
modelspec = pe.Node(interface=SpecifyModel(), name="modelspec")
modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = TR
modelspec.inputs.high_pass_filter_cutoff = high_pass

#Use nipype.interfaces.fsl.Level1Design to generate a run specific fsf file for analysis
level1design = pe.Node(
    interface=fsl.Level1Design(
        interscan_interval=TR,
        model_serial_correlations=modelSerialCorrelations,
        bases={'dgamma': {
            'derivs': False
        }},
        contrasts=conts),
    name="level1design",
)

#Use nipype.interfaces.fsl.FEATModel to generate a run specific mat file for use by FILMGLS
modelgen = pe.MapNode(
    interface=fsl.FEATModel(),
    name='modelgen',
    iterfield=['fsf_file', 'ev_files'],
)

#Use nipype.interfaces.fsl.FILMGLS to estimate a model specified by a mat file and a functional run
modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
Example #14

#Wraps command **fslmaths**
NodeHash_1fdac460 = pe.MapNode(interface=fsl.BinaryMaths(),
                               name='NodeName_1fdac460',
                               iterfield=['in_file', 'operand_file'])
NodeHash_1fdac460.inputs.operation = 'add'

#Makes a model specification compatible with spm/fsl designers.
NodeHash_214dcae0 = pe.MapNode(interface=modelgen.SpecifyModel(),
                               name='NodeName_214dcae0',
                               iterfield=['functional_runs', 'subject_info'])
NodeHash_214dcae0.inputs.high_pass_filter_cutoff = 0
NodeHash_214dcae0.inputs.input_units = 'secs'
NodeHash_214dcae0.inputs.time_repetition = 2.0

#Generate FEAT specific files
NodeHash_2087a210 = pe.MapNode(interface=fsl.Level1Design(),
                               name='NodeName_2087a210',
                               iterfield=['session_info'])
NodeHash_2087a210.inputs.bases = {'dgamma': {'derivs': False}}
NodeHash_2087a210.inputs.contrasts = [
    ('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1])
]
NodeHash_2087a210.inputs.interscan_interval = 2.0
NodeHash_2087a210.inputs.model_serial_correlations = True

#Wraps command **feat_model**
NodeHash_219c0190 = pe.MapNode(interface=fsl.FEATModel(),
                               name='NodeName_219c0190',
                               iterfield=['ev_files', 'fsf_file'])

#Wraps command **film_gls**
Example #15
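
The _confounds2movpar helper handed to niu.Function below is not shown. A hedged sketch of a plausible implementation, assuming fMRIPrep-style confound column names (the original may differ):

def _confounds2movpar(in_confounds):
    # pull the six rigid-body columns out of a confounds TSV and write
    # them as a plain-text motion-parameter file for SpecifyModel
    import os
    import numpy as np
    import pandas as pd
    df = pd.read_csv(in_confounds, sep='\t')
    cols = ['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']
    out = os.path.abspath('movpar.txt')
    np.savetxt(out, df[cols].fillna(0.0).values)
    return out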
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """
    First level workflow
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')

    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim',
                  mem_gb=20)

    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')

    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ),
                 name='l1')

    l1model = pe.Node(fsl.Level1Design(interscan_interval=2,
                                       bases={'dgamma': {
                                           'derivs': True
                                       }},
                                       model_serial_correlations=True),
                      name='l1design')

    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                         detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni',
                              mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                          detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni',
                               mem_gb=20)

    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        return val**(1 / 3.)

    def _fwhm(fwhm):
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),

        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow
Example #16
#Master Node
modelfit = pe.Workflow(name='modelfit')

#generate design information
modelspec = pe.Node(interface=model.SpecifyModel(
    input_units=input_units,
    time_repetition=TR,
    high_pass_filter_cutoff=hpcutoff),
                    name="modelspec")

#generate a run specific fsf file for analysis
level1design = pe.Node(interface=fsl.Level1Design(
    interscan_interval=TR,
    bases={'dgamma': {
        'derivs': False
    }},
    contrasts=contrasts,
    model_serial_correlations=True),
                       name="level1design")

#generate a run specific mat file for use by FILMGLS
modelgen = pe.MapNode(interface=fsl.FEATModel(),
                      name='modelgen',
                      iterfield=['fsf_file', 'ev_files'])

#estimate model
modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                 mask_size=5,
                                                 threshold=1000),
                           name='modelestimate',
                           iterfield=['design_file', 'in_file'])
Example #17
def fsl_run_level_wf(
    model,
    step,
    bids_dir,
    output_dir,
    work_dir,
    subject_id,
    database_path,
    smoothing_fwhm=None,
    smoothing_level=None,
    smoothing_type=None,
    use_rapidart=False,
    detrend_poly=None,
    align_volumes=None,
    smooth_autocorrelations=False,
    despike=False,
    name="fsl_run_level_wf",
):
    """Generate run level workflow for a given model."""
    bids_dir = Path(bids_dir)
    work_dir = Path(work_dir)
    workflow = pe.Workflow(name=name)

    level = step["Level"]

    dimensionality = 3  # Nipype FSL.SUSAN Default
    if smoothing_type == "inp":
        dimensionality = 2
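    # "inp" presumably requests in-plane smoothing: dimensionality 2 makes
    # SUSAN smooth slice by slice instead of isotropically in 3D.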

    workflow.__desc__ = ""
    (work_dir / model["Name"]).mkdir(exist_ok=True)

    include_entities = {}
    if "Input" in model:
        if "Include" in model["Input"]:
            include_entities = model["Input"]["Include"]
    include_entities.update({"subject": subject_id})

    getter = pe.Node(
        BIDSGet(
            database_path=database_path,
            fixed_entities=include_entities,
            align_volumes=align_volumes,
        ),
        name="func_select",
    )

    get_info = pe.MapNode(
        GetRunModelInfo(model=step, detrend_poly=detrend_poly),
        iterfield=[
            "metadata_file", "regressor_file", "events_file", "entities"
        ],
        name=f"get_{level}_info",
    )

    despiker = pe.MapNode(
        afni.Despike(outputtype="NIFTI_GZ"),
        iterfield=["in_file"],
        name="despiker",
    )

    realign_runs = pe.MapNode(
        fsl.MCFLIRT(output_type="NIFTI_GZ", interpolation="sinc"),
        iterfield=["in_file", "ref_file"],
        name="func_realign",
    )

    wrangle_volumes = pe.MapNode(
        IdentityInterface(fields=["functional_file"]),
        iterfield=["functional_file"],
        name="wrangle_volumes",
    )

    specify_model = pe.MapNode(
        modelgen.SpecifyModel(high_pass_filter_cutoff=-1.0,
                              input_units="secs"),
        iterfield=["functional_runs", "subject_info", "time_repetition"],
        name=f"model_{level}_specify",
    )

    fit_model = pe.MapNode(
        IdentityInterface(
            fields=[
                "session_info", "interscan_interval", "contrasts",
                "functional_data"
            ],
            mandatory_inputs=True,
        ),
        iterfield=[
            "functional_data", "session_info", "interscan_interval",
            "contrasts"
        ],
        name=f"model_{level}_fit",
    )

    first_level_design = pe.MapNode(
        fsl.Level1Design(
            bases={"dgamma": {
                "derivs": False
            }},
            model_serial_correlations=False,
        ),
        iterfield=["session_info", "interscan_interval", "contrasts"],
        name=f"model_{level}_design",
    )

    generate_model = pe.MapNode(
        fsl.FEATModel(output_type="NIFTI_GZ"),
        iterfield=["fsf_file", "ev_files"],
        name=f"model_{level}_generate",
    )

    estimate_model = pe.MapNode(
        fsl.FILMGLS(
            threshold=0.0,  # smooth_autocorr=True
            output_type="NIFTI_GZ",
            results_dir="results",
            smooth_autocorr=False,
            autocorr_noestimate=True,
        ),
        iterfield=["design_file", "in_file", "tcon_file"],
        name=f"model_{level}_estimate",
    )

    if smooth_autocorrelations:
        first_level_design.inputs.model_serial_correlations = True
        estimate_model.inputs.smooth_autocorr = True
        estimate_model.inputs.autocorr_noestimate = False

    calculate_p = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ",
                       op_string="-ztop",
                       suffix="_pval"),
        iterfield=["in_file"],
        name=f"model_{level}_caculate_p",
    )

    image_pattern = ("[sub-{subject}/][ses-{session}/]"
                     "[sub-{subject}_][ses-{session}_]"
                     "task-{task}_[acq-{acquisition}_]"
                     "[rec-{reconstruction}_][run-{run}_]"
                     "[echo-{echo}_][space-{space}_]contrast-{contrast}_"
                     "stat-{stat<effect|variance|z|p|t|F>}_statmap.nii.gz")

    run_rapidart = pe.MapNode(
        ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            zintensity_threshold=3,
            norm_threshold=1,
            bound_by_brainmask=True,
            mask_type="file",
            parameter_source="FSL",
        ),
        iterfield=["realignment_parameters", "realigned_files", "mask_file"],
        name="rapidart_run",
    )

    reshape_rapidart = pe.MapNode(
        Function(
            input_names=[
                "run_info", "functional_file", "outlier_file",
                "contrast_entities"
            ],
            output_names=["run_info", "contrast_entities"],
            function=utils.reshape_ra,
        ),
        iterfield=[
            "run_info", "functional_file", "outlier_file", "contrast_entities"
        ],
        name="reshape_rapidart",
    )

    mean_img = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ",
                       op_string="-Tmean",
                       suffix="_mean"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_avgimg",
    )

    median_img = pe.MapNode(
        fsl.ImageStats(output_type="NIFTI_GZ", op_string="-k %s -p 50"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_medimg",
    )

    merge = pe.Node(Merge(2, axis="hstack"), name="smooth_merge")

    run_susan = pe.MapNode(
        fsl.SUSAN(output_type="NIFTI_GZ"),
        iterfield=["in_file", "brightness_threshold", "usans"],
        name="smooth_susan",
    )

    mask_functional = pe.MapNode(ApplyMask(),
                                 iterfield=["in_file", "mask_file"],
                                 name="mask_functional")

    # Exists solely to correct undesirable behavior of FSL
    # that results in loss of constant columns
    correct_matrices = pe.MapNode(
        Function(
            input_names=["design_matrix"],
            output_names=["design_matrix"],
            function=utils.correct_matrix,
        ),
        iterfield=["design_matrix"],
        run_without_submitting=True,
        name=f"correct_{level}_matrices",
    )

    collate = pe.Node(
        MergeAll(
            fields=[
                "effect_maps",
                "variance_maps",
                "zscore_maps",
                "pvalue_maps",
                "tstat_maps",
                "contrast_metadata",
            ],
            check_lengths=True,
        ),
        name=f"collate_{level}",
    )

    collate_outputs = pe.Node(
        CollateWithMetadata(
            fields=[
                "effect_maps", "variance_maps", "zscore_maps", "pvalue_maps",
                "tstat_maps"
            ],
            field_to_metadata_map={
                "effect_maps": {
                    "stat": "effect"
                },
                "variance_maps": {
                    "stat": "variance"
                },
                "zscore_maps": {
                    "stat": "z"
                },
                "pvalue_maps": {
                    "stat": "p"
                },
                "tstat_maps": {
                    "stat": "t"
                },
            },
        ),
        name=f"collate_{level}_outputs",
    )

    plot_matrices = pe.MapNode(
        PlotMatrices(output_dir=output_dir, database_path=database_path),
        iterfield=["mat_file", "con_file", "entities", "run_info"],
        run_without_submitting=True,
        name=f"plot_{level}_matrices",
    )

    ds_contrast_maps = pe.MapNode(
        BIDSDataSink(base_directory=output_dir, path_patterns=image_pattern),
        iterfield=["entities", "in_file"],
        run_without_submitting=True,
        name=f"ds_{level}_contrast_maps",
    )

    wrangle_outputs = pe.Node(
        IdentityInterface(fields=["contrast_metadata", "contrast_maps"]),
        name=f"wrangle_{level}_outputs",
    )

    # Setup connections among nodes
    workflow.connect([(
        getter,
        get_info,
        [
            ("metadata_files", "metadata_file"),
            ("events_files", "events_file"),
            ("regressor_files", "regressor_file"),
            ("entities", "entities"),
        ],
    )])

    if align_volumes and despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, realign_runs, [("out_file", "in_file")]),
            (getter, realign_runs, [("reference_files", "ref_file")]),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif align_volumes and not despike:
        workflow.connect([
            (
                getter,
                realign_runs,
                [("functional_files", "in_file"),
                 ("reference_files", "ref_file")],
            ),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, wrangle_volumes, [("out_file", "functional_file")]),
        ])
    else:
        workflow.connect([(getter, wrangle_volumes, [("functional_files",
                                                      "functional_file")])])

    if use_rapidart:
        workflow.connect([
            (get_info, run_rapidart, [("motion_parameters",
                                       "realignment_parameters")]),
            (getter, run_rapidart, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                run_rapidart,
                [("functional_file", "realigned_files")],
            ),
            (
                run_rapidart,
                reshape_rapidart,
                [("outlier_files", "outlier_file")],
            ),
            (
                get_info,
                reshape_rapidart,
                [("run_info", "run_info"),
                 ("contrast_entities", "contrast_entities")],
            ),
            (wrangle_volumes, reshape_rapidart, [("functional_file",
                                                  "functional_file")]),
            (
                reshape_rapidart,
                specify_model,
                [("run_info", "subject_info")],
            ),
            (reshape_rapidart, plot_matrices, [("run_info", "run_info")]),
            (reshape_rapidart, collate, [("contrast_entities",
                                          "contrast_metadata")]),
        ])
    else:
        workflow.connect([
            (get_info, specify_model, [("run_info", "subject_info")]),
            (get_info, plot_matrices, [("run_info", "run_info")]),
            (
                get_info,
                collate,
                [("contrast_entities", "contrast_metadata")],
            ),
        ])

    if smoothing_level == "l1" or smoothing_level == "run":
        run_susan.inputs.fwhm = smoothing_fwhm
        run_susan.inputs.dimension = dimensionality
        estimate_model.inputs.mask_size = smoothing_fwhm
        workflow.connect([
            (wrangle_volumes, mean_img, [("functional_file", "in_file")]),
            (
                wrangle_volumes,
                median_img,
                [("functional_file", "in_file")],
            ),
            (getter, mean_img, [("mask_files", "mask_file")]),
            (getter, median_img, [("mask_files", "mask_file")]),
            (mean_img, merge, [("out_file", "in1")]),
            (median_img, merge, [("out_stat", "in2")]),
            (wrangle_volumes, run_susan, [("functional_file", "in_file")]),
            (
                median_img,
                run_susan,
                [(
                    ("out_stat", utils.get_btthresh),
                    "brightness_threshold",
                )],
            ),
            (merge, run_susan, [(("out", utils.get_usans), "usans")]),
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (run_susan, mask_functional, [("smoothed_file", "in_file")]),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])

    else:
        workflow.connect([
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                mask_functional,
                [("functional_file", "in_file")],
            ),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])

    workflow.connect([
        (
            get_info,
            specify_model,
            [("repetition_time", "time_repetition")],
        ),
        (specify_model, fit_model, [("session_info", "session_info")]),
        (
            get_info,
            fit_model,
            [("repetition_time", "interscan_interval"),
             ("run_contrasts", "contrasts")],
        ),
        (
            fit_model,
            first_level_design,
            [
                ("interscan_interval", "interscan_interval"),
                ("session_info", "session_info"),
                ("contrasts", "contrasts"),
            ],
        ),
        (first_level_design, generate_model, [("fsf_files", "fsf_file")]),
        (first_level_design, generate_model, [("ev_files", "ev_files")]),
    ])

    if detrend_poly:
        workflow.connect([
            (
                generate_model,
                correct_matrices,
                [("design_file", "design_matrix")],
            ),
            (
                correct_matrices,
                plot_matrices,
                [("design_matrix", "mat_file")],
            ),
            (
                correct_matrices,
                estimate_model,
                [("design_matrix", "design_file")],
            ),
        ])

    else:
        workflow.connect([
            (generate_model, plot_matrices, [("design_file", "mat_file")]),
            (
                generate_model,
                estimate_model,
                [("design_file", "design_file")],
            ),
        ])

    workflow.connect([
        (getter, plot_matrices, [("entities", "entities")]),
        (generate_model, plot_matrices, [("con_file", "con_file")]),
        (fit_model, estimate_model, [("functional_data", "in_file")]),
        (generate_model, estimate_model, [("con_file", "tcon_file")]),
        (
            estimate_model,
            calculate_p,
            [(("zstats", utils.flatten), "in_file")],
        ),
        (
            estimate_model,
            collate,
            [
                ("copes", "effect_maps"),
                ("varcopes", "variance_maps"),
                ("zstats", "zscore_maps"),
                ("tstats", "tstat_maps"),
            ],
        ),
        (calculate_p, collate, [("out_file", "pvalue_maps")]),
        (
            collate,
            collate_outputs,
            [
                ("effect_maps", "effect_maps"),
                ("variance_maps", "variance_maps"),
                ("zscore_maps", "zscore_maps"),
                ("pvalue_maps", "pvalue_maps"),
                ("tstat_maps", "tstat_maps"),
                ("contrast_metadata", "metadata"),
            ],
        ),
        (
            collate_outputs,
            ds_contrast_maps,
            [("out", "in_file"), ("metadata", "entities")],
        ),
        (
            collate_outputs,
            wrangle_outputs,
            [("metadata", "contrast_metadata"), ("out", "contrast_maps")],
        ),
    ])

    return workflow
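
# A hypothetical invocation sketch for fsl_run_level_wf: the model/step
# dictionaries follow the BIDS Stats Models layout the function expects, and
# every path below is a placeholder.
model = {"Name": "flanker", "Input": {"Include": {"task": "flanker"}}}
step = {"Level": "run"}  # in a full spec this would be one entry of model["Steps"]
wf = fsl_run_level_wf(
    model=model,
    step=step,
    bids_dir="/data/bids",
    output_dir="/data/derivatives/model",
    work_dir="/tmp/work",
    subject_id="01",
    database_path="/data/bids/.pybids_db",
    smoothing_fwhm=5.0,
    smoothing_level="run",
    smoothing_type="iso",
)
# wf.run()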
Example #18
def model_fitting(source_img, prepped_img, subject_info, aroma, task, args,
                  mask_file, run_number):
    # Get the necessary parameters
    outputdir = args.outputdir
    fwhm = args.fwhm
    cthresh = args.cthresh
    alpha = args.alpha

    # Make a task directory in the output folder
    if run_number > 0:
        taskdir = os.path.join(outputdir,
                               task + "_run-0" + str(run_number + 1))
    else:
        taskdir = os.path.join(outputdir, task)

    if not os.path.exists(taskdir):
        os.mkdir(taskdir)
    os.mkdir(os.path.join(taskdir, 'stats'))
    os.mkdir(os.path.join(taskdir, 'figs'))

    processed_image = preprocess(aroma, fwhm, prepped_img, mask_file, taskdir,
                                 task)

    task_vs_baseline = [
        task + " vs baseline", 'T', [task, 'baseline'], [1, -1]
    ]  # set up contrasts
    contrasts = [task_vs_baseline]
    """
    Model fitting workflow

    Inputs::
         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.model_serial_correlations
         inputspec.bases
    Outputs::
         outputspec.copes
         outputspec.varcopes
         outputspec.dof_file
         outputspec.zfiles
         outputspec.parameter_estimates
    """

    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)
    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")  # generate design info
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name='modelestimate',
        iterfield=['design_file', 'in_file', 'tcon_file'])
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=['in1'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'zfiles', 'parameter_estimates'
    ]),
                         name='outputspec')

    modelfit.connect([
        (modelspec, inputspec, [('session_info', 'session_info')]),
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (merge_contrasts, outputspec, [('out', 'zfiles')]),
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file')]),
    ])

    modelfit.connect([
        (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                   ('fcon_file', 'fcon_file')]),
        (modelestimate, merge_contrasts, [('zstats', 'in1'),
                                          ('zfstats', 'in2')]),
        (modelestimate, outputspec, [('copes', 'copes'),
                                     ('varcopes', 'varcopes')]),
    ])

    # Define inputs to workflow
    modelspec.inputs.functional_runs = processed_image
    inputspec.inputs.functional_data = processed_image
    modelspec.inputs.subject_info = subject_info
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.film_threshold = 10.0
    inputspec.inputs.interscan_interval = source_img.entities['RepetitionTime']
    inputspec.inputs.bases = {
        'gamma': {
            'gammasigma': 3,
            'gammadelay': 6,
            'derivs': True
        }
    }
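    # The 'gamma' basis with gammasigma=3 and gammadelay=6 models an HRF that
    # peaks ~6 s after stimulus onset with ~3 s spread; derivs=True adds a
    # temporal-derivative regressor to absorb small timing offsets.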
    inputspec.inputs.contrasts = contrasts

    # Run the model-fitting pipeline. Main outputs are a feat directory (w/ functional img) and a design.mat file
    res = modelfit.run()

    # outputs
    output_txt = open(os.path.join(taskdir, task + '_outputs.txt'), 'w')
    print_outputs(output_txt, res)

    # Take the first (and only) element of the 'zstats' output of the third node (FILMGLS)
    z_img = list(res.nodes)[2].result.outputs.zstats[0]

    # Use False Discovery Rate theory to correct for multiple comparisons
    fdr_thresh_img, fdr_threshold = thresholding.map_threshold(
        stat_img=z_img,
        mask_img=mask_file,
        alpha=alpha,
        height_control='fdr',
        cluster_threshold=cthresh)
    print("Thresholding at FDR corrected threshold of " + str(fdr_threshold))
    fdr_thresh_img_path = os.path.join(taskdir,
                                       task + '_fdr_thresholded_z.nii.gz')
    nibabel.save(fdr_thresh_img, fdr_thresh_img_path)

    # Do a cluster analysis using the FDR corrected threshold on the original z_img
    print("Performing cluster analysis.")
    cl = fsl.Cluster(in_file=z_img, threshold=fdr_threshold)
    cluster_file = os.path.join(taskdir, 'stats', task + "_cluster_stats.txt")
    cluster_analysis(cluster_file, cl)

    # Resample the result image with AFNI
    resample_fdr_thresh_img_path = os.path.join(
        taskdir, task + '_fdr_thresholded_z_resample.nii.gz')
    print("Resampling thresholded image to MNI space")
    resample = afni.Resample(master=template,
                             out_file=resample_fdr_thresh_img_path,
                             in_file=fdr_thresh_img_path)
    resample.run()
    os.remove(fdr_thresh_img_path)

    print("Image to be returned: " + resample_fdr_thresh_img_path)

    return resample_fdr_thresh_img_path
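
# A standalone sketch of the FDR-thresholding step used above. Hedged: the
# import below assumes the nistats-era API that the surrounding code calls
# (thresholding.map_threshold); in current nilearn the equivalent call is
# nilearn.glm.threshold_stats_img with the same arguments. File names are
# placeholders.
from nistats import thresholding

fdr_img, fdr_cutoff = thresholding.map_threshold(
    stat_img='zstat1.nii.gz',     # unthresholded z-map (placeholder)
    mask_img='brainmask.nii.gz',  # brain mask (placeholder)
    alpha=0.05,                   # target false discovery rate
    height_control='fdr',
    cluster_threshold=10,         # drop clusters smaller than 10 voxels
)
print('FDR-corrected z cutoff:', fdr_cutoff)
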
def create_nuisance_modelfit_workflow(name='modelfit', f_contrasts=False):
    """
    Create an FSL model-fitting workflow that also returns
    residual4d and sigmasquareds.

    Example
    -------

#    >>> modelfit = create_modelfit_workflow()
#    >>> modelfit.base_dir = '.'
#    >>> info = dict()
#    >>> modelfit.inputs.inputspec.session_info = info
#    >>> modelfit.inputs.inputspec.interscan_interval = 3.
#    >>> modelfit.inputs.inputspec.film_threshold = 1000
#    >>> modelfit.run() #doctest: +SKIP

    Inputs::

         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.model_serial_correlations
         inputspec.bases

    Outputs::

         outputspec.copes
         outputspec.varcopes
         outputspec.dof_file
         outputspec.pfiles
         outputspec.zfiles
         outputspec.parameter_estimates
         outputspec.residual4d
         outputspec.sigmasquareds

    """

    version = 0
    if fsl.Info.version() and \
                    LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    modelfit = pe.Workflow(name=name)
    """
    Create the nodes
    """

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    if version < 507:
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=['design_file', 'in_file'])
    else:
        if f_contrasts:
            iterfield = ['design_file', 'in_file', 'tcon_file', 'fcon_file']
        else:
            iterfield = ['design_file', 'in_file', 'tcon_file']
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=iterfield)

    if version < 507:
        if f_contrasts:
            iterfield = [
                'tcon_file', 'fcon_file', 'param_estimates', 'sigmasquareds',
                'corrections', 'dof_file'
            ]
        else:
            iterfield = [
                'tcon_file', 'param_estimates', 'sigmasquareds', 'corrections',
                'dof_file'
            ]
        conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                                 name='conestimate',
                                 iterfield=iterfield)

    if f_contrasts:
        iterfield = ['in1', 'in2']
    else:
        iterfield = ['in1']
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=iterfield)

    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         nested=True,
                         name='ztop',
                         iterfield=['in_file'])

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'zfiles',
        'parameter_estimates', 'residual4d', 'sigmasquareds'
    ]),
                         name='outputspec')
    """
    Setup the connections
    """

    modelfit.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),

        # connect also residual4d and sigmasquared
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file'),
                                     ('residual4d', 'residual4d'),
                                     ('sigmasquareds', 'sigmasquareds')]),
    ])
    if version < 507:
        modelfit.connect([
            (modelgen, conestimate, [('con_file', 'tcon_file'),
                                     ('fcon_file', 'fcon_file')]),
            (modelestimate, conestimate, [('param_estimates',
                                           'param_estimates'),
                                          ('sigmasquareds', 'sigmasquareds'),
                                          ('corrections', 'corrections'),
                                          ('dof_file', 'dof_file')]),
            (conestimate, outputspec, [('copes', 'copes'),
                                       ('varcopes', 'varcopes')]),
        ])
    else:
        modelfit.connect([
            (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                       ('fcon_file', 'fcon_file')]),
            (modelestimate, outputspec, [('copes', 'copes'),
                                         ('varcopes', 'varcopes')]),
        ])
    return modelfit
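
# Hypothetical usage sketch for the workflow above; session_info would come
# from a modelgen.SpecifyModel node and all values below are placeholders.
nuisancefit = create_nuisance_modelfit_workflow(name='nuisancefit')
nuisancefit.base_dir = '/tmp/work'
nuisancefit.inputs.inputspec.interscan_interval = 2.0
nuisancefit.inputs.inputspec.film_threshold = 1000
nuisancefit.inputs.inputspec.bases = {'dgamma': {'derivs': False}}
nuisancefit.inputs.inputspec.model_serial_correlations = True
nuisancefit.inputs.inputspec.contrasts = [('task>baseline', 'T', ['task'], [1])]
# nuisancefit.inputs.inputspec.session_info = session_info
# nuisancefit.inputs.inputspec.functional_data = 'preproc_bold.nii.gz'
# nuisancefit.run()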
Example #20
#Wraps command **fslmaths**
NodeHash_43b01b0 = pe.Node(interface=fsl.ApplyMask(), name='NodeName_43b01b0')

#Custom interface wrapping function Tsv2subjectinfo
NodeHash_3042f20 = pe.Node(interface=firstlevelhelpers.Tsv2subjectinfo,
                           name='NodeName_3042f20')

#Makes a model specification compatible with spm/fsl designers.
NodeHash_6bef320 = pe.Node(interface=modelgen.SpecifyModel(),
                           name='NodeName_6bef320')
NodeHash_6bef320.inputs.high_pass_filter_cutoff = 0
NodeHash_6bef320.inputs.input_units = 'secs'
NodeHash_6bef320.inputs.time_repetition = 2.0

#Generate FEAT specific files
NodeHash_8241250 = pe.Node(interface=fsl.Level1Design(),
                           name='NodeName_8241250')
NodeHash_8241250.inputs.bases = {'dgamma': {'derivs': False}}
NodeHash_8241250.inputs.contrasts = [
    ('con-incon', 'T', ['congruent_correct', 'incongruent_correct'], [-1, 1])
]
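# Nipype contrast format: (name, stat_type, condition_names, weights).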
NodeHash_8241250.inputs.interscan_interval = 2.0
NodeHash_8241250.inputs.model_serial_correlations = True

#Wraps command **feat_model**
NodeHash_8b12580 = pe.Node(interface=fsl.FEATModel(), name='NodeName_8b12580')

#Wraps command **film_gls**
NodeHash_5015c80 = pe.Node(interface=fsl.FILMGLS(), name='NodeName_5015c80')

#Generic datasink module to store structured outputs
Example #21
def first_level_wf(in_files, output_dir, fwhm=6.0, name='wf_1st_level'):
    workflow = pe.Workflow(name=name)
    datasource = pe.Node(niu.Function(function=_dict_ds,
                                      output_names=DATA_ITEMS),
                         name='datasource')
    datasource.inputs.in_dict = in_files
    datasource.iterables = ('sub', sorted(in_files.keys()))

    # Extract motion parameters from regressors file
    runinfo = pe.Node(niu.Function(input_names=[
        'in_file', 'events_file', 'regressors_file', 'regressors_names'
    ],
                                   function=_bids2nipypeinfo,
                                   output_names=['info', 'realign_file']),
                      name='runinfo')

    # Set the column names to be used from the confounds file
    runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
        ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]

    # SUSAN smoothing
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = fwhm

    l1_spec = pe.Node(SpecifyModel(parameter_source='FSL',
                                   input_units='secs',
                                   high_pass_filter_cutoff=100),
                      name='l1_spec')

    # l1_model creates a first-level model design
    l1_model = pe.Node(
        fsl.Level1Design(
            bases={'dgamma': {
                'derivs': True
            }},
            model_serial_correlations=True,
            #ENTER YOUR OWN CONTRAST HERE
            contrasts=[],
            # orthogonalization=orthogonality,
        ),
        name='l1_model')

    # feat_spec generates an fsf model specification file
    feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')
    # feat_fit actually runs FEAT
    feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=12)

    feat_select = pe.Node(nio.SelectFiles({
        'cope': 'stats/cope1.nii.gz',
        'pe': 'stats/pe[0-9][0-9].nii.gz',
        'tstat': 'stats/tstat1.nii.gz',
        'varcope': 'stats/varcope1.nii.gz',
        'zstat': 'stats/zstat1.nii.gz',
    }),
                          name='feat_select')

    ds_cope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                          keep_dtype=False,
                                          suffix='cope',
                                          desc='intask'),
                      name='ds_cope',
                      run_without_submitting=True)

    ds_varcope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                             keep_dtype=False,
                                             suffix='varcope',
                                             desc='intask'),
                         name='ds_varcope',
                         run_without_submitting=True)

    ds_zstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='zstat',
                                           desc='intask'),
                       name='ds_zstat',
                       run_without_submitting=True)

    ds_tstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='tstat',
                                           desc='intask'),
                       name='ds_tstat',
                       run_without_submitting=True)

    workflow.connect([
        (datasource, susan, [('bold', 'inputnode.in_files'),
                             ('mask', 'inputnode.mask_file')]),
        (datasource, runinfo, [('events', 'events_file'),
                               ('regressors', 'regressors_file')]),
        (susan, l1_spec, [('outputnode.smoothed_files', 'functional_runs')]),
        (datasource, l1_spec, [('tr', 'time_repetition')]),
        (datasource, l1_model, [('tr', 'interscan_interval')]),
        (datasource, ds_cope, [('bold', 'source_file')]),
        (datasource, ds_varcope, [('bold', 'source_file')]),
        (datasource, ds_zstat, [('bold', 'source_file')]),
        (datasource, ds_tstat, [('bold', 'source_file')]),
        (susan, runinfo, [('outputnode.smoothed_files', 'in_file')]),
        (runinfo, l1_spec, [('info', 'subject_info'),
                            ('realign_file', 'realignment_parameters')]),
        (l1_spec, l1_model, [('session_info', 'session_info')]),
        (l1_model, feat_spec, [('fsf_files', 'fsf_file'),
                               ('ev_files', 'ev_files')]),
        (l1_model, feat_fit, [('fsf_files', 'fsf_file')]),
        (feat_fit, feat_select, [('feat_dir', 'base_directory')]),
        (feat_select, ds_cope, [('cope', 'in_file')]),
        (feat_select, ds_varcope, [('varcope', 'in_file')]),
        (feat_select, ds_zstat, [('zstat', 'in_file')]),
        (feat_select, ds_tstat, [('tstat', 'in_file')]),
    ])
    return workflow
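
# Hypothetical usage sketch; the per-subject keys mirror what the datasource
# node exposes (bold, mask, events, regressors, tr), though the exact
# structure _dict_ds expects is an assumption. All paths are placeholders.
in_files = {
    '01': {
        'bold': 'sub-01_task-stroop_desc-preproc_bold.nii.gz',
        'mask': 'sub-01_desc-brain_mask.nii.gz',
        'events': 'sub-01_task-stroop_events.tsv',
        'regressors': 'sub-01_desc-confounds_timeseries.tsv',
        'tr': 2.0,
    },
}
wf = first_level_wf(in_files, output_dir='/tmp/derivatives', fwhm=6.0)
wf.base_dir = '/tmp/work'
# wf.run()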
Example #22
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

#Set up model fitting workflow (we assume data has been preprocessed with fmriprep)
modelfit = pe.Workflow(name='modelfit')

#Custom interface wrapping function Tsv2subjectinfo
tsv2subjinfo = pe.MapNode(util.Function(
    function=utils.tsv2subjectinfo,
    input_names=['events_file', 'exclude', 'confounds_file', 'trim_indices'],
    output_names=['subject_info']),
                          name="tsv2subjinfo",
                          iterfield=['events_file', 'confounds_file'])
modelspec = pe.MapNode(interface=model.SpecifyModel(),
                       name="modelspec",
                       iterfield=['subject_info'])
level1design = pe.MapNode(interface=fsl.Level1Design(),
                          name="level1design",
                          iterfield=['session_info'])
modelgen = pe.MapNode(interface=fsl.FEATModel(),
                      name='modelgen',
                      iterfield=["fsf_file", "ev_files"])

trim = pe.MapNode(interface=Trim(), name="trim", iterfield=['in_file'])
applymask = pe.MapNode(interface=fsl.ApplyMask(),
                       name="applymask",
                       iterfield=["in_file", "mask_file"])

modelestimate = pe.MapNode(interface=fsl.FILMGLS(),
                           name='modelestimate',
                           iterfield=['design_file', 'in_file', 'tcon_file'])
Example #23
Use :class:`nipype.algorithms.modelgen.SpecifyModel` to generate design information.
"""

modelspec = pe.Node(interface=model.SpecifyModel(),
                    name="modelspec")

modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = tr
modelspec.inputs.high_pass_filter_cutoff = 120
"""
Use :class:`nipype.interfaces.fsl.Level1Design` to generate a run specific fsf
file for analysis
"""

# Build the contrasts
level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
cont1 = ['Trauma1_0>Sad1_0', 'T', ['trauma1_0', 'sad1_0'], [1, -1]]
cont2 = ['Trauma1_0>Relax1_0', 'T', ['trauma1_0', 'relax1_0'], [1, -1]]
cont3 = ['Sad1_0>Relax1_0', 'T', ['sad1_0', 'relax1_0'], [1, -1]]
cont4 = ['trauma1_0 > trauma2_0', 'T', ['trauma1_0', 'trauma2_0'], [1, -1]]
cont5 = ['Trauma1_0>Trauma1_2_3', 'T', ['trauma1_0', 'trauma1_2','trauma1_3'], [1, -0.5, -0.5]]
cont6 = ['Trauma1 > Trauma2', 'T', ['trauma1_0', 'trauma1_1', 'trauma1_2', 'trauma1_3', 'trauma2_0', 'trauma2_1', 'trauma2_2', 'trauma2_3'], [0.25, 0.25, 0.25, 0.25, -0.25, -0.25, -0.25, -0.25 ]]
cont7 = ['Trauma1_01>relax1_01', 'T', ['trauma1_0', 'trauma1_1', 'relax1_0', 'relax1_1'], [0.5,0.5,-0.5,-0.5]]
contrasts = [cont1, cont2, cont3, cont4, cont5, cont6, cont7]


level1design.inputs.interscan_interval = tr
level1design.inputs.bases = {'dgamma': {'derivs': False}}
level1design.inputs.contrasts = contrasts
level1design.inputs.model_serial_correlations = True    
"""
l1_spec = pe.Node(SpecifyModel(
    parameter_source='FSL',
    input_units='secs',
    high_pass_filter_cutoff=120,
    time_repetition=tr,
),
                  name='l1_spec')

# l1_model creates a first-level model design
l1_model = pe.Node(
    fsl.Level1Design(
        bases={'dgamma': {
            'derivs': True
        }},  # adding temporal derivative of double gamma
        model_serial_correlations=True,
        interscan_interval=tr,
        contrasts=contrasts
        # orthogonalization=orthogonality,
    ),
    name='l1_model')

# feat_spec generates an fsf model specification file
feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')

# feat_fit actually runs FEAT
feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=5)

# instead of FEAT
# modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
#                                                 mask_size=5,