Example #1
def index_lesion_workflow(msid, mseid, lesion):
    import nipype.interfaces.ants as ants
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.fsl as fsl
    from nipype.utils.filemanip import load_json
    import os

    working_directory = '/working/henry_temp/keshavan/'
    output_directory = os.path.split(lesion)[0]

    register = Workflow(name="indexed_lesion_{0}_{1}".format(msid, mseid))
    register.base_dir = working_directory
    inputnode = Node(IdentityInterface(fields=["lesion"]), name="inputspec")
    inputnode.inputs.lesion = lesion

    bin_math = Node(fsl.BinaryMaths(), name="Convert_to_binary")
    bin_math.inputs.operand_value = 1
    bin_math.inputs.operation = 'min'
    register.connect(inputnode, "lesion", bin_math, "in_file")

    cluster_lesion = Node(fsl.Cluster(threshold=0.0001,
                                      out_index_file=True,
                                      use_mm=True),
                          name="cluster_lesion")

    sinker = Node(DataSink(), name="sinker")
    sinker.inputs.base_directory = output_directory
    sinker.inputs.container = '.'
    sinker.inputs.substitutions = [('_maths', '')]

    register.connect(bin_math, "out_file", cluster_lesion, "in_file")
    register.connect(cluster_lesion, "index_file", sinker, "@cluster")

    from nipype.interfaces.freesurfer import SegStats
    segstats_lesion = Node(SegStats(), name="segstats_lesion")
    register.connect(cluster_lesion, "index_file", segstats_lesion,
                     "segmentation_file")
    register.connect(segstats_lesion, "summary_file", sinker, "@summaryfile")

    register.write_graph(graph2use='orig')
    register.config["Execution"] = {
        "keep_inputs": True,
        "remove_unnecessary_outputs": False
    }
    return register
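A minimal driver for this workflow (the msid/mseid values and the lesion path are placeholders, not from the original):

register = index_lesion_workflow('ms0001', 'mse0001',
                                 '/data/ms0001/mse0001/lesion.nii.gz')
register.run()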
Example #2
def cluster_zstats(zstat, volume, dlh):

    import nipype.interfaces.fsl as fsl
    import os

    study_mask = '/Volumes/Amr_1TB/NARPS/narps_templateBrainExtractionMask.nii.gz'

    # contrast number read from the filename, accounting for the .nii.gz suffix
    cope_no = zstat[-8]

    if zstat[-36:-32] == 'gain':
        cope = '/media/amr/Amr_4TB/NARPS/output_narps_proc_3rd_level/gain_stat_flameo_neg/+/cope{0}.nii.gz'.format(
            cope_no)
    elif zstat[-36:-32] == 'loss':
        cope = '/media/amr/Amr_4TB/NARPS/output_narps_proc_3rd_level/loss_stat_flameo_neg/+/cope{0}.nii.gz'.format(
            cope_no)
    else:
        raise ValueError('zstat filename matches neither gain nor loss: {0}'.format(zstat))

    # mask here, not in a separate node, because the original zstat filename is needed to get the contrast number
    mask_zstat = fsl.ApplyMask()
    mask_zstat.inputs.in_file = zstat
    mask_zstat.inputs.mask_file = study_mask
    mask_zstat_outputs = mask_zstat.run()
    masked_zstat = mask_zstat_outputs.outputs.out_file

    cluster_zstats = fsl.Cluster()

    cluster_zstats.inputs.in_file = masked_zstat
    cluster_zstats.inputs.cope_file = cope
    cluster_zstats.inputs.threshold = 3.1
    cluster_zstats.inputs.pthreshold = 0.001
    cluster_zstats.inputs.connectivity = 26
    cluster_zstats.inputs.volume = volume
    cluster_zstats.inputs.dlh = dlh

    cluster_zstats.inputs.out_threshold_file = 'thresh_zstat.nii.gz'
    cluster_zstats.inputs.out_index_file = 'cluster_mask_zstat.nii.gz'
    cluster_zstats.inputs.out_localmax_txt_file = 'lmax_zstat_std.txt'
    cluster_zstats.inputs.use_mm = True
    print(cluster_zstats.cmdline)

    cluster_zstats_outputs = cluster_zstats.run()

    threshold_file = cluster_zstats_outputs.outputs.threshold_file

    return masked_zstat, threshold_file
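The fixed-index slicing above (zstat[-8], zstat[-36:-32]) silently breaks if the directory layout or filenames change. A more defensive sketch (the regex and helper name are illustrative assumptions, not part of the original):

import os
import re

def parse_zstat_name(zstat_path):
    """Extract the contrast number and task name from a zstat path."""
    basename = os.path.basename(zstat_path)  # e.g. 'zstat1.nii.gz'
    match = re.match(r'zf?stat(\d+)\.nii(\.gz)?$', basename)
    if match is None:
        raise ValueError('not a zstat file: {0}'.format(zstat_path))
    cope_no = match.group(1)
    task = 'gain' if 'gain' in zstat_path else 'loss'
    return cope_no, task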
Example #3
def cluster_image(name="threshold_cluster_makeimages"):
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    workflow = pe.Workflow(name=name)
    inputspec = pe.Node(util.IdentityInterface(
        fields=["zstat", "mask", "zthreshold", "pthreshold",
                "connectivity", "anatomical"]),
        name="inputspec")
    smoothest = pe.MapNode(fsl.SmoothEstimate(), name='smooth_estimate', iterfield=['zstat_file'])
    workflow.connect(inputspec,'zstat', smoothest, 'zstat_file')
    workflow.connect(inputspec,'mask',smoothest, 'mask_file')

    cluster = pe.MapNode(fsl.Cluster(out_localmax_txt_file=True,
                                     out_index_file=True,
                                     out_localmax_vol_file=True), 
                         name='cluster', iterfield=['in_file','dlh','volume'])
    workflow.connect(smoothest,'dlh', cluster, 'dlh')
    workflow.connect(smoothest, 'volume', cluster, 'volume')
    workflow.connect(inputspec,"zthreshold",cluster,"threshold")
    workflow.connect(inputspec,"pthreshold",cluster,"pthreshold")
    workflow.connect(inputspec,"connectivity",cluster,"connectivity")
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    workflow.connect(inputspec,'zstat',cluster,'in_file')
    """
    labels = pe.MapNode(util.Function(input_names=['in_file','thr','csize'],
                                   output_names=['labels'],function=get_labels),
        name='labels',iterfield=["in_file"])

    workflow.connect(inputspec,"zthreshold",labels,"thr")
    workflow.connect(inputspec,"connectivity",labels,"csize")
    workflow.connect(cluster,"threshold_file",labels,"in_file")
    showslice=pe.MapNode(util.Function(input_names=['image_in','anat_file','coordinates','thr'],
                                    output_names=["outfiles"],function=show_slices),
              name='showslice',iterfield=["image_in","coordinates"])

    coords = pe.MapNode(util.Function(input_names=["in_file","img"],
                                   output_names=["coords"],
                                   function=get_coords2),
        name='getcoords', iterfield=["in_file","img"])

    workflow.connect(cluster,'threshold_file',showslice,'image_in')
    workflow.connect(inputspec,'anatomical',showslice,"anat_file")
    workflow.connect(inputspec,'zthreshold',showslice,'thr')
    workflow.connect(labels,'labels',coords,"img")
    workflow.connect(cluster,"threshold_file",coords,"in_file")
    workflow.connect(coords,"coords",showslice,"coordinates")

    overlay = pe.MapNode(util.Function(input_names=["stat_image",
                                                 "background_image",
                                                 "threshold"],
                                       output_names=["fnames"],function=overlay_new),
                         name='overlay', iterfield=["stat_image"])
    workflow.connect(inputspec,"anatomical", overlay,"background_image")
    workflow.connect(cluster,"threshold_file",overlay,"stat_image")
    workflow.connect(inputspec,"zthreshold",overlay,"threshold")
    """
    outputspec = pe.Node(util.IdentityInterface(
        fields=["corrected_z", "localmax_txt", "index_file", "localmax_vol",
                "slices", "cuts", "corrected_p"]),
        name='outputspec')
    workflow.connect(cluster,'threshold_file',outputspec,'corrected_z')
    workflow.connect(cluster,'index_file',outputspec,'index_file')
    workflow.connect(cluster,'localmax_vol_file',outputspec,'localmax_vol')
    #workflow.connect(showslice,"outfiles",outputspec,"slices")
    #workflow.connect(overlay,"fnames",outputspec,"cuts")
    workflow.connect(cluster,'localmax_txt_file',outputspec,'localmax_txt')
    return workflow
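A minimal driver for this workflow (file paths are placeholders):

wf = cluster_image(name="threshold_cluster_makeimages")
wf.inputs.inputspec.zstat = ["zstat1.nii.gz"]
wf.inputs.inputspec.mask = "group_mask.nii.gz"
wf.inputs.inputspec.zthreshold = 2.3
wf.inputs.inputspec.pthreshold = 0.05
wf.inputs.inputspec.connectivity = 26
wf.run()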
Example #4
def cluster_image2(name="threshold_cluster_makeimages"):
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    workflow = pe.Workflow(name=name)
    inputspec = pe.Node(util.IdentityInterface(fields=["pstat","mask","threshold","min_cluster_size",'anatomical']),name="inputspec")

    do_fdr = pe.MapNode(util.Function(input_names=['in_file','mask_file','pthresh'],
                                      output_names=['qstat','qthresh','qrate'],
                                      function=fdr),name='do_fdr',iterfield=['in_file'])

    cluster = pe.MapNode(fsl.Cluster(out_localmax_txt_file=True,
                                     out_index_file=True,
                                     out_localmax_vol_file=True), 
                         name='cluster', iterfield=['in_file','threshold'])

    workflow.connect(inputspec,'pstat',do_fdr,'in_file')
    workflow.connect(inputspec,'mask',do_fdr,'mask_file')
    workflow.connect(inputspec,'threshold', do_fdr,'pthresh')

    workflow.connect(do_fdr,"qthresh",cluster,"threshold")
    #workflow.connect(inputspec,"connectivity",cluster,"connectivity")
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    workflow.connect(do_fdr,'qstat',cluster,'in_file')

    labels = pe.MapNode(util.Function(input_names=['in_file','thr','csize'],
                                   output_names=['labels'],function=get_labels),
        name='labels',iterfield=["in_file","thr"])

    workflow.connect(do_fdr,"qthresh",labels,"thr")
    workflow.connect(inputspec,"min_cluster_size",labels,"csize")
    workflow.connect(cluster,"threshold_file",labels,"in_file")

    showslice=pe.MapNode(util.Function(input_names=['image_in','anat_file','coordinates','thr'],
                                    output_names=["outfiles"],function=show_slices),
              name='showslice',iterfield=["image_in","coordinates",'thr'])

    coords = pe.MapNode(util.Function(input_names=["in_file","img"],
                                   output_names=["coords"],
                                   function=get_coords2),
        name='getcoords', iterfield=["in_file","img"])

    workflow.connect(cluster,'threshold_file',showslice,'image_in')
    workflow.connect(inputspec,'anatomical',showslice,"anat_file")
    workflow.connect(do_fdr,'qthresh',showslice,'thr')
    workflow.connect(labels,'labels',coords,"img")
    workflow.connect(cluster,"threshold_file",coords,"in_file")
    workflow.connect(coords,"coords",showslice,"coordinates")

    overlay = pe.MapNode(util.Function(input_names=["stat_image",
                                                 "background_image",
                                                 "threshold"],
                                       output_names=["fnames"],function=overlay_new),
                         name='overlay', iterfield=["stat_image",'threshold'])
    workflow.connect(inputspec,"anatomical", overlay,"background_image")
    workflow.connect(cluster,"threshold_file",overlay,"stat_image")
    workflow.connect(do_fdr,"qthresh",overlay,"threshold")
    #workflow.connect(cluster, 'threshold_file',imgflow,'inputspec.in_file')
    #workflow.connect(dataflow,'func',imgflow, 'inputspec.in_file')
    #workflow.connect(inputspec,'mask',imgflow, 'inputspec.mask_file')

    outputspec = pe.Node(util.IdentityInterface(
        fields=["corrected_p", "localmax_txt", "index_file", "localmax_vol",
                "slices", "cuts", "qrate"]),
        name='outputspec')
    workflow.connect(cluster,'threshold_file',outputspec,'corrected_p')
    workflow.connect(showslice,"outfiles",outputspec,"slices")
    workflow.connect(overlay,"fnames",outputspec,"cuts")
    workflow.connect(cluster,'localmax_txt_file',outputspec,'localmax_txt')
    workflow.connect(do_fdr,"qrate",outputspec,'qrate')
    #workflow.connect(logp,'out_file',outputspec,"corrected_p")
    return workflow 
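This workflow assumes a module-level fdr function matching util.Function(input_names=['in_file', 'mask_file', 'pthresh'], output_names=['qstat', 'qthresh', 'qrate']). The original body is not shown; a hypothetical stand-in using a Benjamini-Hochberg threshold might look like this (the qstat/qrate returns are placeholders):

def fdr(in_file, mask_file, pthresh):
    # Hypothetical sketch, not the original: Benjamini-Hochberg threshold
    # over the voxelwise p-values inside the mask.
    import os
    import numpy as np
    import nibabel as nib
    mask = nib.load(mask_file).get_fdata() > 0
    pvals = nib.load(in_file).get_fdata()[mask]
    ranked = np.sort(pvals.ravel())
    n = ranked.size
    below = ranked <= (np.arange(1, n + 1) / float(n)) * pthresh
    qthresh = float(ranked[below].max()) if below.any() else 0.0
    qstat = os.path.abspath(in_file)  # placeholder: pass the stat map through
    qrate = pthresh                   # placeholder
    return qstat, qthresh, qrate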
Example #5
def prep_randomise_workflow(c,
                            merged_file,
                            mask_file,
                            f_test,
                            mat_file,
                            con_file,
                            grp_file,
                            output_dir,
                            working_dir,
                            log_dir,
                            model_name,
                            fts_file=None):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as nio

    wf = pe.Workflow(name='randomise_workflow')
    wf.base_dir = c.work_dir

    randomise = pe.Node(interface=fsl.Randomise(),
                        name='fsl-randomise_{0}'.format(model_name))
    randomise.inputs.base_name = model_name
    randomise.inputs.in_file = merged_file
    randomise.inputs.mask = mask_file
    randomise.inputs.num_perm = c.randomise_permutation
    randomise.inputs.demean = c.randomise_demean
    randomise.inputs.c_thresh = c.randomise_thresh
    randomise.inputs.tfce = c.randomise_tfce

    randomise.inputs.design_mat = mat_file
    randomise.inputs.tcon = con_file

    if fts_file:
        randomise.inputs.fcon = fts_file

    select_tcorrp_files = pe.Node(util.Function(input_names=['input_list'],
                                                output_names=['out_file'],
                                                function=select),
                                  name='select_t_corrp')

    wf.connect(randomise, 't_corrected_p_files', select_tcorrp_files,
               'input_list')

    select_tstat_files = pe.Node(util.Function(input_names=['input_list'],
                                               output_names=['out_file'],
                                               function=select),
                                 name='select_t_stat')

    wf.connect(randomise, 'tstat_files', select_tstat_files, 'input_list')

    thresh = pe.Node(interface=fsl.Threshold(), name='fsl_threshold_contrast')
    thresh.inputs.thresh = 0.95
    thresh.inputs.out_file = 'randomise_pipe_thresh_tstat.nii.gz'
    wf.connect(select_tstat_files, 'out_file', thresh, 'in_file')

    thresh_bin = pe.Node(interface=fsl.UnaryMaths(),
                         name='fsl_threshold_bin_contrast')
    thresh_bin.inputs.operation = 'bin'
    wf.connect(thresh, 'out_file', thresh_bin, 'in_file')

    apply_mask = pe.Node(interface=fsl.ApplyMask(),
                         name='fsl_applymask_contrast')
    wf.connect(select_tstat_files, 'out_file', apply_mask, 'in_file')
    wf.connect(thresh_bin, 'out_file', apply_mask, 'mask_file')

    cluster = pe.Node(interface=fsl.Cluster(), name='cluster_contrast')
    cluster.inputs.threshold = 0.0001
    cluster.inputs.out_index_file = "index_file"
    cluster.inputs.out_localmax_txt_file = "lmax_contrast.txt"
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_size_file = "cluster_size_contrast"
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_mean_file = True
    cluster.inputs.out_pval_file = True

    wf.connect(apply_mask, 'out_file', cluster, 'in_file')

    ds = pe.Node(nio.DataSink(), name='fsl-randomise_sink')

    ds.inputs.base_directory = str(output_dir)
    ds.inputs.container = ''

    wf.connect(randomise, 'tstat_files', ds, 'tstat_files')
    wf.connect(randomise, 't_corrected_p_files', ds, 't_corrected_p_files')
    wf.connect(select_tcorrp_files, 'out_file', ds, 'out_tcorr_corrected')
    wf.connect(select_tstat_files, 'out_file', ds, 'out_tstat_corrected')
    wf.connect(thresh, 'out_file', ds, 'randomise_pipe_thresh_tstat')
    wf.connect(thresh_bin, 'out_file', ds, 'thresh_bin_out')
    wf.connect(cluster, 'index_file', ds, 'index_file')
    wf.connect(cluster, 'threshold_file', ds, 'threshold_file')
    wf.connect(cluster, 'localmax_txt_file', ds, 'localmax_txt_file')
    wf.connect(cluster, 'localmax_vol_file', ds, 'localmax_vol_file')
    wf.connect(cluster, 'max_file', ds, 'max_file')
    wf.connect(cluster, 'mean_file', ds, 'mean_file')
    wf.connect(cluster, 'pval_file', ds, 'pval_file')
    wf.connect(cluster, 'size_file', ds, 'size_file')

    wf.run()
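Both this example and Example #7 below assume a module-level select function wired through Function(input_names=['input_list'], output_names=['out_file']). A minimal sketch consistent with that signature (the body is an assumption; the original is not shown):

def select(input_list):
    # assumed behavior: pick the first file from a list-valued output
    if isinstance(input_list, (list, tuple)):
        return input_list[0]
    return input_list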
Example #6
resampled_mask.to_filename(op.join(wf.base_dir, 'mask.nii.gz'))

n_voxels = (resampled_mask.get_fdata() > 0).sum()

smooth_est = pe.MapNode(fsl.SmoothEstimate(),
                        iterfield=['zstat_file'],
                        name='smooth_estimate')

smooth_est.inputs.mask_file = resampled_mask.get_filename()

wf.connect(inputnode, 'zmap', smooth_est, 'zstat_file')

cluster = pe.MapNode(fsl.Cluster(threshold=3.1,
                                 volume=n_voxels,
                                 pthreshold=0.05,
                                 out_pval_file=True,
                                 out_threshold_file=True,
                                 out_index_file=True,
                                 out_localmax_txt_file=True),
                     iterfield=[
                         'in_file',
                         'dlh',
                     ],
                     name='cluster')
wf.connect(inputnode, 'zmap', cluster, 'in_file')
wf.connect(smooth_est, 'dlh', cluster, 'dlh')


def invert_zmap(zmap):
    from nilearn import image
Example #7
def create_randomise(name='randomise', working_dir=None, crash_dir=None):
    """
    Parameters
    ----------
        
    Returns
    -------
    workflow : nipype.pipeline.engine.Workflow
        Randomise workflow.
        
    Notes
    -----
    
    Workflow Inputs::
    
        
    Workflow Outputs::

    
    References
    ----------
    
    """

    import os

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.utility import Function

    if not working_dir:
        working_dir = os.path.join(os.getcwd(), 'Randomise_work_dir')
    if not crash_dir:
        crash_dir = os.path.join(os.getcwd(), 'Randomise_crash_dir')

    wf = pe.Workflow(name=name)
    wf.base_dir = working_dir
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(crash_dir)
    }

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'subjects', 'design_matrix_file', 'contrast_file',
        'subjects_list', 'pipeline_output_folder', 'permutations',
        'mask_boolean', 'demean', 'c_thresh'
    ]),
                        name='inputspec')

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'tstat_files', 't_corrected_p_files', 'out_tcorr_corrected',
        'out_tstat_corrected', 'thresh_tstat', 'thresh_bin_out',
        'index_file', 'threshold_file',
        'localmax_txt_file', 'localmax_vol_file', 'max_file', 'mean_file',
        'pval_file', 'size_file'
    ]),
                         name='outputspec')

    #merge = pe.Node(interface=fsl.Merge(), name='fsl_merge')
    #merge.inputs.dimension = 't'
    #merge.inputs.merged_file = "randomise_merged.nii.gz"

    #wf.connect(inputspec, 'subjects', merge, 'in_files')

    #mask = pe.Node(interface=fsl.maths.MathsCommand(), name='fsl_maths')
    #mask.inputs.args = '-abs -Tmin -bin'
    #mask.inputs.out_file = "randomise_mask.nii.gz"
    #wf.connect(inputspec, 'subjects', mask, 'in_file')

    randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
    randomise.inputs.base_name = "randomise"
    randomise.inputs.demean = True
    randomise.inputs.tfce = True
    wf.connect([(inputspec, randomise, [
        ('subjects', 'in_file'),
        ('design_matrix_file', 'design_mat'),
        ('contrast_file', 'tcon'),
        ('permutations', 'num_perm'),
    ])])
    wf.connect(randomise, 'tstat_files', outputspec, 'tstat_files')
    wf.connect(randomise, 't_corrected_p_files', outputspec,
               't_corrected_p_files')
    #------------- NOTE: an issue arises here with tfce; without tfce enabled, randomise does not produce t_corrected_p files --------------------#

    select_tcorrp_files = pe.Node(Function(input_names=['input_list'],
                                           output_names=['out_file'],
                                           function=select),
                                  name='select_t_corrp')

    wf.connect(randomise, 't_corrected_p_files', select_tcorrp_files,
               'input_list')
    wf.connect(select_tcorrp_files, 'out_file', outputspec,
               'out_tcorr_corrected')

    select_tstat_files = pe.Node(Function(input_names=['input_list'],
                                          output_names=['out_file'],
                                          function=select),
                                 name='select_t_stat')

    wf.connect(randomise, 'tstat_files', select_tstat_files, 'input_list')
    wf.connect(select_tstat_files, 'out_file', outputspec,
               'out_tstat_corrected')

    thresh = pe.Node(interface=fsl.Threshold(), name='fsl_threshold_contrast')
    thresh.inputs.thresh = 0.95
    thresh.inputs.out_file = 'rando_pipe_thresh_tstat.nii.gz'
    wf.connect(select_tstat_files, 'out_file', thresh, 'in_file')
    wf.connect(thresh, 'out_file', outputspec, 'thresh_tstat')

    thresh_bin = pe.Node(interface=fsl.UnaryMaths(),
                         name='fsl_threshold_bin_contrast')
    thresh_bin.inputs.operation = 'bin'
    wf.connect(thresh, 'out_file', thresh_bin, 'in_file')
    wf.connect(thresh_bin, 'out_file', outputspec, 'thresh_bin_out')

    apply_mask = pe.Node(interface=fsl.ApplyMask(),
                         name='fsl_applymask_contrast')
    wf.connect(select_tstat_files, 'out_file', apply_mask, 'in_file')
    wf.connect(thresh_bin, 'out_file', apply_mask, 'mask_file')

    cluster = pe.Node(interface=fsl.Cluster(), name='cluster_contrast')
    cluster.inputs.threshold = 0.0001
    cluster.inputs.out_index_file = "index_file"
    cluster.inputs.out_localmax_txt_file = "lmax_contrast.txt"
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_size_file = "cluster_size_contrast"
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_mean_file = True
    cluster.inputs.out_pval_file = True

    wf.connect(apply_mask, 'out_file', cluster, 'in_file')

    wf.connect(cluster, 'index_file', outputspec, 'index_file')
    wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    wf.connect(cluster, 'localmax_txt_file', outputspec, 'localmax_txt_file')
    wf.connect(cluster, 'localmax_vol_file', outputspec, 'localmax_vol_file')
    wf.connect(cluster, 'max_file', outputspec, 'max_file')
    wf.connect(cluster, 'mean_file', outputspec, 'mean_file')
    wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    wf.connect(cluster, 'size_file', outputspec, 'size_file')

    return wf
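A minimal driver, with placeholder inputs matching the inputspec fields above:

wf = create_randomise(name='randomise')
wf.inputs.inputspec.subjects = 'merged_copes.nii.gz'
wf.inputs.inputspec.design_matrix_file = 'design.mat'
wf.inputs.inputspec.contrast_file = 'design.con'
wf.inputs.inputspec.permutations = 5000
wf.run()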
Example #8
File: mixedfx.py  Project: ningmengxu/lyman
def create_volume_mixedfx_workflow(name="volume_group",
                                   subject_list=None,
                                   regressors=None,
                                   contrasts=None,
                                   exp_info=None):

    # Handle default arguments
    if subject_list is None:
        subject_list = []
    if regressors is None:
        regressors = dict(group_mean=[])
    if contrasts is None:
        contrasts = [["group_mean", "T", ["group_mean"], [1]]]
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define workflow inputs
    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "varcopes", "dofs"]),
        "inputnode")

    # Merge the fixed effect summary images into one 4D image
    merge = Node(MergeAcrossSubjects(regressors=regressors), "merge")

    # Make a simple design
    design = Node(fsl.MultipleRegressDesign(contrasts=contrasts), "design")

    # Fit the mixed effects model
    flameo = Node(fsl.FLAMEO(run_mode=exp_info["flame_mode"]), "flameo")

    # Estimate the smoothness of the data
    smoothest = Node(fsl.SmoothEstimate(), "smoothest")

    # Correct for multiple comparisons
    cluster = Node(
        fsl.Cluster(threshold=exp_info["cluster_zthresh"],
                    pthreshold=exp_info["grf_pthresh"],
                    out_threshold_file=True,
                    out_index_file=True,
                    out_localmax_txt_file=True,
                    peak_distance=exp_info["peak_distance"],
                    use_mm=True), "cluster")

    # Project the mask and thresholded zstat onto the surface
    surfproj = create_surface_projection_workflow(exp_info=exp_info)

    # Segment the z stat image with a watershed algorithm
    watershed = Node(Watershed(), "watershed")

    # Make static report images in the volume
    report = Node(MFXReport(), "report")
    report.inputs.subjects = subject_list

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface([
            "copes", "varcopes", "mask_file", "flameo_stats", "thresh_zstat",
            "surf_zstat", "surf_mask", "cluster_image", "seg_file",
            "peak_file", "lut_file", "report", "json_file"
        ]), "outputnode")

    # Define and connect up the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, merge, [("copes", "cope_files"),
                            ("varcopes", "varcope_files"),
                            ("dofs", "dof_files")]),
        (inputnode, saveparams, [("copes", "in_file")]),
        (merge, flameo, [("cope_file", "cope_file"),
                         ("varcope_file", "var_cope_file"),
                         ("dof_file", "dof_var_cope_file"),
                         ("mask_file", "mask_file")]),
        (merge, design, [("regressors", "regressors")]),
        (design, flameo, [("design_con", "t_con_file"),
                          ("design_grp", "cov_split_file"),
                          ("design_mat", "design_file")]),
        (flameo, smoothest, [("zstats", "zstat_file")]),
        (merge, smoothest, [("mask_file", "mask_file")]),
        (smoothest, cluster, [("dlh", "dlh"), ("volume", "volume")]),
        (flameo, cluster, [("zstats", "in_file")]),
        (cluster, watershed, [("threshold_file", "zstat_file"),
                              ("localmax_txt_file", "localmax_file")]),
        (merge, report, [("mask_file", "mask_file"),
                         ("cope_file", "cope_file")]),
        (flameo, report, [("zstats", "zstat_file")]),
        (cluster, report, [("threshold_file", "zstat_thresh_file"),
                           ("localmax_txt_file", "localmax_file")]),
        (watershed, report, [("seg_file", "seg_file")]),
        (merge, surfproj, [("mask_file", "inputs.mask_file")]),
        (cluster, surfproj, [("threshold_file", "inputs.zstat_file")]),
        (merge, outputnode, [("cope_file", "copes"),
                             ("varcope_file", "varcopes"),
                             ("mask_file", "mask_file")]),
        (flameo, outputnode, [("stats_dir", "flameo_stats")]),
        (cluster, outputnode, [("threshold_file", "thresh_zstat"),
                               ("index_file", "cluster_image")]),
        (watershed, outputnode, [("seg_file", "seg_file"),
                                 ("peak_file", "peak_file"),
                                 ("lut_file", "lut_file")]),
        (surfproj, outputnode, [("outputs.surf_zstat", "surf_zstat"),
                                ("outputs.surf_mask", "surf_mask")]),
        (report, outputnode, [("out_files", "report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return group, inputnode, outputnode
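A driver sketch for this lyman workflow (subject IDs and file lists are placeholders; exp_info defaults come from lyman itself):

group, inputnode, outputnode = create_volume_mixedfx_workflow(
    subject_list=["s01", "s02"])
inputnode.inputs.copes = ["s01_cope1.nii.gz", "s02_cope1.nii.gz"]
inputnode.inputs.varcopes = ["s01_varcope1.nii.gz", "s02_varcope1.nii.gz"]
inputnode.inputs.dofs = ["s01_dof", "s02_dof"]
group.run()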
Example #9
NodeHash_2f7b4860.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_2e8e9e00 = pe.Node(interface=fsl.Merge(), name='NodeName_2e8e9e00')
NodeHash_2e8e9e00.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_313ca880 = pe.Node(interface=fsl.FLAMEO(), name='NodeName_313ca880')
NodeHash_313ca880.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_314ce330 = pe.Node(interface=fsl.SmoothEstimate(),
                            name='NodeName_314ce330')

#Wraps command **cluster**
NodeHash_332d21c0 = pe.Node(interface=fsl.Cluster(), name='NodeName_332d21c0')
NodeHash_332d21c0.inputs.pthreshold = 0.05
NodeHash_332d21c0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_33d80690 = pe.Node(interface=fsl.Merge(), name='NodeName_33d80690')
NodeHash_33d80690.inputs.dimension = 't'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_313ca880, 'zstats', NodeHash_314ce330,
                     'zstat_file')
analysisflow.connect(NodeHash_26dc8f20, 'MNI_brain', NodeHash_2f7b4860,
                     'ref_file')
analysisflow.connect(NodeHash_26dc8f20, 'MNI_brain', NodeHash_2b7ae5e0,
                     'ref_file')
Example #10
NodeHash_29ecf020 = pe.MapNode(interface=fsl.ApplyWarp(), name='NodeName_29ecf020', iterfield=['field_file', 'in_file', 'premat'])
NodeHash_29ecf020.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_2ceb9d10 = pe.Node(interface=fsl.Merge(), name='NodeName_2ceb9d10')
NodeHash_2ceb9d10.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_2f149160 = pe.Node(interface=fsl.FLAMEO(), name='NodeName_2f149160')
NodeHash_2f149160.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_2fbc52b0 = pe.Node(interface=fsl.SmoothEstimate(), name='NodeName_2fbc52b0')

#Wraps command **cluster**
NodeHash_318a61d0 = pe.Node(interface=fsl.Cluster(), name='NodeName_318a61d0')
NodeHash_318a61d0.inputs.pthreshold = 0.05
NodeHash_318a61d0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_33749690 = pe.Node(interface=fsl.Merge(), name='NodeName_33749690')
NodeHash_33749690.inputs.dimension = 't'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_28f35280, 'in_file')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_8f61130, 'reference')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_2062490, 'in_file')
analysisflow.connect(NodeHash_1ad6ca0, 'events', NodeHash_127d16f0, 'in_file')
analysisflow.connect(NodeHash_1ad6ca0, 'func', NodeHash_c3ef3c0, 'in_file')
analysisflow.connect(NodeHash_1ad6ca0, 'func', NodeHash_4d25ff0, 'in_file')
Example #11
File: tlc.py  Project: zxh2135645/TLC
def create_tlc_workflow(config, t1_file, freesurf_parc, flair_lesion):
    """
    Inputs::
        config: Dictionary with PBR configuration options. See config.py
        t1_file: full path of t1 image
        freesurf_parc: full path of aparc+aseg.mgz from freesurfer
        flair_lesion: edited binary lesion mask based on the FLAIR image (can also be labeled)
    Outputs::
        nipype.pipeline.engine.Workflow object
    """

    import nipype.interfaces.ants as ants
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.fsl as fsl
    from nipype.utils.filemanip import load_json
    import os
    import numpy as np
    from nipype.interfaces.freesurfer import Binarize, MRIConvert
    from nipype.interfaces.slicer.filtering import n4itkbiasfieldcorrection as n4
    from nipype.interfaces.fsl import Reorient2Std
    from nipype.interfaces.freesurfer import SegStats


    mse = get_mseid(t1_file)
    msid = get_msid(t1_file)
    working_dir = "tlc_{0}_{1}".format(msid, mse)

    register = Workflow(name=working_dir)
    register.base_dir = config["working_directory"]

    inputnode = Node(IdentityInterface(fields=["t1_image", "parc", "flair_lesion", "mse"]),
                     name="inputspec")
    inputnode.inputs.t1_image = t1_file
    inputnode.inputs.parc = freesurf_parc
    inputnode.inputs.flair_lesion = flair_lesion
    inputnode.inputs.mse = mse

    bin_math = Node(fsl.BinaryMaths(), name="Convert_to_binary")
    bin_math.inputs.operand_value = 1
    bin_math.inputs.operation = 'min'
    register.connect(inputnode, "flair_lesion", bin_math, "in_file")

    binvol1 = Node(Binarize(), name="binary_ventricle")
    binvol1.inputs.match = [4, 5, 11, 14, 15, 24, 43, 44, 50, 72, 213, 31, 63]
    #binvol1.inputs.match = [4, 5, 14, 15, 24, 43, 44, 72, 213]
    # every parcellation corresponds to ventricle CSF
    #binvol1.inputs.mask_thresh = 0.5
    binvol1.inputs.binary_file = os.path.join(config["working_directory"],
                                              working_dir, "binary_ventricle", "binarize_ventricle.nii.gz")
    register.connect(inputnode, "parc", binvol1, "in_file")

    binvol2 = Node(Binarize(), name="binary_gray_matter")
    binvol2.inputs.match = [3, 8, 42, 47, 169, 220, 702,
                            1878, 1915, 1979, 1993, 2000, 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
                            2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026,
                            2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035,
                            772, 833, 835, 896, 925, 936, 1001, 1002, 1003, 1005, 1006, 1007, 1008, 1009, 1010, 1011,
                            1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026,
                            1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035]
    binvol2.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_gray_matter", "binarize_cortex.nii.gz")
    #binvol2.inputs.mask_thresh = 0.5
    register.connect(inputnode, "parc", binvol2, "in_file")

    bias_corr = Node(n4.N4ITKBiasFieldCorrection(), name="BiasFieldCorrection")
    bias_corr.inputs.outputimage = os.path.join(config["working_directory"], working_dir,
                                                "BiasFieldCorrection", "bias_corrected.nii.gz")
    register.connect(inputnode, "t1_image", bias_corr, "inputimage")

    reo1 = Node(Reorient2Std(), name="reorient1")
    reo2 = Node(Reorient2Std(), name="reorient2")
    register.connect(binvol1, "binary_file", reo1, "in_file")
    register.connect(binvol2, "binary_file", reo2, "in_file")

    mri_convert1 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert1")
    mri_convert2 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert2")
    mri_convert1.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert1')
    register.connect(bias_corr, "outputimage", mri_convert1, "t1_image")
    register.connect(reo1, "out_file", mri_convert1, "reorient_mask")
    mri_convert2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert2')
    register.connect(bias_corr, "outputimage", mri_convert2, "t1_image")
    register.connect(reo2, "out_file", mri_convert2, "reorient_mask")

    binvol3 = Node(Binarize(), name="binary_white_matter")
    binvol3.inputs.match = [2, 7, 16, 28, 41, 46, 60, 77, 78, 79, 251, 252, 253, 254, 255]
    #binvol3.inputs.match = [2, 7, 41, 46, 77, 78, 79]
    #binvol3.inputs.mask_thresh = 0.5
    binvol3.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_white_matter", "binarize_white_matter.nii.gz")
    register.connect(inputnode, "parc", binvol3, "in_file")
    reo3 = Node(Reorient2Std(), name="reorient3")
    register.connect(binvol3, "binary_file", reo3, "in_file")

    mri_convert3 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert3")
    mri_convert3.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert3')
    register.connect(reo3, "out_file", mri_convert3, "reorient_mask")
    register.connect(bias_corr, "outputimage", mri_convert3, "t1_image")

    get_new_lesion = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path85', 'out_path90', 'out_path95', 'out_path100', 'out_path_combined'],
                                   function=matrix_operation), name='get_new_lesion')
    get_new_lesion.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion')
    register.connect(bias_corr, "outputimage", get_new_lesion, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion, "white_matter")


    cluster85 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster85")
    register.connect(get_new_lesion, "out_path85", cluster85, "in_file")
    segstats85 = Node(SegStats(), name="segstats85")
    register.connect(cluster85, "index_file", segstats85, "segmentation_file")

    cluster90 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster90")
    register.connect(get_new_lesion, "out_path90", cluster90, "in_file")
    segstats90 = Node(SegStats(), name="segstats90")
    register.connect(cluster90, "index_file", segstats90, "segmentation_file")

    cluster95 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster95")
    register.connect(get_new_lesion, "out_path95", cluster95, "in_file")
    segstats95 = Node(SegStats(), name="segstats95")
    register.connect(cluster95, "index_file", segstats95, "segmentation_file")

    cluster100 = Node(fsl.Cluster(threshold=0.0001,
                                  out_index_file=True,
                                  use_mm=True),
                      name="cluster100")
    register.connect(get_new_lesion, "out_path100", cluster100, "in_file")
    segstats100 = Node(SegStats(), name="segstats100")
    register.connect(cluster100, "index_file", segstats100, "segmentation_file")

    get_new_lesion2 = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path90', 'out_path95', 'out_path100'],
                                   function=matrix_operation2), name='get_new_lesion2')
    get_new_lesion2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion2')
    register.connect(bias_corr, "outputimage", get_new_lesion2, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion2, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion2, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion2, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion2, "white_matter")
    cluster_intersection90 = Node(fsl.Cluster(threshold=0.0001,
                                              out_index_file=True,
                                              use_mm=True),
                                  name="cluster_intersection90")
    register.connect(get_new_lesion2, "out_path90", cluster_intersection90, "in_file")
    segstats_intersection90 = Node(SegStats(), name="segstats_intersection90")
    register.connect(cluster_intersection90, "index_file", segstats_intersection90, "segmentation_file")

    cluster_intersection95 = Node(fsl.Cluster(threshold=0.0001,
                                              out_index_file=True,
                                              use_mm=True),
                                  name="cluster_intersection95")
    register.connect(get_new_lesion2, "out_path95", cluster_intersection95, "in_file")
    segstats_intersection95 = Node(SegStats(), name="segstats_intersection95")
    register.connect(cluster_intersection95, "index_file", segstats_intersection95, "segmentation_file")

    cluster_intersection100 = Node(fsl.Cluster(threshold=0.0001,
                                               out_index_file=True,
                                               use_mm=True),
                                   name="cluster_intersection100")
    register.connect(get_new_lesion2, "out_path100", cluster_intersection100, "in_file")
    segstats_intersection100 = Node(SegStats(), name="segstats_intersection100")
    register.connect(cluster_intersection100, "index_file", segstats_intersection100, "segmentation_file")

    sinker = Node(DataSink(), name="sinker")
    sinker.inputs.base_directory = os.path.join(config["output_directory"], mse, "tlc")
    sinker.inputs.container = '.'
    sinker.inputs.substitutions = []

    register.connect(get_new_lesion, "out_path85", sinker, "85.@lesion85")
    register.connect(get_new_lesion, "out_path90", sinker, "90.@lesion90")
    register.connect(get_new_lesion, "out_path95", sinker, "95.@lesion95")
    register.connect(get_new_lesion, "out_path100", sinker, "100.@lesion100")
    register.connect(get_new_lesion, "out_path_combined", sinker, "@WhiteMatterCombined")
    register.connect(get_new_lesion2, "out_path90", sinker, "intersection90.@lesion90")
    register.connect(get_new_lesion2, "out_path95", sinker, "intersection95.@lesion95")
    register.connect(get_new_lesion2, "out_path100", sinker, "intersection100.@lesion100")

    register.connect(segstats85, "summary_file", sinker, "85.@summaryfile85")
    register.connect(segstats90, "summary_file", sinker, "90.@summaryfile90")
    register.connect(segstats95, "summary_file", sinker, "95.@summaryfile95")
    register.connect(segstats100, "summary_file", sinker, "100.@summaryfile100")
    register.connect(segstats_intersection90, "summary_file", sinker, "intersection90.@summaryfile90")
    register.connect(segstats_intersection95, "summary_file", sinker, "intersection95.@summaryfile95")
    register.connect(segstats_intersection100, "summary_file", sinker, "intersection100.@summaryfile100")

    register.connect(cluster85, "index_file", sinker, "85.@index_file85")
    register.connect(cluster90, "index_file", sinker, "90.@index_file90")
    register.connect(cluster95, "index_file", sinker, "95.@index_file95")
    register.connect(cluster100, "index_file", sinker, "100.@index_file100")
    register.connect(cluster_intersection90, "index_file", sinker, "intersection90.@index_file90")
    register.connect(cluster_intersection95, "index_file", sinker, "intersection95.@index_file95")
    register.connect(cluster_intersection100, "index_file", sinker, "intersection100.@index_file100")

    return register
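A minimal driver sketch; the config keys match the ones read inside the function, while the paths are placeholders and the project helpers (get_mseid, get_msid, mri_convert_like, matrix_operation, matrix_operation2) are assumed to be defined at module level:

config = {"working_directory": "/scratch/tlc_work",
          "output_directory": "/data/tlc_out"}
wf = create_tlc_workflow(config,
                         t1_file="/data/sub/t1.nii.gz",
                         freesurf_parc="/data/sub/aparc+aseg.mgz",
                         flair_lesion="/data/sub/flair_lesion.nii.gz")
wf.run()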
Example #12
def second_level_wf(name):
    """second level analysis"""
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'copes', 'varcopes', 'group_mask', 'design_mat', 'design_con',
        'design_grp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['zstat', 'tstat', 'pstat', 'fwe_thres', 'fdr_thres']),
                         name='outputnode')

    copemerge = pe.Node(fsl.Merge(dimension='t'), name='copemerge', mem_gb=40)
    varcopemerge = pe.Node(fsl.Merge(dimension='t'),
                           name='varcopemerge',
                           mem_gb=40)
    flameo = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo')
    ztopval = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                      name='ztop')

    # FDR
    fdr = pe.Node(FDR(), name='calc_fdr')
    fdr_apply = pe.Node(fsl.ImageMaths(suffix='_thresh_vox_fdr_pstat1'),
                        name='fdr_apply')

    # FWE
    def _reselcount(voxels, resels):
        return float(voxels / resels)

    smoothness = pe.Node(fsl.SmoothEstimate(), name='smoothness')
    rescount = pe.Node(niu.Function(function=_reselcount), name='reselcount')
    ptoz = pe.Node(PtoZ(), name='ptoz')
    fwethres = pe.Node(fsl.Threshold(), name='fwethres')

    # Cluster
    cluster = pe.Node(fsl.Cluster(threshold=3.2,
                                  pthreshold=0.05,
                                  connectivity=26,
                                  use_mm=True),
                      name='cluster')

    def _len(inlist):
        return len(inlist)

    def _lastidx(inlist):
        return len(inlist) - 1

    def _first(inlist):
        if isinstance(inlist, (list, tuple)):
            return inlist[0]
        return inlist

    def _fdr_thres_operator(fdr_th):
        return '-mul -1 -add 1 -thr %f' % (1 - fdr_th)

    # create workflow
    workflow.connect([
        (inputnode, flameo, [('design_mat', 'design_file'),
                             ('design_con', 't_con_file'),
                             ('design_grp', 'cov_split_file')]),
        (inputnode, copemerge, [('copes', 'in_files')]),
        (inputnode, varcopemerge, [('varcopes', 'in_files')]),
        (inputnode, flameo, [('group_mask', 'mask_file')]),
        (copemerge, flameo, [('merged_file', 'cope_file')]),
        (varcopemerge, flameo, [('merged_file', 'var_cope_file')]),
        (flameo, ztopval, [(('zstats', _first), 'in_file')]),
        (ztopval, fdr, [('out_file', 'in_file')]),
        (inputnode, fdr, [('group_mask', 'in_mask')]),
        (inputnode, fdr_apply, [('group_mask', 'mask_file')]),
        (flameo, fdr_apply, [(('zstats', _first), 'in_file')]),
        (fdr, fdr_apply, [(('fdr_val', _fdr_thres_operator), 'op_string')]),
        (inputnode, smoothness, [('group_mask', 'mask_file')]),
        (flameo, smoothness, [(('res4d', _first), 'residual_fit_file')]),
        (inputnode, smoothness, [(('copes', _lastidx), 'dof')]),
        (smoothness, rescount, [('resels', 'resels'), ('volume', 'voxels')]),
        (rescount, ptoz, [('out', 'resels')]),
        (flameo, fwethres, [(('zstats', _first), 'in_file')]),
        (ptoz, fwethres, [('z_val', 'thresh')]),
        (flameo, cluster, [(('zstats', _first), 'in_file'),
                           (('copes', _first), 'cope_file')]),
        (smoothness, cluster, [('dlh', 'dlh'), ('volume', 'volume')]),
        (flameo, outputnode, [
            (('zstats', _first), 'zstat'),
            (('tstats', _first), 'tstat'),
        ]),
        (ztopval, outputnode, [('out_file', 'pstat')]),
        (fdr_apply, outputnode, [('out_file', 'fdr_thres')]),
        (fwethres, outputnode, [('out_file', 'fwe_thres')]),
    ])
    return workflow
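The _fdr_thres_operator helper above turns the FDR threshold q into an fslmaths op string: the input map is flipped to 1 - x and then thresholded at 1 - q. For example:

>>> _fdr_thres_operator(0.05)
'-mul -1 -add 1 -thr 0.950000'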
Example #13
def clustering(thresh_zstat1, thresh_zstat2, thresh_zfstat1, copes, dlh,
               volume):

    #If you have many contrasts, you can create a loop and iterate over each contrast

    import nipype.interfaces.fsl as fsl
    import os

    Clustering_t1 = fsl.Cluster()
    Clustering_t1.inputs.threshold = 2.3
    Clustering_t1.inputs.pthreshold = 0.05
    Clustering_t1.inputs.in_file = thresh_zstat1
    Clustering_t1.inputs.cope_file = copes[0]
    Clustering_t1.inputs.connectivity = 26
    Clustering_t1.inputs.volume = volume
    Clustering_t1.inputs.dlh = dlh

    Clustering_t1.inputs.out_threshold_file = 'thresh_zstat1.nii.gz'
    Clustering_t1.inputs.out_index_file = 'cluster_mask_zstat1'
    Clustering_t1.inputs.out_localmax_txt_file = 'lmax_zstat1.txt'

    Clustering_t1.run()

    #==========================================================================================================================

    Clustering_t2 = fsl.Cluster()
    Clustering_t2.inputs.threshold = 2.3
    Clustering_t2.inputs.pthreshold = 0.05
    Clustering_t2.inputs.in_file = thresh_zstat2
    Clustering_t2.inputs.cope_file = copes[1]
    Clustering_t2.inputs.connectivity = 26
    Clustering_t2.inputs.volume = volume
    Clustering_t2.inputs.dlh = dlh

    Clustering_t2.inputs.out_threshold_file = 'thresh_zstat2.nii.gz'
    Clustering_t2.inputs.out_index_file = 'cluster_mask_zstat2'
    Clustering_t2.inputs.out_localmax_txt_file = 'lmax_zstat2.txt'

    Clustering_t2.run()

    #==========================================================================================================================
    # In[15]:
    #Clustering on the statistical output of f-contrast

    Clustering_f = fsl.Cluster()
    Clustering_f.inputs.threshold = 2.3
    Clustering_f.inputs.pthreshold = 0.05
    Clustering_f.inputs.in_file = thresh_zfstat1
    Clustering_f.inputs.connectivity = 26
    Clustering_f.inputs.volume = volume
    Clustering_f.inputs.dlh = dlh

    Clustering_f.inputs.out_threshold_file = 'thresh_zfstat1.nii.gz'
    Clustering_f.inputs.out_index_file = 'cluster_mask_zfstat1'
    Clustering_f.inputs.out_localmax_txt_file = 'lmax_zfstat1.txt'

    Clustering_f.run()
    thresh_zstat1 = os.path.abspath('thresh_zstat1.nii.gz')
    thresh_zstat2 = os.path.abspath('thresh_zstat2.nii.gz')
    thresh_zfstat1 = os.path.abspath('thresh_zfstat1.nii.gz')

    return thresh_zstat1, thresh_zstat2, thresh_zfstat1
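As the comment at the top of this example notes, the three near-identical blocks can be collapsed into a loop over contrasts. A sketch under the same inputs (t-contrasts first, the f-contrast last with no cope file):

def clustering_loop(thresh_zstats, copes, dlh, volume):
    import os
    import nipype.interfaces.fsl as fsl

    out_files = []
    for i, zstat in enumerate(thresh_zstats, start=1):
        cl = fsl.Cluster()
        cl.inputs.in_file = zstat
        cl.inputs.threshold = 2.3
        cl.inputs.pthreshold = 0.05
        cl.inputs.connectivity = 26
        cl.inputs.volume = volume
        cl.inputs.dlh = dlh
        if i <= len(copes):  # f-contrasts carry no cope file
            cl.inputs.cope_file = copes[i - 1]
        cl.inputs.out_threshold_file = 'thresh_zstat{0}.nii.gz'.format(i)
        cl.inputs.out_index_file = 'cluster_mask_zstat{0}'.format(i)
        cl.inputs.out_localmax_txt_file = 'lmax_zstat{0}.txt'.format(i)
        cl.run()
        out_files.append(os.path.abspath('thresh_zstat{0}.nii.gz'.format(i)))
    return out_files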
Example #14
File: easy_thres.py  Project: amrka/ratpype
def threshold(wf_name, correction="uncorrected"):
    """
    Workflow for carrying out uncorrected, cluster-based and voxel-based thresholding
    (following FSL terminology)
    and colour activation overlaying

    Parameters
    ----------
    wf_name : string
        Workflow name

    Returns
    -------
    easy_thresh : object
        Easy thresh workflow object

    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/easy_thresh/easy_thresh.py>`_
    Modified by Tamas Spisak for preclinical use
    Added: uncorrected and voxel-corrected thresholding

    Workflow Inputs::

        inputspec.z_stats : string (nifti file)
            z_score stats output for t or f contrast from flameo

        inputspec.merge_mask : string (nifti file)
            mask generated from 4D Merged derivative file

        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to
            determine what level of activation would be statistically significant.
            Increasing this will result in higher estimates of required effect.

        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.

        inputspec.parameters : string (tuple)
            tuple containing MNI template and FSLDIR path information

    Workflow Outputs::

        outputspec.cluster_threshold : string (nifti files)
           the thresholded Z statistic image for each t contrast

        outputspec.cluster_index : string (nifti files)
            image of clusters for each t contrast; the values
            in the clusters are the index numbers as used
            in the cluster list.

        outputspec.overlay_threshold : string (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color
            Rendering GUI to reload the color look-up-table

        outputspec.overlay_rendered_image : string (nifti files)
           2D color rendered stats overlay picture for each t contrast

        outputspec.cluster_localmax_txt : string (text files)
            local maxima text file, defines the coordinates of maximum value
            in the cluster


    Order of commands in case of cluster correction:

    - Estimate smoothness of the image::

        smoothest --mask= merge_mask.nii.gz --zstat=.../flameo/stats/zstat1.nii.gz

        arguments
        --mask  :  brain mask volume
        --zstat :  filename of zstat/zfstat image

    - Create mask. For details see `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm#fslutils>`_::

        fslmaths ../flameo/stats/zstat1.nii.gz
                 -mas merge_mask.nii.gz
                 zstat1_mask.nii.gz

        arguments
        -mas   : use (following image>0) to mask current image

    - Copy geometry (image dimensions, voxel dimensions, voxel dimension units string, image orientation/origin or qform/sform info) from one image to another::

        fslcpgeom MNI152_T1_2mm_brain.nii.gz zstat1_mask.nii.gz

    - Cluster based thresholding. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::

        cluster --dlh = 0.0023683100
                --in = zstat1_mask.nii.gz
                --oindex = zstat1_cluster_index.nii.gz
                --olmax = zstat1_cluster_localmax.txt
                --othresh = zstat1_cluster_threshold.nii.gz
                --pthresh = 0.0500000000
                --thresh = 2.3000000000
                --volume = 197071

        arguments
        --in    :    filename of input volume
        --dlh   :    smoothness estimate = sqrt(det(Lambda))
        --oindex  :  filename for output of cluster index
        --othresh :  filename for output of thresholded image
        --olmax   :  filename for output of local maxima text file
        --volume  :  number of voxels in the mask
        --pthresh :  p-threshold for clusters
        --thresh  :  threshold for input volume

     Z statistic image is thresholded to show which voxels or clusters of voxels are activated at a particular significance level.
     A Z statistic threshold is used to define contiguous clusters. Then each cluster's estimated significance level (from GRF-theory)
     is compared with the cluster probability threshold. Significant clusters are then used to mask the original Z statistic image.

    High Level Workflow Graph:

    .. image:: ../images/easy_thresh.dot.png
       :width: 800


    Detailed Workflow Graph:

    .. image:: ../images/easy_thresh_detailed.dot.png
       :width: 800

    TODO: Order of commands in case of voxel correction and uncorrected

    Examples
    --------

    >>> preproc = threshold("new_workflow", correction="cluster")
    >>> preproc.inputs.inputspec.z_stats = 'flameo/stats/zstat1.nii.gz'
    >>> preproc.inputs.inputspec.mask = 'merge_mask/alff_Z_fn2standard_merged_mask.nii.gz'
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.run()  # doctest: +SKIP

    """

    easy_thresh = pe.Workflow(name=wf_name)

    outputnode = pe.Node(
        util.IdentityInterface(fields=['thres_zstat', 'overlay_threshold']),
        name='outputspec')

    if correction == 'uncorrected':
        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')

        # run clustering after fixing stats header for talspace
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        ptoz = pe.Node(interface=myutils.PtoZ(), name='PtoZ')

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdUncorr',
                           iterfield=['in_file'])

        easy_thresh.connect(zstat_mask, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')

    elif correction == 'voxel':
        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')
        # run clustering after fixing stats header for talspace
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(zstat_mask, 'out_file', smooth_estimate,
                            'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        ptoz = pe.MapNode(interface=myutils.PtoZ(),
                          name='PtoZ',
                          iterfield=['resels'])

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')
        easy_thresh.connect(smooth_estimate, 'resels', ptoz, 'resels')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdVoxel',
                           iterfield=['in_file', 'thresh'])
        thres._interface._suffix = 'vox'

        easy_thresh.connect(zstat_mask, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')

    elif correction == 'tfce':

        # TODO: the TFCE-based correction below is experimental and has not
        # been validated yet
        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')

        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        # -mas: use (following image > 0) to mask the current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # TFCE correction: fslmaths -tfce <H> <E> <connectivity>
        op_string = '-tfce 2 0.5 6'
        tfce = pe.MapNode(interface=fsl.ImageMaths(suffix='_tfce',
                                                   op_string=op_string),
                          iterfield=['in_file'],
                          name='tfce')

        easy_thresh.connect(zstat_mask, 'out_file', tfce, 'in_file')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(tfce, 'out_file', smooth_estimate, 'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        ptoz = pe.MapNode(interface=myutils.PtoZ(),
                          name='PtoZ',
                          iterfield=['resels'])

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')
        easy_thresh.connect(smooth_estimate, 'resels', ptoz, 'resels')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdTFCE',
                           iterfield=['in_file', 'thresh'])

        easy_thresh.connect(tfce, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')


    elif correction == 'cluster':

        inputnode = pe.Node(util.IdentityInterface(
            fields=['z_stats', 'mask', 'z_threshold', 'p_threshold']),
                            name='inputspec')

        # mask the z-stat images with the group mask before clustering
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        # -mas: use (following image > 0) to mask the current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(zstat_mask, 'out_file', smooth_estimate,
                            'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        # Cluster-based thresholding:
        # After carrying out the initial statistical test, the resulting
        # Z statistic image is normally thresholded to show which voxels or
        # clusters of voxels are activated at a particular significance level.
        # A Z statistic threshold is used to define contiguous clusters.
        # Each cluster's estimated significance level (from GRF theory) is
        # then compared with the cluster probability threshold. Significant
        # clusters are used to mask the original Z statistic image for later
        # production of colour blobs. This method of thresholding is an
        # alternative to voxel-based correction and is normally more
        # sensitive to activation.

        cluster = pe.MapNode(interface=fsl.Cluster(
            out_pval_file='pval.nii.gz',
            out_threshold_file='thres_clust_zstat.nii.gz'),
                             name='ThresholdClust',
                             iterfield=['in_file', 'dlh', 'volume'])

        easy_thresh.connect(zstat_mask, 'out_file', cluster, 'in_file')
        easy_thresh.connect(inputnode, 'z_threshold', cluster, 'threshold')
        easy_thresh.connect(inputnode, 'p_threshold', cluster, 'pthreshold')
        easy_thresh.connect(smooth_estimate, 'volume', cluster, 'volume')
        easy_thresh.connect(smooth_estimate, 'dlh', cluster, 'dlh')

        easy_thresh.connect(cluster, 'threshold_file', outputnode,
                            'thres_zstat')
        easy_thresh.connect(inputnode, 'z_threshold', outputnode,
                            'overlay_threshold')
    else:
        raise ValueError(
            "invalid thresholding correction mode: {0}".format(correction))

    return easy_thresh
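
# Note: myutils.PtoZ is not shown in this example. For the uncorrected branch
# it amounts to the inverse survival function of the standard normal
# distribution (what FSL's `ptoz` computes). A minimal sketch, assuming scipy
# is available; the resel-based GRF adjustment used by the voxel branch is
# not covered here.
from scipy import stats

def p_to_z(p_val):
    """Return the Z score whose upper-tail probability equals p_val."""
    return stats.norm.isf(p_val)

# p_to_z(0.05)  -> ~1.6449, matching `ptoz 0.05`
# p_to_z(0.001) -> ~3.0902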
Example #15
import os
from nipype import Node, Workflow
from nipype.interfaces import fsl
from nipype.interfaces.io import DataSink

# `contInd` (contrast index) and `zThresh` (cluster-forming Z threshold)
# are assumed to be defined earlier in the original script.
dataDir = '/tmp/Data/ds114'
# Output directory
outDir = os.path.join(dataDir, 'WorkflowOutput')
# Datasink directory
datasinkDir = os.path.join(outDir, 'OVGvsOWR_Test')
# stats_dir directory
statsDir = os.path.join(datasinkDir, 'stats_dir/stats')
# Z-stat image
imgZStat = os.path.join(statsDir, 'zstat' + contInd + '.nii.gz')


# FINDING CLUSTERS IN THE ANALYSIS RESULTS
# cluster node
cluster = Node(fsl.Cluster(in_file=imgZStat,
                           threshold=zThresh,
                           out_index_file=True,
                           out_threshold_file=True,
                           out_localmax_txt_file=True),
               name='cluster')

# data sink node
datasink = Node(DataSink(base_directory=statsDir),
                name='datasink')

# workflow connecting clustering to the datasink
clusterWF = Workflow(name="clusterWF", base_dir=outDir)
clusterWF.connect(cluster, 'index_file', datasink, 'index_file')
clusterWF.connect(cluster, 'threshold_file', datasink, 'threshold_file')
clusterWF.connect(cluster, 'localmax_txt_file', datasink, 'localmax_txt_file')
clusterWF.run()
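
# The local-maxima text file written by the cluster node is a plain
# tab-separated table (cluster index, Z value, x/y/z coordinates). A minimal
# sketch for loading it with pandas; the filename here is illustrative, since
# out_localmax_txt_file=True lets nipype auto-name the output.
import pandas as pd

lmax = pd.read_csv('lmax_zstat.txt', sep='\t')  # one header line, tab-separated
print(lmax.head())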
Example #16
# ============================================================================================================================
# Estimate the smoothness of the image
smooth_est = Node(fsl.SmoothEstimate(), name='smooth_estimation')
smooth_est.inputs.dof = 147  # degrees of freedom of the first-level design

# ============================================================================================================================

mask_zstat = Node(fsl.ApplyMask(), name='mask_zstat')
mask_zstat.inputs.out_file = 'thresh_zstat.nii.gz'

# ============================================================================================================================
clustering_t = Node(fsl.Cluster(), name='clustering_t_contrast')
clustering_t.inputs.threshold = 2.3
clustering_t.inputs.pthreshold = 0.05
clustering_t.inputs.out_threshold_file = 'thresh_zstat.nii.gz'
clustering_t.inputs.out_index_file = 'cluster_mask_zstat'
clustering_t.inputs.out_localmax_txt_file = 'lmax_zstat.txt'
clustering_t.inputs.connectivity = 26

# ============================================================================================================================
# In[15]:
# overlay t contrast
overlay_t_contrast = Node(fsl.Overlay(), name='overlay_t_contrast')
overlay_t_contrast.inputs.auto_thresh_bg = True
overlay_t_contrast.inputs.stat_thresh = (2.300302, 5)
overlay_t_contrast.inputs.transparency = True
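
# A rendering step commonly follows the Overlay node. A hedged sketch using
# fsl.Slicer; the node name, slice sampling and montage width are
# illustrative choices, not part of the original script.
slicer_t_contrast = Node(fsl.Slicer(), name='slicer_t_contrast')
slicer_t_contrast.inputs.sample_axial = 2   # render every 2nd axial slice
slicer_t_contrast.inputs.image_width = 750  # width of the PNG montage in pixels
slicer_t_contrast.inputs.out_file = 'rendered_thresh_zstat.png'
# In a workflow this would be wired up as, e.g.:
# wf.connect(overlay_t_contrast, 'out_file', slicer_t_contrast, 'in_file')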
Example #17
#Wraps command **applywarp**
NodeHash_d73fcb0 = pe.MapNode(interface = fsl.ApplyWarp(), name = 'NodeName_d73fcb0', iterfield = ['field_file', 'in_file', 'premat'])
NodeHash_d73fcb0.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_264457d0 = pe.Node(interface = fsl.Merge(), name = 'NodeName_264457d0')
NodeHash_264457d0.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_882ac40 = pe.Node(interface = fsl.FLAMEO(), name = 'NodeName_882ac40')
NodeHash_882ac40.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_33f1eba0 = pe.Node(interface = fsl.SmoothEstimate(), name = 'NodeName_33f1eba0')

#Wraps command **cluster**
NodeHash_1978f9c0 = pe.Node(interface = fsl.Cluster(), name = 'NodeName_1978f9c0')
NodeHash_1978f9c0.inputs.pthreshold = 0.05
NodeHash_1978f9c0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_3c0ae30 = pe.Node(interface = fsl.Merge(), name = 'NodeName_3c0ae30')
NodeHash_3c0ae30.inputs.dimension = 't'

#Create a workflow to connect all those nodes
#(NodeHash_2e292140, NodeHash_347043c0 and NodeHash_1e7cc750 are defined in
#earlier, omitted parts of the generated script)
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_2e292140, 'MNI_brain', NodeHash_d73fcb0, 'ref_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_brain', NodeHash_347043c0, 'ref_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_head', NodeHash_1e7cc750, 'ref_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_mask', NodeHash_882ac40, 'mask_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_mask', NodeHash_33f1eba0, 'mask_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_mask', NodeHash_1e7cc750, 'refmask_file')
Example #18
def combine_report(c, first_c=None, prep_c=None, fx_c=None, thr=2.326, csize=30, fx=False):
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio

    if not fx:
        workflow = pe.Workflow(name='first_level_report')
        #dataflow = get_data(first_c)
    else:
        workflow = pe.Workflow(name='fixedfx_report')
        #dataflow =  get_fx_data(fx_c)
    
    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')

    """
    if c.test_mode:
        infosource.iterables = ('subject_id', [c.subjects[0]])
    else:
        infosource.iterables = ('subject_id', c.subjects)
    
    infosource1 = pe.Node(util.IdentityInterface(fields=['fwhm']),
                         name='fwhms')
    infosource1.iterables = ('fwhm', prep_c.fwhm)
    """

    dataflow = c.datagrabber.create_dataflow()

    fssource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')
    
    #workflow.connect(infosource, 'subject_id', dataflow, 'subject_id')
    #workflow.connect(infosource1, 'fwhm', dataflow, 'fwhm')

    infosource = dataflow.get_node("subject_id_iterable")

    workflow.connect(infosource, 'subject_id', fssource, 'subject_id')
    fssource.inputs.subjects_dir = prep_c.surf_dir
    
    imgflow = img_wkflw(thr=thr,csize=csize)
    
    # adding cluster correction before sending to imgflow
    
    smoothest = pe.MapNode(fsl.SmoothEstimate(), name='smooth_estimate', iterfield=['zstat_file'])
    workflow.connect(dataflow,'datagrabber.func', smoothest, 'zstat_file')
    workflow.connect(dataflow,'datagrabber.mask',smoothest, 'mask_file')
    
    cluster = pe.MapNode(fsl.Cluster(), name='cluster', iterfield=['in_file','dlh','volume'])
    workflow.connect(smoothest,'dlh', cluster, 'dlh')
    workflow.connect(smoothest, 'volume', cluster, 'volume')
    # fsl.Cluster's connectivity is a neighbourhood definition (6, 18 or 26);
    # csize is a minimum cluster size and is not a valid value here
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = thr
    cluster.inputs.out_threshold_file = True
    workflow.connect(dataflow,'datagrabber.func',cluster,'in_file')
    
    workflow.connect(cluster, 'threshold_file',imgflow,'inputspec.in_file')
    #workflow.connect(dataflow,'func',imgflow, 'inputspec.in_file')
    workflow.connect(dataflow,'datagrabber.mask',imgflow, 'inputspec.mask_file')
    workflow.connect(dataflow,'datagrabber.reg',imgflow, 'inputspec.reg_file')
    
    workflow.connect(fssource,'brain',imgflow, 'inputspec.anat_file')
    
    workflow.connect(infosource, 'subject_id', imgflow, 'inputspec.subject_id')
    imgflow.inputs.inputspec.fsdir = prep_c.surf_dir
    
    writereport = pe.Node(util.Function( input_names = ["cs",
                                                        "locations",
                                                        "percents",
                                                        "in_files",
                                                        "des_mat_cov",
                                                        "des_mat",
                                                        "subjects",
                                                        "meanval",
                                                        "imagefiles",
                                                        "surface_ims",
                                                        'thr',
                                                        'csize',
                                                        'fwhm',
                                                        'onset_images'],
                                        output_names =["report",
                                                       "elements"],
                                        function = write_report),
                          name = "writereport" )
    
    
    # add plot detrended timeseries with onsets if block
    if c.is_block_design:
        plottseries = tsnr_roi(plot=True, onsets=True)
        plottseries.inputs.inputspec.TR = prep_c.TR
        workflow.connect(dataflow,'datagrabber.reg',plottseries, 'inputspec.reg_file')
        workflow.connect(fssource, ('aparc_aseg',pickfirst), plottseries, 'inputspec.aparc_aseg')
        workflow.connect(infosource, 'subject_id', plottseries, 'inputspec.subject')
        workflow.connect(dataflow, 'datagrabber.detrended', plottseries,'inputspec.tsnr_file')

        subjectinfo = pe.Node(util.Function(input_names=['subject_id'], output_names=['output']), name='subjectinfo')
        subjectinfo.inputs.function_str = first_c.subjectinfo

        workflow.connect(infosource,'subject_id', subjectinfo, 'subject_id')
        workflow.connect(subjectinfo, 'output', plottseries, 'inputspec.onsets')
        plottseries.inputs.inputspec.input_units = first_c.input_units
        workflow.connect(plottseries,'outputspec.out_file',writereport,'onset_images')
    else:
        writereport.inputs.onset_images = None
    
    
    
    #writereport = pe.Node(interface=ReportSink(),name='reportsink')
    #writereport.inputs.base_directory = os.path.join(c.sink_dir,'analyses','func')
    
    workflow.connect(infosource, 'subject_id', writereport, 'subjects')
    #workflow.connect(infosource, 'subject_id', writereport, 'container')
    try:
        infosource1 = dataflow.get_node('fwhm_iterable')
        workflow.connect(infosource1, 'fwhm', writereport, 'fwhm')
    except Exception:
        writereport.inputs.fwhm = prep_c.fwhm[0]

    writereport.inputs.thr = thr
    writereport.inputs.csize = csize
    
    makesurfaceplots = pe.Node(util.Function(input_names = ['con_image',
                                                            'reg_file',
                                                            'subject_id',
                                                            'thr',
                                                            'sd'],
                                              output_names = ['surface_ims',
                                                              'surface_mgzs'],
                                              function = make_surface_plots),
                               name = 'make_surface_plots')
    
    workflow.connect(infosource, 'subject_id', makesurfaceplots, 'subject_id')
    
    makesurfaceplots.inputs.thr = thr
    makesurfaceplots.inputs.sd = prep_c.surf_dir
    
    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.join(c.sink_dir)
    
    workflow.connect(infosource,'subject_id',sinker,'container')
    workflow.connect(dataflow,'datagrabber.func',makesurfaceplots,'con_image')
    workflow.connect(dataflow,'datagrabber.reg',makesurfaceplots,'reg_file')
    
    workflow.connect(dataflow, 'datagrabber.des_mat', writereport, 'des_mat')
    workflow.connect(dataflow, 'datagrabber.des_mat_cov', writereport, 'des_mat_cov')
    workflow.connect(imgflow, 'outputspec.cs', writereport, 'cs')
    workflow.connect(imgflow, 'outputspec.locations', writereport, 'locations')
    workflow.connect(imgflow, 'outputspec.percents', writereport, 'percents')
    workflow.connect(imgflow, 'outputspec.meanval', writereport, 'meanval')
    workflow.connect(imgflow,'outputspec.imagefiles', writereport, 'imagefiles')
    
    workflow.connect(dataflow, 'datagrabber.func', writereport, 'in_files')
    workflow.connect(makesurfaceplots,'surface_ims', writereport, 'surface_ims')
    if not fx:
        workflow.connect(writereport,"report",sinker,"first_level_report")
    else:
        workflow.connect(writereport,"report",sinker,"fixed_fx_report")
    
    
    return workflow
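
# The snippet above references a `pickfirst` helper (used on the aparc_aseg
# connection) that is not defined here. A minimal sketch, assuming it only
# needs to unwrap single-element lists coming from FreeSurferSource:
def pickfirst(files):
    """Return the first element when given a list; pass through otherwise."""
    if isinstance(files, list):
        return files[0]
    return files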
Example #19
def model_fitting(source_img, prepped_img, subject_info, aroma, task, args,
                  mask_file, run_number):
    # Get the necessary parameters
    outputdir = args.outputdir
    fwhm = args.fwhm
    cthresh = args.cthresh
    alpha = args.alpha

    # Make a task directory in the output folder
    if run_number > 0:
        taskdir = os.path.join(outputdir,
                               task + "_run-0" + str(run_number + 1))
    else:
        taskdir = os.path.join(outputdir, task)

    if not os.path.exists(taskdir):
        os.mkdir(taskdir)
    os.mkdir(os.path.join(taskdir, 'stats'))
    os.mkdir(os.path.join(taskdir, 'figs'))

    processed_image = preprocess(aroma, fwhm, prepped_img, mask_file, taskdir,
                                 task)

    task_vs_baseline = [
        task + " vs baseline", 'T', [task, 'baseline'], [1, -1]
    ]  # set up contrasts
    contrasts = [task_vs_baseline]
    """
    Model fitting workflow

    Inputs::
         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.model_serial_correlations
         inputspec.bases
    Outputs::
         outputspec.copes
         outputspec.varcopes
         outputspec.dof_file
         outputspec.zfiles
         outputspec.parameter_estimates
    """

    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)
    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")  # generate design info
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name='modelestimate',
        iterfield=['design_file', 'in_file', 'tcon_file'])
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=['in1'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'zfiles', 'parameter_estimates'
    ]),
                         name='outputspec')

    modelfit.connect([
        (modelspec, inputspec, [('session_info', 'session_info')]),
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (merge_contrasts, outputspec, [('out', 'zfiles')]),
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file')]),
    ])

    modelfit.connect([
        (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                   ('fcon_file', 'fcon_file')]),
        (modelestimate, merge_contrasts, [('zstats', 'in1'),
                                          ('zfstats', 'in2')]),
        (modelestimate, outputspec, [('copes', 'copes'),
                                     ('varcopes', 'varcopes')]),
    ])

    # Define inputs to workflow
    modelspec.inputs.functional_runs = processed_image
    inputspec.inputs.functional_data = processed_image
    modelspec.inputs.subject_info = subject_info
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.film_threshold = 10.0
    inputspec.inputs.interscan_interval = source_img.entities['RepetitionTime']
    inputspec.inputs.bases = {
        'gamma': {
            'gammasigma': 3,
            'gammadelay': 6,
            'derivs': True
        }
    }
    inputspec.inputs.contrasts = contrasts

    # Run the model-fitting pipeline. Main outputs are a feat directory (w/ functional img) and a design.mat file
    res = modelfit.run()

    # outputs
    output_txt = open(os.path.join(taskdir, task + '_outputs.txt'), 'w')
    print_outputs(output_txt, res)

    # The third node in the executed graph is the FILMGLS node; take the
    # first (and only) element of its 'zstats' output
    z_img = list(res.nodes)[2].result.outputs.zstats[0]

    # Use False Discovery Rate theory to correct for multiple comparisons
    fdr_thresh_img, fdr_threshold = thresholding.map_threshold(
        stat_img=z_img,
        mask_img=mask_file,
        alpha=alpha,
        height_control='fdr',
        cluster_threshold=cthresh)
    print("Thresholding at FDR corrected threshold of " + str(fdr_threshold))
    fdr_thresh_img_path = os.path.join(taskdir,
                                       task + '_fdr_thresholded_z.nii.gz')
    nibabel.save(fdr_thresh_img, fdr_thresh_img_path)

    # Do a cluster analysis using the FDR corrected threshold on the original z_img
    print("Performing cluster analysis.")
    cl = fsl.Cluster(in_file=z_img, threshold=fdr_threshold)
    cluster_file = os.path.join(taskdir, 'stats', task + "_cluster_stats.txt")
    cluster_analysis(cluster_file, cl)

    # Resample the result image with AFNI
    resample_fdr_thresh_img_path = os.path.join(
        taskdir, task + '_fdr_thresholded_z_resample.nii.gz')
    print("Resampling thresholded image to MNI space")
    # `template` is assumed to be defined at module level in the original code
    resample = afni.Resample(master=template,
                             out_file=resample_fdr_thresh_img_path,
                             in_file=fdr_thresh_img_path)
    resample.run()
    os.remove(fdr_thresh_img_path)

    print("Image to be returned: " + resample_fdr_thresh_img_path)

    return resample_fdr_thresh_img_path
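
# `cluster_analysis` (like `print_outputs`) is not shown in this example.
# A minimal sketch, assuming it just runs the configured fsl.Cluster
# interface and saves the table the `cluster` command prints to stdout:
def cluster_analysis(cluster_file, cl):
    """Run an fsl.Cluster interface and write its stdout table to a file."""
    res = cl.run()
    with open(cluster_file, 'w') as f:
        f.write(res.runtime.stdout)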
Example #20
# Film_Gls is assumed to be a Node(fsl.FILMGLS(), ...) defined earlier in the
# original script, along with `design`, `t_contrast` and `f_contrast`
Film_Gls.inputs.design_file = design
Film_Gls.inputs.tcon_file = t_contrast
Film_Gls.inputs.fcon_file = f_contrast
Film_Gls.inputs.threshold = 1000.0
Film_Gls.inputs.smooth_autocorr = True

#-----------------------------------------------------------------------------------------------------
# In[15]:
#Estimate smoothness of the image
Smooth_Est = Node(fsl.SmoothEstimate(), name = 'Smooth_Estimation')
Smooth_Est.inputs.dof = 148 #150 volumes and only one regressor

#-----------------------------------------------------------------------------------------------------
# In[15]:
#Clustering on the statistical output of the t-contrast
Clustering_t = Node(fsl.Cluster(), name = 'Clustering_t_Contrast')
Clustering_t.inputs.threshold = 2.3
Clustering_t.inputs.pthreshold = 0.05
Clustering_t.inputs.out_threshold_file = 'thresh_zstat1.nii.gz'
# Clustering_t.inputs.out_index_file = 'mask_zstat1'
# Clustering_t.inputs.out_localmax_txt_file = 'localmax'



#-----------------------------------------------------------------------------------------------------
# In[15]:
#Clustering on the statistical output of the f-contrast
Clustering_f = Node(fsl.Cluster(), name = 'Clustering_f_Contrast')
Clustering_f.inputs.threshold = 2.3
Clustering_f.inputs.pthreshold = 0.05
Clustering_f.inputs.out_threshold_file = 'thresh_zfstat1.nii.gz'
Example #21
def second_level_wf(output_dir, bids_ref, name='wf_2nd_level'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['group_mask', 'in_copes', 'in_varcopes']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'zstats_raw', 'zstats_fwe', 'zstats_clust', 'clust_index_file',
        'clust_localmax_txt_file'
    ]),
                         name='outputnode')

    # Configure FSL 2nd level analysis
    l2_model = pe.Node(fsl.L2Model(), name='l2_model')
    flameo_ols = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo_ols')

    merge_copes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes')
    merge_varcopes = pe.Node(fsl.Merge(dimension='t'), name='merge_varcopes')

    # Thresholding - FDR ################################################
    # Calculate pvalues with ztop
    fdr_ztop = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                       name='fdr_ztop')
    # Find FDR threshold: fdr -i zstat1_pval -m <group_mask> -q 0.05
    # fdr_th = <write Nipype interface for fdr> (one possible sketch is given
    # after this example)
    # Apply threshold:
    # fslmaths zstat1_pval -mul -1 -add 1 -thr <fdr_th> -mas <group_mask> \
    #     zstat1_thresh_vox_fdr_pstat1

    # Thresholding - FWE ################################################
    # smoothest -r %s -d %i -m %s
    smoothness = pe.Node(fsl.SmoothEstimate(), name='smoothness')
    # ptoz 0.025 -g %f
    # p = 0.05 / 2 for 2-tailed test
    fwe_ptoz = pe.Node(PtoZ(pvalue=0.025), name='fwe_ptoz')
    # fslmaths %s -uthr %s -thr %s nonsignificant
    # fslmaths %s -sub nonsignificant zstat1_thresh
    fwe_nonsig0 = pe.Node(fsl.Threshold(direction='above'), name='fwe_nonsig0')
    fwe_nonsig1 = pe.Node(fsl.Threshold(direction='below'), name='fwe_nonsig1')
    fwe_thresh = pe.Node(fsl.BinaryMaths(operation='sub'), name='fwe_thresh')

    # Thresholding - Cluster ############################################
    # cluster -i %s -c %s -t 3.2 -p 0.025 -d %s --volume=%s  \
    #     --othresh=thresh_cluster_fwe_zstat1 --connectivity=26 --mm
    cluster_kwargs = {
        'connectivity': 26,
        'threshold': 3.2,
        'pthreshold': 0.025,
        'out_threshold_file': True,
        'out_index_file': True,
        'out_localmax_txt_file': True
    }
    cluster_pos = pe.Node(fsl.Cluster(**cluster_kwargs), name='cluster_pos')
    cluster_neg = pe.Node(fsl.Cluster(**cluster_kwargs), name='cluster_neg')
    zstat_inv = pe.Node(fsl.BinaryMaths(operation='mul', operand_value=-1),
                        name='zstat_inv')
    cluster_inv = pe.Node(fsl.BinaryMaths(operation='mul', operand_value=-1),
                          name='cluster_inv')
    cluster_all = pe.Node(fsl.BinaryMaths(operation='add'), name='cluster_all')

    ds_zraw = pe.Node(GroupDerivativesDataSink(base_directory=str(output_dir),
                                               keep_dtype=False,
                                               suffix='zstat',
                                               sub='all'),
                      name='ds_zraw',
                      run_without_submitting=True)
    ds_zraw.inputs.source_file = bids_ref

    ds_zfwe = pe.Node(GroupDerivativesDataSink(base_directory=str(output_dir),
                                               keep_dtype=False,
                                               suffix='zstat',
                                               desc='fwe',
                                               sub='all'),
                      name='ds_zfwe',
                      run_without_submitting=True)
    ds_zfwe.inputs.source_file = bids_ref

    ds_zclust = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='zstat',
        desc='clust',
        sub='all'),
                        name='ds_zclust',
                        run_without_submitting=True)
    ds_zclust.inputs.source_file = bids_ref

    ds_clustidx_pos = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='pclusterindex',
        sub='all'),
                              name='ds_clustidx_pos',
                              run_without_submitting=True)
    ds_clustidx_pos.inputs.source_file = bids_ref

    ds_clustlmax_pos = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='plocalmax',
        desc='intask',
        sub='all'),
                               name='ds_clustlmax_pos',
                               run_without_submitting=True)
    ds_clustlmax_pos.inputs.source_file = bids_ref

    ds_clustidx_neg = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='nclusterindex',
        sub='all'),
                              name='ds_clustidx_neg',
                              run_without_submitting=True)
    ds_clustidx_neg.inputs.source_file = bids_ref

    ds_clustlmax_neg = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='nlocalmax',
        desc='intask',
        sub='all'),
                               name='ds_clustlmax_neg',
                               run_without_submitting=True)
    ds_clustlmax_neg.inputs.source_file = bids_ref

    workflow.connect([
        (inputnode, l2_model, [(('in_copes', _len), 'num_copes')]),
        (inputnode, flameo_ols, [('group_mask', 'mask_file')]),
        (inputnode, smoothness, [('group_mask', 'mask_file'),
                                 (('in_copes', _dof), 'dof')]),
        (inputnode, merge_copes, [('in_copes', 'in_files')]),
        (inputnode, merge_varcopes, [('in_varcopes', 'in_files')]),
        (l2_model, flameo_ols, [('design_mat', 'design_file'),
                                ('design_con', 't_con_file'),
                                ('design_grp', 'cov_split_file')]),
        (merge_copes, flameo_ols, [('merged_file', 'cope_file')]),
        (merge_varcopes, flameo_ols, [('merged_file', 'var_cope_file')]),
        (flameo_ols, smoothness, [('res4d', 'residual_fit_file')]),
        (flameo_ols, fwe_nonsig0, [('zstats', 'in_file')]),
        (fwe_nonsig0, fwe_nonsig1, [('out_file', 'in_file')]),
        (smoothness, fwe_ptoz, [('resels', 'resels')]),
        (fwe_ptoz, fwe_nonsig0, [('zstat', 'thresh')]),
        (fwe_ptoz, fwe_nonsig1, [(('zstat', _neg), 'thresh')]),
        (flameo_ols, fwe_thresh, [('zstats', 'in_file')]),
        (fwe_nonsig1, fwe_thresh, [('out_file', 'operand_file')]),
        (flameo_ols, cluster_pos, [('zstats', 'in_file')]),
        (merge_copes, cluster_pos, [('merged_file', 'cope_file')]),
        (smoothness, cluster_pos, [('volume', 'volume'), ('dlh', 'dlh')]),
        (flameo_ols, zstat_inv, [('zstats', 'in_file')]),
        (zstat_inv, cluster_neg, [('out_file', 'in_file')]),
        (cluster_neg, cluster_inv, [('threshold_file', 'in_file')]),
        (merge_copes, cluster_neg, [('merged_file', 'cope_file')]),
        (smoothness, cluster_neg, [('volume', 'volume'), ('dlh', 'dlh')]),
        (cluster_pos, cluster_all, [('threshold_file', 'in_file')]),
        (cluster_inv, cluster_all, [('out_file', 'operand_file')]),
        (flameo_ols, ds_zraw, [('zstats', 'in_file')]),
        (fwe_thresh, ds_zfwe, [('out_file', 'in_file')]),
        (cluster_all, ds_zclust, [('out_file', 'in_file')]),
        (cluster_pos, ds_clustidx_pos, [('index_file', 'in_file')]),
        (cluster_pos, ds_clustlmax_pos, [('localmax_txt_file', 'in_file')]),
        (cluster_neg, ds_clustidx_neg, [('index_file', 'in_file')]),
        (cluster_neg, ds_clustlmax_neg, [('localmax_txt_file', 'in_file')]),
    ])
    return workflow
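
# The workflow above relies on three small helpers (_len, _dof, _neg) that
# are not shown, and leaves the FDR threshold step as a comment. Below are
# plausible one-liners for the helpers, plus one way to wrap FSL's `fdr`
# command-line tool in a Function node; parsing the threshold from the last
# stdout token is an assumption about `fdr`'s output format.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def _len(in_files):
    return len(in_files)  # number of copes, for L2Model.num_copes

def _dof(in_files):
    return len(in_files) - 1  # dof for a one-sample group model

def _neg(val):
    return -val  # lower-tail counterpart of the FWE z threshold

def _run_fsl_fdr(in_file, mask_file, qvalue=0.05):
    """Run FSL's `fdr` and return the probability threshold it reports."""
    import subprocess
    proc = subprocess.run(['fdr', '-i', in_file, '-m', mask_file,
                           '-q', str(qvalue)],
                          capture_output=True, text=True, check=True)
    return float(proc.stdout.split()[-1])

fdr_th = pe.Node(niu.Function(input_names=['in_file', 'mask_file', 'qvalue'],
                              output_names=['fdr_threshold'],
                              function=_run_fsl_fdr),
                 name='fdr_th')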