def smooth_est(zstat):
    import nipype.interfaces.fsl as fsl
    import os
    template_mask = '/media/amr/Amr_4TB/Work/October_Acquistion/Anat_Template_Enhanced_Mask.nii.gz'

    smooth_est = fsl.SmoothEstimate()

    smooth_est.inputs.mask_file = template_mask

    # Crude filename parsing: characters [-25:-21] of the zstat path encode
    # the stimulation frequency ('10Hz', '20Hz', or '40Hz').
    if zstat[-25:-21] == '10Hz':
        res4d = '/media/amr/Amr_4TB/Work/stimulation/stimulation_3rd_level/10Hz/flameo/res4d.nii.gz'
        smooth_est.inputs.dof = 7
    elif zstat[-25:-21] == '20Hz':
        res4d = '/media/amr/Amr_4TB/Work/stimulation/stimulation_3rd_level/20Hz/flameo/res4d.nii.gz'
        smooth_est.inputs.dof = 7
    elif zstat[-25:-21] == '40Hz':
        res4d = '/media/amr/Amr_4TB/Work/stimulation/stimulation_3rd_level/40Hz/flameo/res4d.nii.gz'
        smooth_est.inputs.dof = 6
    else:
        raise ValueError('Unrecognized frequency tag in: {}'.format(zstat))

    print(res4d)

    smooth_est.inputs.residual_fit_file = res4d
    smooth_est_outputs = smooth_est.run()

    print(zstat[-25:-21])
    dlh = smooth_est_outputs.outputs.dlh
    volume = smooth_est_outputs.outputs.volume
    resels = smooth_est_outputs.outputs.resels

    return dlh, volume, resels
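
# Usage sketch (the zstat path is hypothetical, chosen so that characters
# [-25:-21] read '10Hz'; the threshold values are illustrative):
import nipype.interfaces.fsl as fsl

zstat = '/media/amr/Amr_4TB/Work/stimulation/stimulation_3rd_level/10Hz/flameo/zstat1.nii.gz'
dlh, volume, resels = smooth_est(zstat)

# feed the estimates into GRF-based cluster correction
cluster = fsl.Cluster()
cluster.inputs.in_file = zstat
cluster.inputs.threshold = 2.3        # cluster-forming z threshold
cluster.inputs.pthreshold = 0.05      # cluster-wise significance level
cluster.inputs.dlh = dlh
cluster.inputs.volume = volume
cluster.inputs.out_threshold_file = True
cluster.run()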
Example #2
def smooth_est(zstat):
    import nipype.interfaces.fsl as fsl
    import os
    study_mask = '/Volumes/Amr_1TB/NARPS/narps_templateBrainExtractionMask.nii.gz'

    smooth_est = fsl.SmoothEstimate()
    smooth_est.inputs.dof = 53
    smooth_est.inputs.mask_file = study_mask

    # Crude filename parsing: characters [-36:-32] of the zstat path encode
    # the contrast ('gain' or 'loss').
    if zstat[-36:-32] == 'gain':
        res4d = '/media/amr/Amr_4TB/NARPS/output_narps_proc_3rd_level/gain_stat_flameo_neg/+/res4d.nii.gz'
    elif zstat[-36:-32] == 'loss':
        res4d = '/media/amr/Amr_4TB/NARPS/output_narps_proc_3rd_level/loss_stat_flameo_neg/+/res4d.nii.gz'
    else:
        raise ValueError('Unrecognized contrast tag in: {}'.format(zstat))

    print(res4d)

    smooth_est.inputs.residual_fit_file = res4d
    smooth_est_outputs = smooth_est.run()

    print(zstat[-36:-32])
    dlh = smooth_est_outputs.outputs.dlh
    volume = smooth_est_outputs.outputs.volume
    resels = smooth_est_outputs.outputs.resels

    return dlh, volume, resels
Example #3
def smooth_est(contrast):
    import nipype.interfaces.fsl as fsl
    import os
    standard_mask = '/usr/local/fsl/data/standard/MNI152_T1_1mm_brain_mask.nii.gz'

    smooth_est = fsl.SmoothEstimate()
    smooth_est.inputs.dof = 15
    smooth_est.inputs.mask_file = standard_mask

    # Crude filename parsing: characters [-54:-50] of the contrast path encode
    # the contrast name ('gain' or 'loss').
    if contrast[-54:-50] == 'gain':
        res4d = '/Users/amr/Documents/mixed_gambling_poldrack_2007/output_MGT_poldrack_proc_3rd_level/gain_stat_flameo_positive_and_negative/+/res4d.nii.gz'
    elif contrast[-54:-50] == 'loss':
        res4d = '/Users/amr/Documents/mixed_gambling_poldrack_2007/output_MGT_poldrack_proc_3rd_level/loss_stat_flameo_positive_and_negative/+/res4d.nii.gz'
    else:
        raise ValueError('Unrecognized contrast tag in: {}'.format(contrast))

    print(res4d)

    smooth_est.inputs.residual_fit_file = res4d
    smooth_est_outputs = smooth_est.run()

    dlh = smooth_est_outputs.outputs.dlh
    volume = smooth_est_outputs.outputs.volume
    resels = smooth_est_outputs.outputs.resels

    return dlh, volume, resels
Example #4
def combine_report(c, first_c=foo0, prep_c=foo1, fx_c=None, thr=2.326, csize=30, fx=False):
    # foo0 and foo1 are config objects defined elsewhere in the source module
    import os
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio

    if not fx:
        workflow = pe.Workflow(name='first_level_report')
        #dataflow = get_data(first_c)
    else:
        workflow = pe.Workflow(name='fixedfx_report')
        #dataflow =  get_fx_data(fx_c)
    
    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')

    """
    if c.test_mode:
        infosource.iterables = ('subject_id', [c.subjects[0]])
    else:
        infosource.iterables = ('subject_id', c.subjects)
    
    infosource1 = pe.Node(util.IdentityInterface(fields=['fwhm']),
                         name='fwhms')
    infosource1.iterables = ('fwhm', prep_c.fwhm)
    """

    dataflow = c.datagrabber.create_dataflow()

    fssource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')
    
    #workflow.connect(infosource, 'subject_id', dataflow, 'subject_id')
    #workflow.connect(infosource1, 'fwhm', dataflow, 'fwhm')

    infosource = dataflow.get_node("subject_id_iterable")

    workflow.connect(infosource, 'subject_id', fssource, 'subject_id')
    fssource.inputs.subjects_dir = prep_c.surf_dir
    
    imgflow = img_wkflw(thr=thr,csize=csize)
    
    # adding cluster correction before sending to imgflow
    
    smoothest = pe.MapNode(fsl.SmoothEstimate(), name='smooth_estimate', iterfield=['zstat_file'])
    workflow.connect(dataflow,'datagrabber.func', smoothest, 'zstat_file')
    workflow.connect(dataflow,'datagrabber.mask',smoothest, 'mask_file')
    
    cluster = pe.MapNode(fsl.Cluster(), name='cluster', iterfield=['in_file','dlh','volume'])
    workflow.connect(smoothest,'dlh', cluster, 'dlh')
    workflow.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = csize
    cluster.inputs.threshold = thr
    cluster.inputs.out_threshold_file = True
    workflow.connect(dataflow,'datagrabber.func',cluster,'in_file')
    
    workflow.connect(cluster, 'threshold_file',imgflow,'inputspec.in_file')
    #workflow.connect(dataflow,'func',imgflow, 'inputspec.in_file')
    workflow.connect(dataflow,'datagrabber.mask',imgflow, 'inputspec.mask_file')
    workflow.connect(dataflow,'datagrabber.reg',imgflow, 'inputspec.reg_file')
    
    workflow.connect(fssource,'brain',imgflow, 'inputspec.anat_file')
    
    workflow.connect(infosource, 'subject_id', imgflow, 'inputspec.subject_id')
    imgflow.inputs.inputspec.fsdir = prep_c.surf_dir
    
    writereport = pe.Node(util.Function( input_names = ["cs",
                                                        "locations",
                                                        "percents",
                                                        "in_files",
                                                        "des_mat_cov",
                                                        "des_mat",
                                                        "subjects",
                                                        "meanval",
                                                        "imagefiles",
                                                        "surface_ims",
                                                        'thr',
                                                        'csize',
                                                        'fwhm',
                                                        'onset_images'],
                                        output_names =["report",
                                                       "elements"],
                                        function = write_report),
                          name = "writereport" )
    
    
    # add plot detrended timeseries with onsets if block
    if c.is_block_design:
        plottseries = tsnr_roi(plot=True, onsets=True)
        plottseries.inputs.inputspec.TR = prep_c.TR
        workflow.connect(dataflow,'datagrabber.reg',plottseries, 'inputspec.reg_file')
        workflow.connect(fssource, ('aparc_aseg',pickfirst), plottseries, 'inputspec.aparc_aseg')
        workflow.connect(infosource, 'subject_id', plottseries, 'inputspec.subject')
        workflow.connect(dataflow, 'datagrabber.detrended', plottseries,'inputspec.tsnr_file')

        subjectinfo = pe.Node(util.Function(input_names=['subject_id'], output_names=['output']), name='subjectinfo')
        subjectinfo.inputs.function_str = first_c.subjectinfo

        workflow.connect(infosource,'subject_id', subjectinfo, 'subject_id')
        workflow.connect(subjectinfo, 'output', plottseries, 'inputspec.onsets')
        plottseries.inputs.inputspec.input_units = first_c.input_units
        workflow.connect(plottseries,'outputspec.out_file',writereport,'onset_images')
    else:
        writereport.inputs.onset_images = None
    
    
    
    #writereport = pe.Node(interface=ReportSink(),name='reportsink')
    #writereport.inputs.base_directory = os.path.join(c.sink_dir,'analyses','func')
    
    workflow.connect(infosource, 'subject_id', writereport, 'subjects')
    #workflow.connect(infosource, 'subject_id', writereport, 'container')
    try:
        infosource1 = dataflow.get_node('fwhm_iterable')
        workflow.connect(infosource1, 'fwhm', writereport, 'fwhm')
    except Exception:
        # no fwhm iterable in the dataflow; fall back to the first configured value
        writereport.inputs.fwhm = prep_c.fwhm[0]

    writereport.inputs.thr = thr
    writereport.inputs.csize = csize
    
    makesurfaceplots = pe.Node(util.Function(input_names = ['con_image',
                                                            'reg_file',
                                                            'subject_id',
                                                            'thr',
                                                            'sd'],
                                              output_names = ['surface_ims',
                                                              'surface_mgzs'],
                                              function = make_surface_plots),
                               name = 'make_surface_plots')
    
    workflow.connect(infosource, 'subject_id', makesurfaceplots, 'subject_id')
    
    makesurfaceplots.inputs.thr = thr
    makesurfaceplots.inputs.sd = prep_c.surf_dir
    
    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.join(c.sink_dir)
    
    workflow.connect(infosource,'subject_id',sinker,'container')
    workflow.connect(dataflow,'datagrabber.func',makesurfaceplots,'con_image')
    workflow.connect(dataflow,'datagrabber.reg',makesurfaceplots,'reg_file')
    
    workflow.connect(dataflow, 'datagrabber.des_mat', writereport, 'des_mat')
    workflow.connect(dataflow, 'datagrabber.des_mat_cov', writereport, 'des_mat_cov')
    workflow.connect(imgflow, 'outputspec.cs', writereport, 'cs')
    workflow.connect(imgflow, 'outputspec.locations', writereport, 'locations')
    workflow.connect(imgflow, 'outputspec.percents', writereport, 'percents')
    workflow.connect(imgflow, 'outputspec.meanval', writereport, 'meanval')
    workflow.connect(imgflow,'outputspec.imagefiles', writereport, 'imagefiles')
    
    workflow.connect(dataflow, 'datagrabber.func', writereport, 'in_files')
    workflow.connect(makesurfaceplots,'surface_ims', writereport, 'surface_ims')
    if not fx:
        workflow.connect(writereport,"report",sinker,"first_level_report")
    else:
        workflow.connect(writereport,"report",sinker,"fixed_fx_report")
    
    
    return workflow
Example #5
#==========================================================================================================================================================

#==========================================================================================================================================================
#Create design for 2nd level
create_l2_design = Node(fsl.model.L2Model(), name='create_l2_design')
create_l2_design.inputs.num_copes = no_runs

#==========================================================================================================================================================
#perform higher level model fits

flameo_fit_copes1 = Node(fsl.model.FLAMEO(), name='flameo_fit_copes1')
flameo_fit_copes1.inputs.run_mode = 'fe'

#==========================================================================================================================================================
smooth_est_copes1 = Node(fsl.SmoothEstimate(), name='smooth_estimation_copes1')
smooth_est_copes1.inputs.dof = 3  # degrees of freedom of the fixed-effects fit (number of copes - 1)

#==========================================================================================================================================================
#mask zstat1

mask_zstat1 = Node(fsl.ApplyMask(), name='mask_zstat1')
mask_zstat1.inputs.out_file = 'thresh_zstat1.nii.gz'

#==========================================================================================================================================================
#cluster copes1
cluster_copes1 = Node(fsl.model.Cluster(), name='cluster_copes1')

cluster_copes1.inputs.threshold = 2.3
cluster_copes1.inputs.pthreshold = 0.05
cluster_copes1.inputs.connectivity = 26
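
#==========================================================================================================================================================
# The nodes above are declared but not yet connected. A minimal wiring sketch,
# assuming a parent workflow and that cope/varcope/mask inputs are provided
# elsewhere (the names below are hypothetical):
from nipype import Workflow

wf = Workflow(name='higher_level')
wf.connect([
    (create_l2_design, flameo_fit_copes1, [('design_mat', 'design_file'),
                                           ('design_con', 't_con_file'),
                                           ('design_grp', 'cov_split_file')]),
    # smoothness is estimated from the FLAMEO residuals (dof set above);
    # smooth_est_copes1.inputs.mask_file must also be set to the group mask
    (flameo_fit_copes1, smooth_est_copes1, [('res4d', 'residual_fit_file')]),
    # with a single contrast, FLAMEO's zstats output collapses to one file
    (flameo_fit_copes1, mask_zstat1, [('zstats', 'in_file')]),
    (mask_zstat1, cluster_copes1, [('out_file', 'in_file')]),
    (smooth_est_copes1, cluster_copes1, [('dlh', 'dlh'),
                                         ('volume', 'volume')]),
])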
Example #6
# f_contrast = '/media/amr/Amr_4TB/Work/stimulation/1st_Level_Designs/design.fts'

design = '/media/amr/Amr_4TB/Work/stimulation/1st_Level_Designs/design_no_filter.mat'
t_contrast = '/media/amr/Amr_4TB/Work/stimulation/1st_Level_Designs/design_no_filter.con'
f_contrast = '/media/amr/Amr_4TB/Work/stimulation/1st_Level_Designs/design_no_filter.fts'

film_gls = Node(fsl.FILMGLS(), name='Fit_Design_to_Timeseries')
film_gls.inputs.design_file = design
film_gls.inputs.tcon_file = t_contrast
film_gls.inputs.fcon_file = f_contrast
film_gls.inputs.threshold = 1000.0
film_gls.inputs.smooth_autocorr = True

# ============================================================================================================================
# Estimate smootheness of the image
smooth_est = Node(fsl.SmoothEstimate(), name='smooth_estimation')
smooth_est.inputs.dof = 147  # residual degrees of freedom of the first-level fit (time points minus regressors)

# ============================================================================================================================
# ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
# ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
# ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
# ============================================================================================================================

mask_zstat = Node(fsl.ApplyMask(), name='mask_zstat')
mask_zstat.inputs.out_file = 'thresh_zstat.nii.gz'

# ============================================================================================================================
clustering_t = Node(fsl.Cluster(), name='clustering_t_contrast')
clustering_t.inputs.threshold = 2.3
clustering_t.inputs.pthreshold = 0.05
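
# ============================================================================================================================
# Wiring sketch for the nodes above (assumes a single t contrast and that the
# in_file/mask_file inputs are set elsewhere; the workflow name is hypothetical):
from nipype import Workflow

wf = Workflow(name='first_level')
wf.connect([
    # estimate smoothness from the FILM residuals, using the dof given above
    (film_gls, smooth_est, [('residual4d', 'residual_fit_file')]),
    (film_gls, mask_zstat, [('zstats', 'in_file')]),
    (mask_zstat, clustering_t, [('out_file', 'in_file')]),
    (smooth_est, clustering_t, [('dlh', 'dlh'), ('volume', 'volume')]),
])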
Example #7
File: mixedfx.py, Project: ningmengxu/lyman
def create_volume_mixedfx_workflow(name="volume_group",
                                   subject_list=None,
                                   regressors=None,
                                   contrasts=None,
                                   exp_info=None):

    # Handle default arguments
    if subject_list is None:
        subject_list = []
    if regressors is None:
        regressors = dict(group_mean=[])
    if contrasts is None:
        contrasts = [["group_mean", "T", ["group_mean"], [1]]]
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define workflow inputs
    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "varcopes", "dofs"]),
        "inputnode")

    # Merge the fixed effect summary images into one 4D image
    merge = Node(MergeAcrossSubjects(regressors=regressors), "merge")

    # Make a simple design
    design = Node(fsl.MultipleRegressDesign(contrasts=contrasts), "design")

    # Fit the mixed effects model
    flameo = Node(fsl.FLAMEO(run_mode=exp_info["flame_mode"]), "flameo")

    # Estimate the smoothness of the data
    smoothest = Node(fsl.SmoothEstimate(), "smoothest")

    # Correct for multiple comparisons
    cluster = Node(
        fsl.Cluster(threshold=exp_info["cluster_zthresh"],
                    pthreshold=exp_info["grf_pthresh"],
                    out_threshold_file=True,
                    out_index_file=True,
                    out_localmax_txt_file=True,
                    peak_distance=exp_info["peak_distance"],
                    use_mm=True), "cluster")

    # Project the mask and thresholded zstat onto the surface
    surfproj = create_surface_projection_workflow(exp_info=exp_info)

    # Segment the z stat image with a watershed algorithm
    watershed = Node(Watershed(), "watershed")

    # Make static report images in the volume
    report = Node(MFXReport(), "report")
    report.inputs.subjects = subject_list

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface([
            "copes", "varcopes", "mask_file", "flameo_stats", "thresh_zstat",
            "surf_zstat", "surf_mask", "cluster_image", "seg_file",
            "peak_file", "lut_file", "report", "json_file"
        ]), "outputnode")

    # Define and connect up the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, merge, [("copes", "cope_files"),
                            ("varcopes", "varcope_files"),
                            ("dofs", "dof_files")]),
        (inputnode, saveparams, [("copes", "in_file")]),
        (merge, flameo, [("cope_file", "cope_file"),
                         ("varcope_file", "var_cope_file"),
                         ("dof_file", "dof_var_cope_file"),
                         ("mask_file", "mask_file")]),
        (merge, design, [("regressors", "regressors")]),
        (design, flameo, [("design_con", "t_con_file"),
                          ("design_grp", "cov_split_file"),
                          ("design_mat", "design_file")]),
        (flameo, smoothest, [("zstats", "zstat_file")]),
        (merge, smoothest, [("mask_file", "mask_file")]),
        (smoothest, cluster, [("dlh", "dlh"), ("volume", "volume")]),
        (flameo, cluster, [("zstats", "in_file")]),
        (cluster, watershed, [("threshold_file", "zstat_file"),
                              ("localmax_txt_file", "localmax_file")]),
        (merge, report, [("mask_file", "mask_file"),
                         ("cope_file", "cope_file")]),
        (flameo, report, [("zstats", "zstat_file")]),
        (cluster, report, [("threshold_file", "zstat_thresh_file"),
                           ("localmax_txt_file", "localmax_file")]),
        (watershed, report, [("seg_file", "seg_file")]),
        (merge, surfproj, [("mask_file", "inputs.mask_file")]),
        (cluster, surfproj, [("threshold_file", "inputs.zstat_file")]),
        (merge, outputnode, [("cope_file", "copes"),
                             ("varcope_file", "varcopes"),
                             ("mask_file", "mask_file")]),
        (flameo, outputnode, [("stats_dir", "flameo_stats")]),
        (cluster, outputnode, [("threshold_file", "thresh_zstat"),
                               ("index_file", "cluster_image")]),
        (watershed, outputnode, [("seg_file", "seg_file"),
                                 ("peak_file", "peak_file"),
                                 ("lut_file", "lut_file")]),
        (surfproj, outputnode, [("outputs.surf_zstat", "surf_zstat"),
                                ("outputs.surf_mask", "surf_mask")]),
        (report, outputnode, [("out_files", "report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return group, inputnode, outputnode
Example #8
def second_level_wf(output_dir, bids_ref, name='wf_2nd_level'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['group_mask', 'in_copes', 'in_varcopes']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'zstats_raw', 'zstats_fwe', 'zstats_clust', 'clust_index_file',
        'clust_localmax_txt_file'
    ]),
                         name='outputnode')

    # Configure FSL 2nd level analysis
    l2_model = pe.Node(fsl.L2Model(), name='l2_model')
    flameo_ols = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo_ols')

    merge_copes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes')
    merge_varcopes = pe.Node(fsl.Merge(dimension='t'), name='merge_varcopes')

    # Thresholding - FDR ################################################
    # Calculate pvalues with ztop
    fdr_ztop = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                       name='fdr_ztop')
    # Find FDR threshold: fdr -i zstat1_pval -m <group_mask> -q 0.05
    # fdr_th = <write Nipype interface for fdr>
    # Apply threshold:
    # fslmaths zstat1_pval -mul -1 -add 1 -thr <fdr_th> -mas <group_mask> \
    #     zstat1_thresh_vox_fdr_pstat1
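    # A possible sketch for the missing piece (hypothetical, not part of the
    # original workflow): since no dedicated FDR interface exists here, the
    # command could be wrapped generically, e.g.
    #
    #   from nipype.interfaces.base import CommandLine
    #   fdr_th = CommandLine('fdr', args='-i zstat1_pval -m group_mask -q 0.05')
    #   result = fdr_th.run()  # FSL prints the probability threshold to stdout
    #
    # and the reported threshold applied with fsl.ImageMaths as in the
    # fslmaths line above.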

    # Thresholding - FWE ################################################
    # smoothest -r %s -d %i -m %s
    smoothness = pe.Node(fsl.SmoothEstimate(), name='smoothness')
    # ptoz 0.025 -g %f
    # p = 0.05 / 2 for 2-tailed test
    fwe_ptoz = pe.Node(PtoZ(pvalue=0.025), name='fwe_ptoz')
    # fslmaths %s -uthr %s -thr %s nonsignificant
    # fslmaths %s -sub nonsignificant zstat1_thresh
    fwe_nonsig0 = pe.Node(fsl.Threshold(direction='above'), name='fwe_nonsig0')
    fwe_nonsig1 = pe.Node(fsl.Threshold(direction='below'), name='fwe_nonsig1')
    fwe_thresh = pe.Node(fsl.BinaryMaths(operation='sub'), name='fwe_thresh')

    # Thresholding - Cluster ############################################
    # cluster -i %s -c %s -t 3.2 -p 0.025 -d %s --volume=%s  \
    #     --othresh=thresh_cluster_fwe_zstat1 --connectivity=26 --mm
    cluster_kwargs = {
        'connectivity': 26,
        'threshold': 3.2,
        'pthreshold': 0.025,
        'out_threshold_file': True,
        'out_index_file': True,
        'out_localmax_txt_file': True
    }
    cluster_pos = pe.Node(fsl.Cluster(**cluster_kwargs), name='cluster_pos')
    cluster_neg = pe.Node(fsl.Cluster(**cluster_kwargs), name='cluster_neg')
    zstat_inv = pe.Node(fsl.BinaryMaths(operation='mul', operand_value=-1),
                        name='zstat_inv')
    cluster_inv = pe.Node(fsl.BinaryMaths(operation='mul', operand_value=-1),
                          name='cluster_inv')
    cluster_all = pe.Node(fsl.BinaryMaths(operation='add'), name='cluster_all')

    ds_zraw = pe.Node(GroupDerivativesDataSink(base_directory=str(output_dir),
                                               keep_dtype=False,
                                               suffix='zstat',
                                               sub='all'),
                      name='ds_zraw',
                      run_without_submitting=True)
    ds_zraw.inputs.source_file = bids_ref

    ds_zfwe = pe.Node(GroupDerivativesDataSink(base_directory=str(output_dir),
                                               keep_dtype=False,
                                               suffix='zstat',
                                               desc='fwe',
                                               sub='all'),
                      name='ds_zfwe',
                      run_without_submitting=True)
    ds_zfwe.inputs.source_file = bids_ref

    ds_zclust = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='zstat',
        desc='clust',
        sub='all'),
                        name='ds_zclust',
                        run_without_submitting=True)
    ds_zclust.inputs.source_file = bids_ref

    ds_clustidx_pos = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='pclusterindex',
        sub='all'),
                              name='ds_clustidx_pos',
                              run_without_submitting=True)
    ds_clustidx_pos.inputs.source_file = bids_ref

    ds_clustlmax_pos = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='plocalmax',
        desc='intask',
        sub='all'),
                               name='ds_clustlmax_pos',
                               run_without_submitting=True)
    ds_clustlmax_pos.inputs.source_file = bids_ref

    ds_clustidx_neg = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='nclusterindex',
        sub='all'),
                              name='ds_clustidx_neg',
                              run_without_submitting=True)
    ds_clustidx_neg.inputs.source_file = bids_ref

    ds_clustlmax_neg = pe.Node(GroupDerivativesDataSink(
        base_directory=str(output_dir),
        keep_dtype=False,
        suffix='nlocalmax',
        desc='intask',
        sub='all'),
                               name='ds_clustlmax_neg',
                               run_without_submitting=True)
    ds_clustlmax_neg.inputs.source_file = bids_ref

    workflow.connect([
        (inputnode, l2_model, [(('in_copes', _len), 'num_copes')]),
        (inputnode, flameo_ols, [('group_mask', 'mask_file')]),
        (inputnode, smoothness, [('group_mask', 'mask_file'),
                                 (('in_copes', _dof), 'dof')]),
        (inputnode, merge_copes, [('in_copes', 'in_files')]),
        (inputnode, merge_varcopes, [('in_varcopes', 'in_files')]),
        (l2_model, flameo_ols, [('design_mat', 'design_file'),
                                ('design_con', 't_con_file'),
                                ('design_grp', 'cov_split_file')]),
        (merge_copes, flameo_ols, [('merged_file', 'cope_file')]),
        (merge_varcopes, flameo_ols, [('merged_file', 'var_cope_file')]),
        (flameo_ols, smoothness, [('res4d', 'residual_fit_file')]),
        (flameo_ols, fwe_nonsig0, [('zstats', 'in_file')]),
        (fwe_nonsig0, fwe_nonsig1, [('out_file', 'in_file')]),
        (smoothness, fwe_ptoz, [('resels', 'resels')]),
        (fwe_ptoz, fwe_nonsig0, [('zstat', 'thresh')]),
        (fwe_ptoz, fwe_nonsig1, [(('zstat', _neg), 'thresh')]),
        (flameo_ols, fwe_thresh, [('zstats', 'in_file')]),
        (fwe_nonsig1, fwe_thresh, [('out_file', 'operand_file')]),
        (flameo_ols, cluster_pos, [('zstats', 'in_file')]),
        (merge_copes, cluster_pos, [('merged_file', 'cope_file')]),
        (smoothness, cluster_pos, [('volume', 'volume'), ('dlh', 'dlh')]),
        (flameo_ols, zstat_inv, [('zstats', 'in_file')]),
        (zstat_inv, cluster_neg, [('out_file', 'in_file')]),
        (cluster_neg, cluster_inv, [('threshold_file', 'in_file')]),
        (merge_copes, cluster_neg, [('merged_file', 'cope_file')]),
        (smoothness, cluster_neg, [('volume', 'volume'), ('dlh', 'dlh')]),
        (cluster_pos, cluster_all, [('threshold_file', 'in_file')]),
        (cluster_inv, cluster_all, [('out_file', 'operand_file')]),
        (flameo_ols, ds_zraw, [('zstats', 'in_file')]),
        (fwe_thresh, ds_zfwe, [('out_file', 'in_file')]),
        (cluster_all, ds_zclust, [('out_file', 'in_file')]),
        (cluster_pos, ds_clustidx_pos, [('index_file', 'in_file')]),
        (cluster_pos, ds_clustlmax_pos, [('localmax_txt_file', 'in_file')]),
        (cluster_neg, ds_clustidx_neg, [('index_file', 'in_file')]),
        (cluster_neg, ds_clustlmax_neg, [('localmax_txt_file', 'in_file')]),
    ])
    return workflow
Example #9
def cluster_image(name="threshold_cluster_makeimages"):
    from nipype.interfaces import fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    workflow = pe.Workflow(name=name)
    inputspec = pe.Node(util.IdentityInterface(fields=["zstat","mask","zthreshold","pthreshold","connectivity",'anatomical']),name="inputspec")
    smoothest = pe.MapNode(fsl.SmoothEstimate(), name='smooth_estimate', iterfield=['zstat_file'])
    workflow.connect(inputspec,'zstat', smoothest, 'zstat_file')
    workflow.connect(inputspec,'mask',smoothest, 'mask_file')

    cluster = pe.MapNode(fsl.Cluster(out_localmax_txt_file=True,
                                     out_index_file=True,
                                     out_localmax_vol_file=True), 
                         name='cluster', iterfield=['in_file','dlh','volume'])
    workflow.connect(smoothest,'dlh', cluster, 'dlh')
    workflow.connect(smoothest, 'volume', cluster, 'volume')
    workflow.connect(inputspec,"zthreshold",cluster,"threshold")
    workflow.connect(inputspec,"pthreshold",cluster,"pthreshold")
    workflow.connect(inputspec,"connectivity",cluster,"connectivity")
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    workflow.connect(inputspec,'zstat',cluster,'in_file')
    """
    labels = pe.MapNode(util.Function(input_names=['in_file','thr','csize'],
                                   output_names=['labels'],function=get_labels),
        name='labels',iterfield=["in_file"])

    workflow.connect(inputspec,"zthreshold",labels,"thr")
    workflow.connect(inputspec,"connectivity",labels,"csize")
    workflow.connect(cluster,"threshold_file",labels,"in_file")
    showslice=pe.MapNode(util.Function(input_names=['image_in','anat_file','coordinates','thr'],
                                    output_names=["outfiles"],function=show_slices),
              name='showslice',iterfield=["image_in","coordinates"])

    coords = pe.MapNode(util.Function(input_names=["in_file","img"],
                                   output_names=["coords"],
                                   function=get_coords2),
        name='getcoords', iterfield=["in_file","img"])

    workflow.connect(cluster,'threshold_file',showslice,'image_in')
    workflow.connect(inputspec,'anatomical',showslice,"anat_file")
    workflow.connect(inputspec,'zthreshold',showslice,'thr')
    workflow.connect(labels,'labels',coords,"img")
    workflow.connect(cluster,"threshold_file",coords,"in_file")
    workflow.connect(coords,"coords",showslice,"coordinates")

    overlay = pe.MapNode(util.Function(input_names=["stat_image",
                                                 "background_image",
                                                 "threshold"],
                                       output_names=["fnames"],function=overlay_new),
                         name='overlay', iterfield=["stat_image"])
    workflow.connect(inputspec,"anatomical", overlay,"background_image")
    workflow.connect(cluster,"threshold_file",overlay,"stat_image")
    workflow.connect(inputspec,"zthreshold",overlay,"threshold")
    """
    outputspec = pe.Node(util.IdentityInterface(fields=["corrected_z","localmax_txt","index_file","localmax_vol","slices","cuts","corrected_p"]),name='outputspec')
    workflow.connect(cluster,'threshold_file',outputspec,'corrected_z')
    workflow.connect(cluster,'index_file',outputspec,'index_file')
    workflow.connect(cluster,'localmax_vol_file',outputspec,'localmax_vol')
    #workflow.connect(showslice,"outfiles",outputspec,"slices")
    #workflow.connect(overlay,"fnames",outputspec,"cuts")
    workflow.connect(cluster,'localmax_txt_file',outputspec,'localmax_txt')
    return workflow
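
# Usage sketch for the workflow above (all values hypothetical):
wf = cluster_image()
wf.inputs.inputspec.zstat = ['zstat1.nii.gz']  # a list: the estimate/cluster nodes are MapNodes
wf.inputs.inputspec.mask = 'group_mask.nii.gz'
wf.inputs.inputspec.zthreshold = 2.3
wf.inputs.inputspec.pthreshold = 0.05
wf.inputs.inputspec.connectivity = 26
wf.run()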
Example #10
import glob
import os.path as op

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.fsl as fsl
from nilearn import image

# zmap holds a glob pattern defined earlier in the source script
zmap = glob.glob(zmap)

wf = pe.Workflow(name='threshold_wf_{}'.format(glm),
                 base_dir='/tmp/workflow_folders')

inputnode = pe.Node(niu.IdentityInterface(fields=['zmap']), name='zmap')

inputnode.inputs.zmap = zmap

mask = fsl.Info.standard_image('MNI152_T1_1mm_brain_mask_dil.nii.gz')
resampled_mask = image.resample_to_img(mask, zmap[0], interpolation='nearest')
resampled_mask.to_filename(op.join(wf.base_dir, 'mask.nii.gz'))

n_voxels = int((resampled_mask.get_fdata() > 0).sum())

smooth_est = pe.MapNode(fsl.SmoothEstimate(),
                        iterfield=['zstat_file'],
                        name='smooth_estimate')

smooth_est.inputs.mask_file = resampled_mask.get_filename()

wf.connect(inputnode, 'zmap', smooth_est, 'zstat_file')

cluster = pe.MapNode(fsl.Cluster(threshold=3.1,
                                 volume=n_voxels,
                                 pthreshold=0.05,
                                 out_pval_file=True,
                                 out_threshold_file=True,
                                 out_index_file=True,
                                 out_localmax_txt_file=True),
                     iterfield=['in_file', 'dlh'],
                     name='cluster')
Example #11
File: easy_thres.py, Project: amrka/ratpype
def threshold(wf_name, correction="uncorrected"):
    """
    Workflow for carrying out uncorrected, cluster-based and voxel-based thresholding
    (following FSL terminology)
    and colour activation overlaying

    Parameters
    ----------
    wf_name : string
        Workflow name

    Returns
    -------
    easy_thresh : object
        Easy thresh workflow object

    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/easy_thresh/easy_thresh.py>`_
    Modified by Tamas Spisak for preclinical use
    Added: uncorrected and voxel-corrected thresholding

    Workflow Inputs::

        inputspec.z_stats : string (nifti file)
            z_score stats output for t or f contrast from flameo

        inputspec.merge_mask : string (nifti file)
            mask generated from 4D Merged derivative file

        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to
            determine what level of activation would be statistically significant.
            Increasing this will result in higher estimates of required effect.

        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.

        inputspec.parameters : string (tuple)
            tuple containing MNI template and FSLDIR path information

    Workflow Outputs::

        outputspec.cluster_threshold : string (nifti files)
           the thresholded Z statistic image for each t contrast

        outputspec.cluster_index : string (nifti files)
            image of clusters for each t contrast; the values
            in the clusters are the index numbers as used
            in the cluster list.

        outputspec.overlay_threshold : string (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color
            Rendering GUI to reload the color look-up-table

        outputspec.overlay_rendered_image : string (nifti files)
           2D color rendered stats overlay picture for each t contrast

        outputspec.cluster_localmax_txt : string (text files)
            local maxima text file, defines the coordinates of maximum value
            in the cluster


    Order of commands in case of cluster correction:

    - Estimate smoothness of the image::

        smoothest --mask= merge_mask.nii.gz --zstat=.../flameo/stats/zstat1.nii.gz

        arguments
        --mask  :  brain mask volume
        --zstat :  filename of zstat/zfstat image

    - Create mask. For details see `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm#fslutils>`_::

        fslmaths ../flameo/stats/zstat1.nii.gz
                 -mas merge_mask.nii.gz
                 zstat1_mask.nii.gz

        arguments
        -mas   : use (following image>0) to mask current image

    - Copy Geometry (image dimensions, voxel dimensions, voxel dimension units string, image orientation/origin or qform/sform info) from one image to another::

        fslcpgeom MNI152_T1_2mm_brain.nii.gz zstat1_mask.nii.gz

    - Cluster based thresholding. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::

        cluster --dlh = 0.0023683100
                --in = zstat1_mask.nii.gz
                --oindex = zstat1_cluster_index.nii.gz
                --olmax = zstat1_cluster_localmax.txt
                --othresh = zstat1_cluster_threshold.nii.gz
                --pthresh = 0.0500000000
                --thresh = 2.3000000000
                --volume = 197071

        arguments
        --in    :    filename of input volume
        --dlh   :    smoothness estimate = sqrt(det(Lambda))
        --oindex  :  filename for output of cluster index
        --othresh :  filename for output of thresholded image
        --olmax   :  filename for output of local maxima text file
        --volume  :  number of voxels in the mask
        --pthresh :  p-threshold for clusters
        --thresh  :  threshold for input volume

     Z statistic image is thresholded to show which voxels or clusters of voxels are activated at a particular significance level.
     A Z statistic threshold is used to define contiguous clusters. Then each cluster's estimated significance level (from GRF-theory)
     is compared with the cluster probability threshold. Significant clusters are then used to mask the original Z statistic image.

    High Level Workflow Graph:

    .. image:: ../images/easy_thresh.dot.png
       :width: 800


    Detailed Workflow Graph:

    .. image:: ../images/easy_thresh_detailed.dot.png
       :width: 800

    Order of commands in case of voxel correction (as implemented below):
    mask the z-statistic image (fslmaths -mas), estimate its smoothness
    (smoothest), convert the probability threshold to a z threshold with
    ptoz using the estimated resel count, and threshold the masked image
    (fslmaths -thr). For uncorrected thresholding, the smoothness estimation
    is skipped and ptoz converts the p threshold directly.

    Examples
    --------

    >>> import easy_thres
    >>> preproc = easy_thres.threshold("new_workflow")
    >>> preproc.inputs.inputspec.z_stats= 'flameo/stats/zstat1.nii.gz'
    >>> preproc.inputs.inputspec.merge_mask = 'merge_mask/alff_Z_fn2standard_merged_mask.nii.gz'
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.run()  # doctest: +SKIP

    """

    easy_thresh = pe.Workflow(name=wf_name)

    outputnode = pe.Node(
        util.IdentityInterface(fields=['thres_zstat', 'overlay_threshold']),
        name='outputspec')

    if (correction == 'uncorrected'):
        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')

        # run clustering after fixing stats header for talspace
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        ptoz = pe.Node(interface=myutils.PtoZ(), name='PtoZ')

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdUncorr',
                           iterfield=['in_file'])

        easy_thresh.connect(zstat_mask, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')

    elif (correction == 'voxel'):
        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')
        # run clustering after fixing stats header for talspace
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(zstat_mask, 'out_file', smooth_estimate,
                            'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        ptoz = pe.MapNode(interface=myutils.PtoZ(),
                          name='PtoZ',
                          iterfield=['resels'])

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')
        easy_thresh.connect(smooth_estimate, 'resels', ptoz, 'resels')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdVoxel',
                           iterfield=['in_file', 'thresh'])
        thres._interface._suffix = 'vox'

        easy_thresh.connect(zstat_mask, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')

    elif (correction == 'tfce'):

        print('Not good!!!')
        #TODO

        inputnode = pe.Node(
            util.IdentityInterface(fields=['z_stats', 'mask', 'p_threshold']),
            name='inputspec')

        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # tfce-corerction
        op_string = '-tfce 2 0.5 6'
        tfce = pe.MapNode(interface=fsl.ImageMaths(suffix='_tfce',
                                                   op_string=op_string),
                          iterfield=['in_file'],
                          name='tfce')

        easy_thresh.connect(zstat_mask, 'out_file', tfce, 'in_file')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(tfce, 'out_file', smooth_estimate, 'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        ptoz = pe.MapNode(interface=myutils.PtoZ(),
                          name='PtoZ',
                          iterfield=['resels'])

        easy_thresh.connect(inputnode, 'p_threshold', ptoz, 'p_val')
        easy_thresh.connect(smooth_estimate, 'resels', ptoz, 'resels')

        thres = pe.MapNode(interface=fsl.Threshold(),
                           name='ThresholdVoxel',
                           iterfield=['in_file', 'thresh'])

        easy_thresh.connect(tfce, 'out_file', thres, 'in_file')
        easy_thresh.connect(ptoz, 'z_score', thres, 'thresh')

        easy_thresh.connect(thres, 'out_file', outputnode, 'thres_zstat')
        easy_thresh.connect(ptoz, 'z_score', outputnode, 'overlay_threshold')

        print('Not implemented!')

    elif (correction == 'cluster'):

        inputnode = pe.Node(util.IdentityInterface(
            fields=['z_stats', 'mask', 'z_threshold', 'p_threshold']),
                            name='inputspec')

        # run clustering
        zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                                name='zstat_mask',
                                iterfield=['in_file', 'operand_files'])
        #operations to perform
        #-mas use (following image>0) to mask current image
        zstat_mask.inputs.op_string = '-mas %s'

        easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
        easy_thresh.connect(inputnode, 'mask', zstat_mask, 'operand_files')

        # estimate image smoothness
        smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                     name='smooth_estimate',
                                     iterfield=['zstat_file', 'mask_file'])

        easy_thresh.connect(zstat_mask, 'out_file', smooth_estimate,
                            'zstat_file')
        easy_thresh.connect(inputnode, 'mask', smooth_estimate, 'mask_file')

        ##cluster-based thresholding
        #After carrying out the initial statistical test, the resulting
        #Z statistic image is then normally thresholded to show which voxels or
        #clusters of voxels are activated at a particular significance level.
        #A Z statistic threshold is used to define contiguous clusters.
        #Then each cluster's estimated significance level (from GRF-theory) is
        #compared with the cluster probability threshold. Significant clusters
        #are then used to mask the original Z statistic image for later production
        #of colour blobs.This method of thresholding is an alternative to
        #Voxel-based correction, and is normally more sensitive to activation.
        #    cluster = pe.MapNode(interface=fsl.Cluster(),
        #                            name='cluster',
        #                            iterfield=['in_file', 'volume', 'dlh'])
        #    #output of cluster index (in size order)
        #    cluster.inputs.out_index_file = True
        #    #thresholded image
        #    cluster.inputs.out_threshold_file = True
        #    #local maxima text file
        #    #defines the cluster cordinates
        #    cluster.inputs.out_localmax_txt_file = True

        cluster = pe.MapNode(interface=fsl.Cluster(
            out_pval_file='pval.nii.gz',
            out_threshold_file='thres_clust_zstat.nii.gz'),
                             name='ThresholdClust',
                             iterfield=['in_file', 'dlh', 'volume'])

        easy_thresh.connect(zstat_mask, 'out_file', cluster, 'in_file')
        easy_thresh.connect(inputnode, 'z_threshold', cluster, 'threshold')
        easy_thresh.connect(inputnode, 'p_threshold', cluster, 'pthreshold')
        easy_thresh.connect(smooth_estimate, 'volume', cluster, 'volume')
        easy_thresh.connect(smooth_estimate, 'dlh', cluster, 'dlh')

        easy_thresh.connect(cluster, 'threshold_file', outputnode,
                            'thres_zstat')
        easy_thresh.connect(inputnode, 'z_threshold', outputnode,
                            'overlay_threshold')
    else:
        print("Error: invalid thresholding correction mode: " + correction)

    return easy_thresh
Example #12
NodeHash_29d3f040.inputs.interp = 'trilinear'

#Wraps command **applywarp**
NodeHash_29ecf020 = pe.MapNode(interface=fsl.ApplyWarp(), name='NodeName_29ecf020', iterfield=['field_file', 'in_file', 'premat'])
NodeHash_29ecf020.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_2ceb9d10 = pe.Node(interface=fsl.Merge(), name='NodeName_2ceb9d10')
NodeHash_2ceb9d10.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_2f149160 = pe.Node(interface=fsl.FLAMEO(), name='NodeName_2f149160')
NodeHash_2f149160.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_2fbc52b0 = pe.Node(interface=fsl.SmoothEstimate(), name='NodeName_2fbc52b0')

#Wraps command **cluster**
NodeHash_318a61d0 = pe.Node(interface=fsl.Cluster(), name='NodeName_318a61d0')
NodeHash_318a61d0.inputs.pthreshold = 0.05
NodeHash_318a61d0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_33749690 = pe.Node(interface=fsl.Merge(), name='NodeName_33749690')
NodeHash_33749690.inputs.dimension = 't'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_28f35280, 'in_file')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_8f61130, 'reference')
analysisflow.connect(NodeHash_1ad6ca0, 'anat', NodeHash_2062490, 'in_file')
Example #13
#Wraps command **applywarp**
NodeHash_2f7b4860 = pe.MapNode(interface=fsl.ApplyWarp(),
                               name='NodeName_2f7b4860',
                               iterfield=['field_file', 'in_file', 'premat'])
NodeHash_2f7b4860.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_2e8e9e00 = pe.Node(interface=fsl.Merge(), name='NodeName_2e8e9e00')
NodeHash_2e8e9e00.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_313ca880 = pe.Node(interface=fsl.FLAMEO(), name='NodeName_313ca880')
NodeHash_313ca880.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_314ce330 = pe.Node(interface=fsl.SmoothEstimate(),
                            name='NodeName_314ce330')

#Wraps command **cluster**
NodeHash_332d21c0 = pe.Node(interface=fsl.Cluster(), name='NodeName_332d21c0')
NodeHash_332d21c0.inputs.pthreshold = 0.05
NodeHash_332d21c0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_33d80690 = pe.Node(interface=fsl.Merge(), name='NodeName_33d80690')
NodeHash_33d80690.inputs.dimension = 't'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_313ca880, 'zstats', NodeHash_314ce330,
                     'zstat_file')
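
#Plausible remaining connections (hypothetical completion, mirroring the
#examples above): feed the smoothness estimates and z-stats into cluster
analysisflow.connect(NodeHash_314ce330, 'dlh', NodeHash_332d21c0, 'dlh')
analysisflow.connect(NodeHash_314ce330, 'volume', NodeHash_332d21c0, 'volume')
analysisflow.connect(NodeHash_313ca880, 'zstats', NodeHash_332d21c0, 'in_file')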
Example #14
def init_model_wf(
        workdir: Path,
        model,
        numinputs=1,
        variables=None,
        memcalc=MemoryCalculator.default(),
):
    name = f"{format_workflow(model.name)}_wf"
    workflow = pe.Workflow(name=name)

    if model is None:
        return workflow

    #
    inputnode = Node(
        niu.IdentityInterface(
            fields=[f"in{i:d}" for i in range(1, numinputs + 1)]),
        allow_missing_input_source=True,
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]),
                         name="outputnode")

    # setup outputs
    make_resultdicts_a = pe.Node(
        MakeResultdicts(
            tagkeys=["model", "contrast"],
            imagekeys=[
                "design_matrix", "contrast_matrix", *modelfit_model_outputs
            ],
            deletekeys=["contrast"],
        ),
        name="make_resultdicts_a",
    )

    statmaps = [
        modelfit_aliases[m] if m in modelfit_aliases else m
        for m in modelfit_contrast_outputs
    ]
    make_resultdicts_b = pe.Node(
        MakeResultdicts(
            tagkeys=["model", "contrast"],
            imagekeys=statmaps,
            metadatakeys=["critical_z"],
            missingvalues=[
                None,
                False,
            ],  # need to use False because traits doesn't support NoneType
        ),
        name="make_resultdicts_b",
    )

    # model is guaranteed non-None after the early return above
    make_resultdicts_a.inputs.model = model.name
    make_resultdicts_b.inputs.model = model.name

    # copy out results
    merge_resultdicts_b = pe.Node(niu.Merge(3), name="merge_resultdicts_b")
    workflow.connect(make_resultdicts_a, "resultdicts", merge_resultdicts_b,
                     "in1")
    workflow.connect(make_resultdicts_b, "resultdicts", merge_resultdicts_b,
                     "in2")

    workflow.connect(merge_resultdicts_b, "out", outputnode, "resultdicts")

    resultdict_datasink = pe.Node(
        ResultdictDatasink(base_directory=str(workdir)),
        name="resultdict_datasink")
    workflow.connect(merge_resultdicts_b, "out", resultdict_datasink,
                     "indicts")

    # merge inputs
    merge_resultdicts_a = Node(
        niu.Merge(numinputs),
        allow_missing_input_source=True,
        name="merge_resultdicts_a",
    )
    for i in range(1, numinputs + 1):
        workflow.connect(inputnode, f"in{i:d}", merge_resultdicts_a,
                         f"in{i:d}")

    # filter inputs
    filter_kwargs = dict(
        require_one_of_images=["effect", "reho", "falff", "alff"],
        exclude_files=[
            str(workdir / "exclude*.json"),
            str(workdir / "reports" / "exclude*.json"),
        ],
    )
    if (hasattr(model, "filters") and model.filters is not None
            and len(model.filters) > 0):
        filter_kwargs.update(dict(filter_dicts=model.filters))
    if hasattr(model, "spreadsheet"):
        if model.spreadsheet is not None and variables is not None:
            filter_kwargs.update(
                dict(spreadsheet=model.spreadsheet, variable_dicts=variables))
    filter_resultdicts = pe.Node(
        interface=FilterResultdicts(**filter_kwargs),
        name="filter_resultdicts",
    )
    workflow.connect(merge_resultdicts_a, "out", filter_resultdicts,
                     "in_dicts")

    # aggregate data structures
    # output is a list where each element represents a separate model run
    aggregate_resultdicts = pe.Node(
        AggregateResultdicts(numinputs=1, across=model.across),
        name="aggregate_resultdicts",
    )
    workflow.connect(filter_resultdicts, "resultdicts", aggregate_resultdicts,
                     "in1")

    # extract fields from the aggregated data structure
    aliases = dict(effect=["reho", "falff", "alff"])
    extract_from_resultdict = MapNode(
        ExtractFromResultdict(keys=[model.across, *statmaps], aliases=aliases),
        iterfield="indict",
        allow_undefined_iterfield=True,
        name="extract_from_resultdict",
    )
    workflow.connect(aggregate_resultdicts, "resultdicts",
                     extract_from_resultdict, "indict")

    # copy over aggregated metadata and tags to outputs
    for make_resultdicts_node in [make_resultdicts_a, make_resultdicts_b]:
        workflow.connect(extract_from_resultdict, "tags",
                         make_resultdicts_node, "tags")
        workflow.connect(extract_from_resultdict, "metadata",
                         make_resultdicts_node, "metadata")
        workflow.connect(extract_from_resultdict, "vals",
                         make_resultdicts_node, "vals")

    # create models
    if model.type in ["fe", "me"]:  # intercept only model
        countimages = pe.Node(
            niu.Function(
                input_names=["arrarr"],
                output_names=["image_count"],
                function=len_for_each,
            ),
            name="countimages",
        )
        workflow.connect(extract_from_resultdict, "effect", countimages,
                         "arrarr")

        modelspec = MapNode(
            InterceptOnlyDesign(),
            name="modelspec",
            iterfield="n_copes",
            mem_gb=memcalc.min_gb,
        )
        workflow.connect(countimages, "image_count", modelspec, "n_copes")

    elif model.type in ["lme"]:  # glm
        modelspec = MapNode(
            GroupDesign(
                spreadsheet=model.spreadsheet,
                contrastdicts=model.contrasts,
                variabledicts=variables,
            ),
            name="modelspec",
            iterfield="subjects",
            mem_gb=memcalc.min_gb,
        )
        workflow.connect(extract_from_resultdict, "sub", modelspec, "subjects")

    else:
        raise ValueError(f"Unsupported model type: {model.type}")

    workflow.connect(modelspec, "contrast_names", make_resultdicts_b,
                     "contrast")

    # run models
    if model.type in [
            "fe"
    ]:  # fixed effects aggregate for multiple runs, sessions, etc.
        # pass length one inputs because we may want to use them on a higher level
        workflow.connect(
            aggregate_resultdicts,
            "non_aggregated_resultdicts",
            merge_resultdicts_b,
            "in3",
        )

        # need to merge
        mergenodeargs = dict(iterfield="in_files",
                             mem_gb=memcalc.volume_std_gb * 3)
        mergemask = MapNode(MergeMask(), name="mergemask", **mergenodeargs)
        workflow.connect(extract_from_resultdict, "mask", mergemask,
                         "in_files")

        mergeeffect = MapNode(Merge(dimension="t"),
                              name="mergeeffect",
                              **mergenodeargs)
        workflow.connect(extract_from_resultdict, "effect", mergeeffect,
                         "in_files")

        mergevariance = MapNode(Merge(dimension="t"),
                                name="mergevariance",
                                **mergenodeargs)
        workflow.connect(extract_from_resultdict, "variance", mergevariance,
                         "in_files")

        fe_run_mode = MapNode(
            niu.Function(
                input_names=["var_cope_file"],
                output_names=["run_mode"],
                function=_fe_run_mode,
            ),
            iterfield=["var_cope_file"],
            name="fe_run_mode",
        )
        workflow.connect(mergevariance, "merged_file", fe_run_mode,
                         "var_cope_file")

        # prepare design matrix
        multipleregressdesign = MapNode(
            fsl.MultipleRegressDesign(),
            name="multipleregressdesign",
            iterfield=["regressors", "contrasts"],
            mem_gb=memcalc.min_gb,
        )
        workflow.connect(modelspec, "regressors", multipleregressdesign,
                         "regressors")
        workflow.connect(modelspec, "contrasts", multipleregressdesign,
                         "contrasts")

        # use FSL implementation
        modelfit = MapNode(
            FLAMEO(),
            name="modelfit",
            mem_gb=memcalc.volume_std_gb * 10,
            iterfield=[
                "run_mode",
                "mask_file",
                "cope_file",
                "var_cope_file",
                "design_file",
                "t_con_file",
                "cov_split_file",
            ],
        )
        workflow.connect(fe_run_mode, "run_mode", modelfit, "run_mode")
        workflow.connect(mergemask, "merged_file", modelfit, "mask_file")
        workflow.connect(mergeeffect, "merged_file", modelfit, "cope_file")
        workflow.connect(mergevariance, "merged_file", modelfit,
                         "var_cope_file")
        workflow.connect(multipleregressdesign, "design_mat", modelfit,
                         "design_file")
        workflow.connect(multipleregressdesign, "design_con", modelfit,
                         "t_con_file")
        workflow.connect(multipleregressdesign, "design_grp", modelfit,
                         "cov_split_file")

        # mask output
        workflow.connect(mergemask, "merged_file", make_resultdicts_b, "mask")

    elif model.type in ["me", "lme"]:  # mixed effects across subjects
        # use custom implementation
        modelfit = MapNode(
            ModelFit(algorithms_to_run=model.algorithms),
            name="modelfit",
            n_procs=config.nipype.omp_nthreads,
            mem_gb=memcalc.volume_std_gb * 10,
            iterfield=[
                "mask_files",
                "cope_files",
                "var_cope_files",
                "regressors",
                "contrasts",
            ],
        )
        workflow.connect(extract_from_resultdict, "mask", modelfit,
                         "mask_files")
        workflow.connect(extract_from_resultdict, "effect", modelfit,
                         "cope_files")
        workflow.connect(extract_from_resultdict, "variance", modelfit,
                         "var_cope_files")

        workflow.connect(modelspec, "regressors", modelfit, "regressors")
        workflow.connect(modelspec, "contrasts", modelfit, "contrasts")

        # random field theory
        smoothest = MapNode(
            fsl.SmoothEstimate(),
            iterfield=["zstat_file", "mask_file"],
            name="smoothest",
            allow_undefined_iterfield=True,
        )
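        # `ravel` is likewise defined elsewhere; a plausible sketch that
        # flattens the nested per-model output lists before mapping:
        def ravel(arrarr):
            out = []
            for arr in arrarr:
                if isinstance(arr, (list, tuple)):
                    out.extend(ravel(arr))
                else:
                    out.append(arr)
            return out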
        workflow.connect([(modelfit, smoothest, [(("zstats", ravel),
                                                  "zstat_file")])])
        workflow.connect([(modelfit, smoothest, [(("masks", ravel),
                                                  "mask_file")])])

        criticalz = pe.Node(
            niu.Function(
                input_names=["voxels", "resels"],
                output_names=["critical_z"],
                function=_critical_z,
            ),
            name="criticalz",
        )
        workflow.connect(smoothest, "volume", criticalz, "voxels")
        workflow.connect(smoothest, "resels", criticalz, "resels")
        workflow.connect(criticalz, "critical_z", make_resultdicts_b,
                         "critical_z")

    else:
        raise ValueError(f'Unknown model type "{model.type}"')

    # connect modelfit outputs
    assert modelfit.outputs is not None
    for k, _ in modelfit.outputs.items():
        if k in modelfit_exclude:
            continue

        attr = k
        if k in modelfit_aliases:
            attr = modelfit_aliases[k]
        if attr in statmaps:
            workflow.connect(modelfit, k, make_resultdicts_b, attr)
        else:
            workflow.connect(modelfit, k, make_resultdicts_a, attr)

    # make tsv files for design and contrast matrices
    maketsv = MapNode(
        MakeDesignTsv(),
        iterfield=["regressors", "contrasts", "row_index"],
        name="maketsv",
    )
    workflow.connect(extract_from_resultdict, model.across, maketsv,
                     "row_index")
    workflow.connect(modelspec, "regressors", maketsv, "regressors")
    workflow.connect(modelspec, "contrasts", maketsv, "contrasts")

    workflow.connect(maketsv, "design_tsv", make_resultdicts_a,
                     "design_matrix")
    workflow.connect(maketsv, "contrasts_tsv", make_resultdicts_a,
                     "contrast_matrix")

    return workflow
Example #15
def second_level_wf(name):
    """second level analysis"""
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'copes', 'varcopes', 'group_mask', 'design_mat', 'design_con',
        'design_grp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['zstat', 'tstat', 'pstat', 'fwe_thres', 'fdr_thres']),
                         name='outputnode')

    copemerge = pe.Node(fsl.Merge(dimension='t'), name='copemerge', mem_gb=40)
    varcopemerge = pe.Node(fsl.Merge(dimension='t'),
                           name='varcopemerge',
                           mem_gb=40)
    flameo = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo')
    ztopval = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                      name='ztop')

    # FDR
    fdr = pe.Node(FDR(), name='calc_fdr')
    fdr_apply = pe.Node(fsl.ImageMaths(suffix='_thresh_vox_fdr_pstat1'),
                        name='fdr_apply')
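
    # FDR above is assumed to wrap FSL's `fdr` command-line tool, which
    # estimates the adaptive probability threshold that controls the false
    # discovery rate, e.g. `fdr -i pstat_image -m mask -q 0.05`.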

    # FWE
    def _reselcount(voxels, resels):
        # number of resolution elements (resels) in the search volume
        return float(voxels / resels)

    smoothness = pe.Node(fsl.SmoothEstimate(), name='smoothness')
    rescount = pe.Node(niu.Function(function=_reselcount), name='reselcount')
    ptoz = pe.Node(PtoZ(), name='ptoz')
    fwethres = pe.Node(fsl.Threshold(), name='fwethres')

    # Cluster
    cluster = pe.Node(fsl.Cluster(threshold=3.2,
                                  pthreshold=0.05,
                                  connectivity=26,
                                  use_mm=True),
                      name='cluster')

    def _len(inlist):
        return len(inlist)

    def _lastidx(inlist):
        return len(inlist) - 1

    def _first(inlist):
        if isinstance(inlist, (list, tuple)):
            return inlist[0]
        return inlist

    def _fdr_thres_operator(fdr_th):
        # fslmaths op string: turn the image into 1 - p, then keep voxels at
        # or above 1 - fdr_th, i.e. voxels with p <= fdr_th
        return '-mul -1 -add 1 -thr %f' % (1 - fdr_th)
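
    # PtoZ above is assumed to wrap FSL's `ptoz` utility, which converts the
    # corrected p threshold into an equivalent z threshold given the resel
    # count (roughly `ptoz 0.05 -g <resels>`); a crude pure-Python
    # approximation of that conversion, for illustration only:
    #
    #     from scipy.stats import norm
    #     def _ptoz(p, resels):
    #         return float(norm.isf(p / resels))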

    # create workflow
    workflow.connect([
        (inputnode, flameo, [('design_mat', 'design_file'),
                             ('design_con', 't_con_file'),
                             ('design_grp', 'cov_split_file')]),
        (inputnode, copemerge, [('copes', 'in_files')]),
        (inputnode, varcopemerge, [('varcopes', 'in_files')]),
        (inputnode, flameo, [('group_mask', 'mask_file')]),
        (copemerge, flameo, [('merged_file', 'cope_file')]),
        (varcopemerge, flameo, [('merged_file', 'var_cope_file')]),
        (flameo, ztopval, [(('zstats', _first), 'in_file')]),
        (ztopval, fdr, [('out_file', 'in_file')]),
        (inputnode, fdr, [('group_mask', 'in_mask')]),
        (inputnode, fdr_apply, [('group_mask', 'mask_file')]),
        (flameo, fdr_apply, [(('zstats', _first), 'in_file')]),
        (fdr, fdr_apply, [(('fdr_val', _fdr_thres_operator), 'op_string')]),
        (inputnode, smoothness, [('group_mask', 'mask_file')]),
        (flameo, smoothness, [(('res4d', _first), 'residual_fit_file')]),
        (inputnode, smoothness, [(('copes', _lastidx), 'dof')]),
        (smoothness, rescount, [('resels', 'resels'), ('volume', 'voxels')]),
        (rescount, ptoz, [('out', 'resels')]),
        (flameo, fwethres, [(('zstats', _first), 'in_file')]),
        (ptoz, fwethres, [('z_val', 'thresh')]),
        (flameo, cluster, [(('zstats', _first), 'in_file'),
                           (('copes', _first), 'cope_file')]),
        (smoothness, cluster, [('dlh', 'dlh'), ('volume', 'volume')]),
        (flameo, outputnode, [
            (('zstats', _first), 'zstat'),
            (('tstats', _first), 'tstat'),
        ]),
        (ztopval, outputnode, [('out_file', 'pstat')]),
        (fdr_apply, outputnode, [('out_file', 'fdr_thres')]),
        (fwethres, outputnode, [('out_file', 'fwe_thres')]),
    ])
    return workflow
Example #16
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """
    First level workflow
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')
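
    # `_confounds2movpar` is not part of this excerpt; a plausible sketch,
    # assuming an fMRIPrep-style confounds TSV whose six motion columns are
    # written out as a plain-text realignment-parameters file (the column
    # names below are assumptions and may need adjusting):
    def _confounds2movpar(in_confounds):
        import os
        import pandas as pd
        confounds = pd.read_csv(in_confounds, sep='\t')
        cols = ['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']
        out_file = os.path.abspath('movpar.txt')
        confounds[cols].fillna(0.0).to_csv(
            out_file, sep=' ', header=False, index=False)
        return out_file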

    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim',
                  mem_gb=20)

    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')

    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ),
                 name='l1')

    l1model = pe.Node(fsl.Level1Design(interscan_interval=2,
                                       bases={'dgamma': {
                                           'derivs': True
                                       }},
                                       model_serial_correlations=True),
                      name='l1design')

    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                         detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni',
                              mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                          detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni',
                               mem_gb=20)

    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        # convert a resel volume into an equivalent linear extent (cube root)
        return val**(1 / 3.)

    def _fwhm(fwhm):
        # average the per-axis FWHM estimates returned by AFNI's 3dFWHMx
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),

        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow
Example #17
File: easy_thresh.py Project: haipan/C-PAC
def easy_thresh(wf_name):
    """
    Workflow for carrying out cluster-based thresholding 
    and colour activation overlaying
    
    Parameters
    ----------
    wf_name : string 
        Workflow name
        
    Returns
    -------
    easy_thresh : object 
        Easy thresh workflow object
    
    Notes
    -----
    
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/easy_thresh/easy_thresh.py>`_
        
    Workflow Inputs::
    
        inputspec.z_stats : string (nifti file)
            z_score stats output for t or f contrast from flameo
        
        inputspec.merge_mask : string (nifti file)
            mask generated from 4D Merged derivative file
        
        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to 
            determine what level of activation would be statistically significant. 
            Increasing this will result in higher estimates of required effect.
        
        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.
        
        inputspec.parameters : tuple (strings)
            tuple containing the FSLDIR path and the MNI template name
            
    Workflow Outputs::
    
        outputspec.cluster_threshold : string (nifti files)
           the thresholded Z statistic image for each t contrast
        
        outputspec.cluster_index : string (nifti files)
            image of clusters for each t contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
        
        outputspec.overlay_threshold : string (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image : string (nifti files)
           2D color rendered stats overlay picture for each t contrast
        
        outputspec.cluster_localmax_txt : string (text files)
            local maxima text file, defines the coordinates of maximum value
            in the cluster
    
    
    Order of commands:
    
    - Estimate smoothness of the image::
        
        smoothest --mask= merge_mask.nii.gz --zstat=.../flameo/stats/zstat1.nii.gz
        
        arguments
        --mask  :  brain mask volume
        --zstat :  filename of zstat/zfstat image
    
    - Create mask. For details see `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm#fslutils>`_::
        
        fslmaths ../flameo/stats/zstat1.nii.gz 
                 -mas merge_mask.nii.gz 
                 zstat1_mask.nii.gz
        
        arguments
        -mas   : use (following image>0) to mask current image

    - Copy geometry (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) from one image to another::
    
        fslcpgeom MNI152_T1_2mm_brain.nii.gz zstat1_mask.nii.gz
    
    - Cluster based thresholding. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
        
        cluster --dlh = 0.0023683100 
                --in = zstat1_mask.nii.gz 
                --oindex = zstat1_cluster_index.nii.gz 
                --olmax = zstat1_cluster_localmax.txt
                --othresh = zstat1_cluster_threshold.nii.gz 
                --pthresh = 0.0500000000 
                --thresh = 2.3000000000 
                --volume = 197071
                
        arguments 
        --in    :    filename of input volume
        --dlh   :    smoothness estimate = sqrt(det(Lambda))
        --oindex  :  filename for output of cluster index
        --othresh :  filename for output of thresholded image
        --olmax   :  filename for output of local maxima text file
        --volume  :  number of voxels in the mask
        --pthresh :  p-threshold for clusters
        --thresh  :  threshold for input volume
        
     Z statistic image is thresholded to show which voxels or clusters of voxels are activated at a particular significance level.
     A Z statistic threshold is used to define contiguous clusters. Then each cluster's estimated significance level (from GRF-theory) 
     is compared with the cluster probability threshold. Significant clusters are then used to mask the original Z statistic image.
    
    - Get the maximum intensity value of the output thresholded image. This value is used while rendering the Z statistic image:: 
        
        fslstats zstat1_cluster_threshold.nii.gz -R
        
        arguments
        -R  : output <min intensity> <max intensity>

    - Rendering. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
         
        overlay 1 0 MNI152_T1_2mm_brain.nii.gz 
               -a zstat1_cluster_threshold.nii.gz 
               2.30 15.67 
               zstat1_cluster_threshold_overlay.nii.gz
               
        slicer zstat1_cluster_threshold_overlay.nii.gz 
               -L  -A 750 
               zstat1_cluster_threshold_overlay.png
    
      The Z statistic range selected for rendering is automatically calculated by default, 
      to run from red (minimum Z statistic after thresholding) to yellow (maximum Z statistic, here 
      maximum intensity).
      
    High Level Workflow Graph:
    
    .. image:: ../images/easy_thresh.dot.png
       :width: 800
    
    
    Detailed Workflow Graph:
    
    .. image:: ../images/easy_thresh_detailed.dot.png
       :width: 800
               
    Examples
    --------
    
    >>> import easy_thresh
    >>> preproc = easy_thresh.easy_thresh("new_workflow")
    >>> preproc.inputs.inputspec.z_stats= 'flameo/stats/zstat1.nii.gz'
    >>> preproc.inputs.inputspec.merge_mask = 'merge_mask/alff_Z_fn2standard_merged_mask.nii.gz'
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.inputs.inputspec.parameters = ('/usr/local/fsl/', 'MNI152')
    >>> preproc.run()  # doctest: +SKIP
    
    """

    easy_thresh = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=[
        'z_stats', 'merge_mask', 'z_threshold', 'p_threshold', 'parameters'
    ]),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'cluster_threshold', 'cluster_index', 'cluster_localmax_txt',
        'overlay_threshold', 'rendered_image'
    ]),
                         name='outputspec')

    ### fsl easythresh
    # estimate image smoothness
    smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                 name='smooth_estimate',
                                 iterfield=['zstat_file'])

    # run clustering after fixing stats header for talspace
    zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                            name='zstat_mask',
                            iterfield=['in_file'])
    #operations to perform
    #-mas use (following image>0) to mask current image
    zstat_mask.inputs.op_string = '-mas %s'

    #fslcpgeom
    #copy certain parts of the header information (image dimensions,
    #voxel dimensions, voxel dimensions units string, image orientation/origin
    #or qform/sform info) from one image to another
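    #`copy_geom` itself is not included in this snippet; a minimal sketch that
    #shells out to fslcpgeom as described above:
    def copy_geom(infile_a, infile_b):
        import subprocess
        #fslcpgeom copies the header geometry of infile_a onto infile_b in place
        subprocess.check_output(['fslcpgeom', infile_a, infile_b])
        return infile_b
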
    copy_geometry = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=copy_geom),
                               name='copy_geometry',
                               iterfield=['infile_a', 'infile_b'])

    ##cluster-based thresholding
    #After carrying out the initial statistical test, the resulting
    #Z statistic image is then normally thresholded to show which voxels or
    #clusters of voxels are activated at a particular significance level.
    #A Z statistic threshold is used to define contiguous clusters.
    #Then each cluster's estimated significance level (from GRF-theory) is
    #compared with the cluster probability threshold. Significant clusters
    #are then used to mask the original Z statistic image for later production
    #of colour blobs.This method of thresholding is an alternative to
    #Voxel-based correction, and is normally more sensitive to activation.
    #    cluster = pe.MapNode(interface=fsl.Cluster(),
    #                            name='cluster',
    #                            iterfield=['in_file', 'volume', 'dlh'])
    #    #output of cluster index (in size order)
    #    cluster.inputs.out_index_file = True
    #    #thresholded image
    #    cluster.inputs.out_threshold_file = True
    #    #local maxima text file
    #    #defines the cluster coordinates
    #    cluster.inputs.out_localmax_txt_file = True
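
    #`call_cluster` is defined elsewhere in C-PAC; a plausible sketch that
    #builds the `cluster` command line documented in the docstring above
    #(`parameters` could be used to locate the binary under FSLDIR):
    def call_cluster(in_file, volume, dlh, threshold, pthreshold, parameters):
        import os
        import subprocess
        prefix = os.path.basename(in_file).split('.')[0]
        index_file = os.path.abspath(prefix + '_cluster_index.nii.gz')
        threshold_file = os.path.abspath(prefix + '_cluster_threshold.nii.gz')
        localmax_txt_file = os.path.abspath(prefix + '_cluster_localmax.txt')
        subprocess.check_output([
            'cluster',
            '--in=%s' % in_file,
            '--dlh=%.10f' % dlh,
            '--thresh=%.10f' % threshold,
            '--pthresh=%.10f' % pthreshold,
            '--volume=%d' % volume,
            '--oindex=%s' % index_file,
            '--othresh=%s' % threshold_file,
            '--olmax=%s' % localmax_txt_file,
        ])
        return index_file, threshold_file, localmax_txt_file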

    cluster = pe.MapNode(util.Function(
        input_names=[
            'in_file', 'volume', 'dlh', 'threshold', 'pthreshold', 'parameters'
        ],
        output_names=['index_file', 'threshold_file', 'localmax_txt_file'],
        function=call_cluster),
                         name='cluster',
                         iterfield=['in_file', 'volume', 'dlh'])

    #max and minimum intensity values
    image_stats = pe.MapNode(interface=fsl.ImageStats(),
                             name='image_stats',
                             iterfield=['in_file'])
    image_stats.inputs.op_string = '-R'

    #create tuple of z_threshold and max intensity value of threshold file
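    #`get_tuple` is not shown here; a minimal sketch, assuming infile_a is the
    #z threshold and infile_b is the (min, max) pair returned by fslstats -R:
    def get_tuple(infile_a, infile_b):
        out_file = (infile_a, infile_b[1])
        return out_file
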
    create_tuple = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=get_tuple),
                              name='create_tuple',
                              iterfield=['infile_b'])

    #colour activation overlaying
    overlay = pe.MapNode(interface=fsl.Overlay(),
                         name='overlay',
                         iterfield=['stat_image', 'stat_thresh'])
    overlay.inputs.transparency = True
    overlay.inputs.auto_thresh_bg = True
    overlay.inputs.out_type = 'float'

    #colour rendering
    slicer = pe.MapNode(interface=fsl.Slicer(),
                        name='slicer',
                        iterfield=['in_file'])
    #set max picture width
    slicer.inputs.image_width = 750
    # set output all axial slices into one picture
    slicer.inputs.all_axial = True

    #function mapnode to get the standard fsl brain image
    #based on parameters as FSLDIR,MNI and voxel size
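    #`get_standard_background_img` is defined elsewhere in C-PAC; a plausible
    #sketch that resolves the standard brain matching the input's voxel size:
    def get_standard_background_img(in_file, file_parameters):
        import os
        from nibabel import load
        fsldir, mni = file_parameters
        voxel_size = load(in_file).header.get_zooms()[2]
        return os.path.join(fsldir, 'data', 'standard',
                            '%s_T1_%dmm_brain.nii.gz' % (mni, int(voxel_size)))
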
    get_backgroundimage = pe.MapNode(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                     name='get_bckgrndimg1',
                                     iterfield=['in_file'])

    #function node to get the standard fsl brain image
    #outputs single file
    get_backgroundimage2 = pe.Node(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                   name='get_backgrndimg2')

    #connections
    easy_thresh.connect(inputnode, 'z_stats', smooth_estimate, 'zstat_file')
    easy_thresh.connect(inputnode, 'merge_mask', smooth_estimate, 'mask_file')

    easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
    easy_thresh.connect(inputnode, 'merge_mask', zstat_mask, 'operand_files')

    easy_thresh.connect(zstat_mask, 'out_file', get_backgroundimage, 'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage, 'out_file', copy_geometry,
                        'infile_a')
    easy_thresh.connect(zstat_mask, 'out_file', copy_geometry, 'infile_b')

    easy_thresh.connect(copy_geometry, 'out_file', cluster, 'in_file')
    easy_thresh.connect(inputnode, 'z_threshold', cluster, 'threshold')
    easy_thresh.connect(inputnode, 'p_threshold', cluster, 'pthreshold')
    easy_thresh.connect(smooth_estimate, 'volume', cluster, 'volume')
    easy_thresh.connect(smooth_estimate, 'dlh', cluster, 'dlh')
    easy_thresh.connect(inputnode, 'parameters', cluster, 'parameters')

    easy_thresh.connect(cluster, 'threshold_file', image_stats, 'in_file')

    easy_thresh.connect(image_stats, 'out_stat', create_tuple, 'infile_b')
    easy_thresh.connect(inputnode, 'z_threshold', create_tuple, 'infile_a')

    easy_thresh.connect(cluster, 'threshold_file', overlay, 'stat_image')
    easy_thresh.connect(create_tuple, 'out_file', overlay, 'stat_thresh')

    easy_thresh.connect(inputnode, 'merge_mask', get_backgroundimage2,
                        'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage2,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage2, 'out_file', overlay,
                        'background_image')

    easy_thresh.connect(overlay, 'out_file', slicer, 'in_file')

    easy_thresh.connect(cluster, 'threshold_file', outputnode,
                        'cluster_threshold')
    easy_thresh.connect(cluster, 'index_file', outputnode, 'cluster_index')
    easy_thresh.connect(cluster, 'localmax_txt_file', outputnode,
                        'cluster_localmax_txt')
    easy_thresh.connect(overlay, 'out_file', outputnode, 'overlay_threshold')
    easy_thresh.connect(slicer, 'out_file', outputnode, 'rendered_image')

    return easy_thresh
Example #18
NodeHash_347043c0.inputs.interp = 'trilinear'

#Wraps command **applywarp**
NodeHash_d73fcb0 = pe.MapNode(interface = fsl.ApplyWarp(), name = 'NodeName_d73fcb0', iterfield = ['field_file', 'in_file', 'premat'])
NodeHash_d73fcb0.inputs.interp = 'trilinear'

#Wraps command **fslmerge**
NodeHash_264457d0 = pe.Node(interface = fsl.Merge(), name = 'NodeName_264457d0')
NodeHash_264457d0.inputs.dimension = 't'

#Wraps command **flameo**
NodeHash_882ac40 = pe.Node(interface = fsl.FLAMEO(), name = 'NodeName_882ac40')
NodeHash_882ac40.inputs.run_mode = 'flame1'

#Wraps command **smoothest**
NodeHash_33f1eba0 = pe.Node(interface = fsl.SmoothEstimate(), name = 'NodeName_33f1eba0')

#Wraps command **cluster**
NodeHash_1978f9c0 = pe.Node(interface = fsl.Cluster(), name = 'NodeName_1978f9c0')
NodeHash_1978f9c0.inputs.pthreshold = 0.05
NodeHash_1978f9c0.inputs.threshold = 2.3

#Wraps command **fslmerge**
NodeHash_3c0ae30 = pe.Node(interface = fsl.Merge(), name = 'NodeName_3c0ae30')
NodeHash_3c0ae30.inputs.dimension = 't'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_2e292140, 'MNI_brain', NodeHash_d73fcb0, 'ref_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_brain', NodeHash_347043c0, 'ref_file')
analysisflow.connect(NodeHash_2e292140, 'MNI_head', NodeHash_1e7cc750, 'ref_file')
#Fit the design to the voxel time-series
design = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.mat'
t_contrast = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.con'
f_contrast = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.fts'

Film_Gls = Node(fsl.FILMGLS(), name = 'Fit_Design_to_Timeseries')
Film_Gls.inputs.design_file = design
Film_Gls.inputs.tcon_file = t_contrast
Film_Gls.inputs.fcon_file = f_contrast
Film_Gls.inputs.threshold = 1000.0
Film_Gls.inputs.smooth_autocorr = True

#-----------------------------------------------------------------------------------------------------
# In[15]:
#Estimate smoothness of the image
Smooth_Est = Node(fsl.SmoothEstimate(), name = 'Smooth_Estimation')
Smooth_Est.inputs.dof = 148  # 150 volumes with only one regressor (plus the mean) leaves 148 dof

#-----------------------------------------------------------------------------------------------------
# In[15]:
#Clustering on the statistical output of t-contrasts
Clustering_t = Node(fsl.Cluster(), name = 'Clustering_t_Contrast')
Clustering_t.inputs.threshold = 2.3
Clustering_t.inputs.pthreshold = 0.05
Clustering_t.inputs.out_threshold_file = 'thresh_zstat1.nii.gz'
# Clustering_t.inputs.out_index_file = 'mask_zstat1'
# Clustering_t.inputs.out_localmax_txt_file = 'localmax'



#-----------------------------------------------------------------------------------------------------
Example #20
File: base.py Project: fossabot/Halfpipe-1
def init_model_wf(workdir=None, numinputs=1, model=None, variables=None, memcalc=MemoryCalculator()):
    name = f"{formatlikebids(model.name)}_wf"
    workflow = pe.Workflow(name=name)

    if model is None:
        return workflow

    #
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[f"in{i:d}" for i in range(1, numinputs + 1)]),
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]), name="outputnode")

    # setup outputs
    make_resultdicts_a = pe.Node(
        MakeResultdicts(
            tagkeys=["model", "contrast"],
            imagekeys=["design_matrix", "contrast_matrix"],
            deletekeys=["contrast"],
        ),
        name="make_resultdicts_a",
    )

    statmaps = ["effect", "variance", "z", "dof", "mask"]
    make_resultdicts_b = pe.Node(
        MakeResultdicts(
            tagkeys=["model", "contrast"],
            imagekeys=statmaps,
            metadatakeys=["critical_z"],
            missingvalues=[None, False],  # need to use False because traits doesn't support NoneType
        ),
        name="make_resultdicts_b",
    )

    if model is not None:
        make_resultdicts_a.inputs.model = model.name
        make_resultdicts_b.inputs.model = model.name

    # only output statistical map (_b) result dicts because the design matrix (_a) is
    # not relevant for higher level analyses
    workflow.connect(make_resultdicts_b, "resultdicts", outputnode, "resultdicts")

    # copy out results
    merge_resultdicts_b = pe.Node(niu.Merge(2), name="merge_resultdicts_b")
    workflow.connect(make_resultdicts_a, "resultdicts", merge_resultdicts_b, "in1")
    workflow.connect(make_resultdicts_b, "resultdicts", merge_resultdicts_b, "in2")

    resultdict_datasink = pe.Node(
        ResultdictDatasink(base_directory=workdir), name="resultdict_datasink"
    )
    workflow.connect(merge_resultdicts_b, "out", resultdict_datasink, "indicts")

    # merge inputs
    merge_resultdicts_a = pe.Node(niu.Merge(numinputs), name="merge_resultdicts_a")
    for i in range(1, numinputs + 1):
        workflow.connect(inputnode, f"in{i:d}", merge_resultdicts_a, f"in{i:d}")

    # filter inputs
    filterkwargs = dict(
        requireoneofimages=["effect", "reho", "falff", "alff"],
        excludefiles=str(Path(workdir) / "exclude*.json"),
    )
    if hasattr(model, "filters") and model.filters is not None and len(model.filters) > 0:
        filterkwargs.update(dict(filterdicts=model.filters))
    if hasattr(model, "spreadsheet"):
        if model.spreadsheet is not None and variables is not None:
            filterkwargs.update(dict(spreadsheet=model.spreadsheet, variabledicts=variables))
    filterresultdicts = pe.Node(
        interface=FilterResultdicts(**filterkwargs),
        name="filterresultdicts",
    )
    workflow.connect(merge_resultdicts_a, "out", filterresultdicts, "indicts")

    # aggregate data structures
    # output is a list where each element represents a separate model run
    aggregateresultdicts = pe.Node(
        AggregateResultdicts(numinputs=1, across=model.across), name="aggregateresultdicts"
    )
    workflow.connect(filterresultdicts, "resultdicts", aggregateresultdicts, "in1")

    # extract fields from the aggregated data structure
    aliases = dict(effect=["reho", "falff", "alff"])
    extractfromresultdict = pe.MapNode(
        ExtractFromResultdict(keys=[model.across, *statmaps], aliases=aliases),
        iterfield="indict",
        name="extractfromresultdict",
    )
    workflow.connect(aggregateresultdicts, "resultdicts", extractfromresultdict, "indict")

    # copy over aggregated metadata and tags to outputs
    for make_resultdicts_node in [make_resultdicts_a, make_resultdicts_b]:
        workflow.connect(extractfromresultdict, "tags", make_resultdicts_node, "tags")
        workflow.connect(extractfromresultdict, "metadata", make_resultdicts_node, "metadata")
        workflow.connect(extractfromresultdict, "vals", make_resultdicts_node, "vals")

    # create models
    if model.type in ["fe", "me"]:  # intercept only model
        countimages = pe.Node(
            niu.Function(input_names=["arrarr"], output_names=["image_count"], function=lenforeach),
            name="countimages",
        )
        workflow.connect(extractfromresultdict, "effect", countimages, "arrarr")

        modelspec = pe.MapNode(
            InterceptOnlyModel(), name="modelspec", iterfield="n_copes", mem_gb=memcalc.min_gb
        )
        workflow.connect(countimages, "image_count", modelspec, "n_copes")

    elif model.type in ["lme"]:  # glm
        modelspec = pe.MapNode(
            LinearModel(
                spreadsheet=model.spreadsheet,
                contrastdicts=model.contrasts,
                variabledicts=variables,
            ),
            name="modelspec",
            iterfield="subjects",
            mem_gb=memcalc.min_gb,
        )
        workflow.connect(extractfromresultdict, "sub", modelspec, "subjects")

    else:
        raise ValueError(f'Unknown model type "{model.type}"')

    workflow.connect(modelspec, "contrast_names", make_resultdicts_b, "contrast")

    # run models
    if model.type in ["fe"]:

        # need to merge
        mergenodeargs = dict(iterfield="in_files", mem_gb=memcalc.volume_std_gb * numinputs)
        mergemask = pe.MapNode(MergeMask(), name="mergemask", **mergenodeargs)
        workflow.connect(extractfromresultdict, "mask", mergemask, "in_files")

        mergeeffect = pe.MapNode(Merge(dimension="t"), name="mergeeffect", **mergenodeargs)
        workflow.connect(extractfromresultdict, "effect", mergeeffect, "in_files")

        mergevariance = pe.MapNode(Merge(dimension="t"), name="mergevariance", **mergenodeargs)
        workflow.connect(extractfromresultdict, "variance", mergevariance, "in_files")

        fe_run_mode = pe.MapNode(
            niu.Function(input_names=["var_cope_file"], output_names=["run_mode"], function=_fe_run_mode),
            iterfield=["var_cope_file"],
            name="fe_run_mode",
        )
        workflow.connect(mergevariance, "merged_file", fe_run_mode, "var_cope_file")

        # prepare design matrix
        multipleregressdesign = pe.MapNode(
            fsl.MultipleRegressDesign(),
            name="multipleregressdesign",
            iterfield=["regressors", "contrasts"],
            mem_gb=memcalc.min_gb,
        )
        workflow.connect(modelspec, "regressors", multipleregressdesign, "regressors")
        workflow.connect(modelspec, "contrasts", multipleregressdesign, "contrasts")

        # use FSL implementation
        modelfit = pe.MapNode(
            FSLFLAMEO(),
            name="modelfit",
            mem_gb=memcalc.volume_std_gb * 100,
            iterfield=[
                "run_mode",
                "mask_file",
                "cope_file",
                "var_cope_file",
                "design_file",
                "t_con_file",
                "cov_split_file",
            ],
        )
        workflow.connect(fe_run_mode, "run_mode", modelfit, "run_mode")
        workflow.connect(mergemask, "merged_file", modelfit, "mask_file")
        workflow.connect(mergeeffect, "merged_file", modelfit, "cope_file")
        workflow.connect(mergevariance, "merged_file", modelfit, "var_cope_file")
        workflow.connect(multipleregressdesign, "design_mat", modelfit, "design_file")
        workflow.connect(multipleregressdesign, "design_con", modelfit, "t_con_file")
        workflow.connect(multipleregressdesign, "design_grp", modelfit, "cov_split_file")

        # mask output
        workflow.connect(mergemask, "merged_file", make_resultdicts_b, "mask")

    elif model.type in ["me", "lme"]:

        # use custom implementation
        modelfit = pe.MapNode(
            FLAME1(),
            name="modelfit",
            n_procs=config.nipype.omp_nthreads,
            mem_gb=memcalc.volume_std_gb * 100,
            iterfield=[
                "mask_files",
                "cope_files",
                "var_cope_files",
                "regressors",
                "contrasts",
            ],
        )
        workflow.connect(extractfromresultdict, "mask", modelfit, "mask_files")
        workflow.connect(extractfromresultdict, "effect", modelfit, "cope_files")
        workflow.connect(extractfromresultdict, "variance", modelfit, "var_cope_files")

        workflow.connect(modelspec, "regressors", modelfit, "regressors")
        workflow.connect(modelspec, "contrasts", modelfit, "contrasts")

        # mask output
        workflow.connect(modelfit, "masks", make_resultdicts_b, "mask")

        # random field theory
        smoothest = pe.MapNode(fsl.SmoothEstimate(), iterfield=["zstat_file", "mask_file"], name="smoothest")
        workflow.connect([(modelfit, smoothest, [(("zstats", ravel), "zstat_file")])])
        workflow.connect([(modelfit, smoothest, [(("masks", ravel), "mask_file")])])

        criticalz = pe.MapNode(
            niu.Function(input_names=["resels"], output_names=["critical_z"], function=_critical_z),
            iterfield=["resels"],
            name="criticalz",
        )
        workflow.connect(smoothest, "resels", criticalz, "resels")
        workflow.connect(criticalz, "critical_z", make_resultdicts_b, "critical_z")

    workflow.connect(modelfit, "copes", make_resultdicts_b, "effect")
    workflow.connect(modelfit, "var_copes", make_resultdicts_b, "variance")
    workflow.connect(modelfit, "zstats", make_resultdicts_b, "z")
    workflow.connect(modelfit, "tdof", make_resultdicts_b, "dof")

    # make tsv files for design and contrast matrices
    maketsv = pe.MapNode(
        MakeDesignTsv(),
        iterfield=["regressors", "contrasts", "row_index"],
        name="maketsv"
    )
    workflow.connect(extractfromresultdict, model.across, maketsv, "row_index")
    workflow.connect(modelspec, "regressors", maketsv, "regressors")
    workflow.connect(modelspec, "contrasts", maketsv, "contrasts")

    workflow.connect(maketsv, "design_tsv", make_resultdicts_a, "design_matrix")
    workflow.connect(maketsv, "contrasts_tsv", make_resultdicts_a, "contrast_matrix")

    return workflow