def create_2lvl_rand(name="group_randomize", mask=None, iters=5000):
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wk = pe.Workflow(name=name)

    inputspec = pe.Node(niu.IdentityInterface(fields=[
        'copes', 'varcopes', 'template', "contrasts", "group", "regressors"
    ]),
                        name='inputspec')

    model = pe.Node(fsl.MultipleRegressDesign(), name='l2model')

    wk.connect(inputspec, 'contrasts', model, "contrasts")
    wk.connect(inputspec, 'regressors', model, "regressors")
    wk.connect(inputspec, 'group', model, 'groups')

    mergecopes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes')

    rand = pe.Node(fsl.Randomise(base_name='TwoSampleT',
                                 raw_stats_imgs=True,
                                 tfce=True,
                                 num_perm=iters),
                   name='randomize')

    wk.connect(inputspec, 'copes', mergecopes, 'in_files')
    wk.connect(model, 'design_mat', rand, 'design_mat')
    wk.connect(model, 'design_con', rand, 'tcon')
    wk.connect(mergecopes, 'merged_file', rand, 'in_file')
    wk.connect(model, 'design_grp', rand, 'x_block_labels')

    if mask is None:
        bet = pe.Node(fsl.BET(mask=True, frac=0.3), name="template_brainmask")
        wk.connect(inputspec, 'template', bet, 'in_file')
        wk.connect(bet, 'mask_file', rand, 'mask')

    else:
        wk.connect(inputspec, 'template', rand, 'mask')

    outputspec = pe.Node(niu.IdentityInterface(fields=[
        'f_corrected_p_files', 'f_p_files', 'fstat_files',
        't_corrected_p_files', 't_p_files', 'tstat_files', 'mask'
    ]),
                         name='outputspec')

    wk.connect(rand, 'f_corrected_p_files', outputspec, 'f_corrected_p_files')
    wk.connect(rand, 'f_p_files', outputspec, 'f_p_files')
    wk.connect(rand, 'fstat_files', outputspec, 'fstat_files')
    wk.connect(rand, 't_corrected_p_files', outputspec, 't_corrected_p_files')
    wk.connect(rand, 't_p_files', outputspec, 't_p_files')
    wk.connect(rand, 'tstat_files', outputspec, 'tstat_files')
    if mask is None:
        wk.connect(bet, 'mask_file', outputspec, 'mask')
    else:
        wk.connect(inputspec, 'template', outputspec, 'mask')
    return wk
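

# `get_len` is referenced by the workflows below but not shown in the
# original snippet; a minimal assumed definition:
def get_len(inlist):
    return len(inlist)


# Hedged usage sketch for create_2lvl_rand above; every path, contrast
# name and regressor value is a hypothetical placeholder.
# wk = create_2lvl_rand(iters=1000)
# wk.inputs.inputspec.copes = ['sub-01_cope1.nii.gz', 'sub-02_cope1.nii.gz']
# wk.inputs.inputspec.template = 'MNI152_T1_2mm_brain.nii.gz'
# wk.inputs.inputspec.contrasts = [('grp1>grp2', 'T', ['grp1', 'grp2'], [1, -1])]
# wk.inputs.inputspec.regressors = {'grp1': [1, 0], 'grp2': [0, 1]}
# wk.inputs.inputspec.group = [1, 2]
# wk.run()
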
def create_2lvl_rand_onesample(name="group_randomize"):
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.io as nio
    wk = pe.Workflow(name=name)

    inputspec = pe.Node(
        niu.IdentityInterface(fields=['copes', 'varcopes', 'template']),
        name='inputspec')

    model = pe.Node(fsl.L2Model(), name='l2model')

    wk.connect(inputspec, ('copes', get_len), model, 'num_copes')

    mergecopes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes')
    mergevarcopes = pe.Node(fsl.Merge(dimension='t'), name='merge_varcopes')

    rand = pe.Node(fsl.Randomise(base_name='OneSampleT',
                                 raw_stats_imgs=True,
                                 tfce=True),
                   name='randomize')

    wk.connect(inputspec, 'copes', mergecopes, 'in_files')
    wk.connect(inputspec, 'varcopes', mergevarcopes, 'in_files')
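    # note: the merged varcopes are never passed to Randomise below;
    # randomise permutes the copes only (varcopes would matter for FLAMEO)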
    wk.connect(model, 'design_mat', rand, 'design_mat')
    wk.connect(model, 'design_con', rand, 'tcon')
    wk.connect(mergecopes, 'merged_file', rand, 'in_file')
    #wk.connect(model,'design_grp',rand,'cov_split_file')

    bet = pe.Node(fsl.BET(mask=True, frac=0.3), name="template_brainmask")
    wk.connect(inputspec, 'template', bet, 'in_file')
    wk.connect(bet, 'mask_file', rand, 'mask')

    outputspec = pe.Node(niu.IdentityInterface(fields=[
        'f_corrected_p_files', 'f_p_files', 'fstat_files',
        't_corrected_p_files', 't_p_files', 'tstat_files', 'mask'
    ]),
                         name='outputspec')

    wk.connect(rand, 'f_corrected_p_files', outputspec, 'f_corrected_p_files')
    wk.connect(rand, 'f_p_files', outputspec, 'f_p_files')
    wk.connect(rand, 'fstat_files', outputspec, 'fstat_files')
    wk.connect(rand, 't_corrected_p_files', outputspec, 't_corrected_p_files')
    wk.connect(rand, 't_p_files', outputspec, 't_p_files')
    wk.connect(rand, 'tstat_files', outputspec, 'tstat_files')
    wk.connect(bet, 'mask_file', outputspec, 'mask')

    return wk
Example #3
# imports assumed by this snippet; `mean_mask` and `smooth_copes` come
# from earlier steps of the original script (not shown here)
import os
import nilearn.image
import nilearn.plotting
nilearn.plotting.plot_stat_map(mean_mask)
group_mask = nilearn.image.math_img("a>=0.95", a=mean_mask)
nilearn.plotting.plot_roi(group_mask)


#%% Creating concatenated contrast (across subjects) and group mask
copes_concat = nilearn.image.concat_imgs(smooth_copes, auto_resample=True)
copes_concat.to_filename("/media/Data/work/KPE_SPM/fslRandomize/TraumaVsSad_cope.nii.gz")

group_mask = nilearn.image.resample_to_img(group_mask, copes_concat, interpolation='nearest')
group_mask.to_filename(os.path.join("/media/Data/work/KPE_SPM/fslRandomize",  "group_mask.nii.gz"))

#%% Running randomization
from nipype.interfaces import fsl
import nipype.pipeline.engine as pe  # pypeline engine
randomize = pe.Node(interface=fsl.Randomise(),
                    base_dir='/media/Data/work/KPE_SPM/fslRandomize',
                    name='randomize')
randomize.inputs.in_file = '/media/Data/work/KPE_SPM/fslRandomize/TraumaVsSad_cope.nii.gz'  # choose which file to run permutation test on
randomize.inputs.mask = '/media/Data/work/KPE_SPM/fslRandomize/group_mask.nii.gz'  # group mask file (was created earlier)
randomize.inputs.one_sample_group_mean = True
randomize.inputs.tfce = True
randomize.inputs.vox_p_values = True
randomize.inputs.num_perm = 200
#randomize.inputs.var_smooth = 5

randomize.run()
#%% Graph it
fig = nilearn.plotting.plot_stat_map('/media/Data/work/KPE_SPM/fslRandomize/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
fig.add_contours('/media/Data/work/KPE_SPM/fslRandomize/randomize/randomise_tfce_corrp_tstat1.nii.gz', levels=[0.99], colors='w')
#%% opposite image run
fig = nilearn.plotting.plot_stat_map('/media/Data/work/custom_modelling_spm/neg/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
datasink.inputs.container = output_dir
datasink.inputs.base_directory = experiment_dir

substitutions = [('_map_id_', ' ')]

datasink.inputs.substitutions = substitutions

#-----------------------------------------------------------------------------------------------------
#Design with two contrasts only

design = '/home/in/aeed/TBSS/Design_TBSS.mat'
contrast = '/home/in/aeed/TBSS/Design_TBSS.con'

#-----------------------------------------------------------------------------------------------------
#randomise on the skeletonised data
randomise_tbss = Node(fsl.Randomise(), name='randomise_tbss')
randomise_tbss.inputs.design_mat = design
randomise_tbss.inputs.tcon = contrast
randomise_tbss.inputs.num_perm = 10000
randomise_tbss.inputs.tfce2D = True
randomise_tbss.inputs.vox_p_values = True
randomise_tbss.inputs.base_name = 'TBSS_'


#-----------------------------------------------------------------------------------------------------
#smoothing the images
def nilearn_smoothing(image):
    from nilearn.image import smooth_img
    import os

    # assumed completion (the original snippet is truncated here):
    # smooth with a 6 mm FWHM kernel and return the new file path
    smoothed_img = smooth_img(image, fwhm=6)
    out_file = os.path.abspath('smoothed_' + os.path.basename(image))
    smoothed_img.to_filename(out_file)
    return out_file
Example #5
maskemerge = pe.Node(interface=fsl.Merge(dimension='t'),
                     name="maskemerge")
#copeImages = glob.glob('/media/Data/work/firstLevelKPE/_subject_id_*/feat_fit/run0.feat/stats/cope1.nii.gz')
#copemerge.inputs.in_files = copeImages



# Configure FSL 2nd level analysis
l2_model = pe.Node(fsl.L2Model(), name='l2_model')

flameo_ols = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo_ols')
def _len(inlist):
    print(len(inlist))
    return len(inlist)
### use randomize
rand = pe.Node(fsl.Randomise(), name="randomize")


rand.inputs.mask = '/media/Data/work/KPE_SPM/fslRandomize/group_mask.nii.gz' # group mask file (was created earlier)
rand.inputs.one_sample_group_mean = True
rand.inputs.tfce = True
rand.inputs.vox_p_values = True
rand.inputs.num_perm = 200
# Thresholding - FDR ################################################
# Calculate pvalues with ztop
fdr_ztop = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='fdr_ztop')
# Find FDR threshold: fdr -i zstat1_pval -m <group_mask> -q 0.05
# fdr_th = <write Nipype interface for fdr>
# Apply threshold:
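
# `select` is used by the Function nodes in the next two examples but is
# not shown in the original; a minimal assumed definition that picks the
# first file out of a Randomise output list:
def select(input_list):
    if isinstance(input_list, list):
        return input_list[0]
    return input_list
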
def create_randomise(name='randomise', working_dir=None, crash_dir=None):
    """
    Parameters
    ----------
        
    Returns
    -------
    workflow : nipype.pipeline.engine.Workflow
        Randomise workflow.
        
    Notes
    -----
    
    Workflow Inputs::
    
        
    Workflow Outputs::

    
    References
    ----------
    
    """

    if not working_dir:
        working_dir = os.path.join(os.getcwd(), 'Randomise_work_dir')
    if not crash_dir:
        crash_dir = os.path.join(os.getcwd(), 'Randomise_crash_dir')

    wf = pe.Workflow(name=name)
    wf.base_dir = working_dir
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(crash_dir)
    }

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'subjects', 'design_matrix_file', 'contrast_file',
        'pipeline_output_folder', 'permutations', 'mask_boolean',
        'demean', 'c_thresh'
    ]),
                        name='inputspec')

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'tstat_files', 't_corrected_p_files', 'out_tcorr_corrected',
        'out_tstat_corrected', 'thresh_tstat_file', 'thresh_bin_out',
        'index_file', 'threshold_file', 'localmax_txt_file',
        'localmax_vol_file', 'max_file', 'mean_file', 'pval_file',
        'size_file'
    ]),
                         name='outputspec')

    #merge = pe.Node(interface=fsl.Merge(), name='fsl_merge')
    #merge.inputs.dimension = 't'
    #merge.inputs.merged_file = "randomise_merged.nii.gz"

    #wf.connect(inputspec, 'subjects', merge, 'in_files')

    #mask = pe.Node(interface=fsl.maths.MathsCommand(), name='fsl_maths')
    #mask.inputs.args = '-abs -Tmin -bin'
    #mask.inputs.out_file = "randomise_mask.nii.gz"
    #wf.connect(inputspec, 'subjects', mask, 'in_file')

    randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
    randomise.inputs.base_name = "randomise"
    randomise.inputs.demean = True
    randomise.inputs.tfce = True
    wf.connect([(inputspec, randomise, [
        ('subjects', 'in_file'),
        ('design_matrix_file', 'design_mat'),
        ('contrast_file', 'tcon'),
        ('permutations', 'num_perm'),
    ])])
    wf.connect(randomise, 'tstat_files', outputspec, 'tstat_files')
    wf.connect(randomise, 't_corrected_p_files', outputspec,
               't_corrected_p_files')
    # NOTE: without TFCE enabled, Randomise does not produce
    # t_corrected_p_files, so the connection above would have nothing to send.

    select_tcorrp_files = pe.Node(Function(input_names=['input_list'],
                                           output_names=['out_file'],
                                           function=select),
                                  name='select_t_corrp')

    wf.connect(randomise, 't_corrected_p_files', select_tcorrp_files,
               'input_list')
    wf.connect(select_tcorrp_files, 'out_file', outputspec,
               'out_tcorr_corrected')

    select_tstat_files = pe.Node(Function(input_names=['input_list'],
                                          output_names=['out_file'],
                                          function=select),
                                 name='select_t_stat')

    wf.connect(randomise, 'tstat_files', select_tstat_files, 'input_list')
    wf.connect(select_tstat_files, 'out_file', outputspec,
               'out_tstat_corrected')

    thresh = pe.Node(interface=fsl.Threshold(), name='fsl_threshold_contrast')
    thresh.inputs.thresh = 0.95
    thresh.inputs.out_file = 'rando_pipe_thresh_tstat.nii.gz'
    wf.connect(select_tstat_files, 'out_file', thresh, 'in_file')
    wf.connect(thresh, 'out_file', outputspec, 'thresh_tstat_file')

    thresh_bin = pe.Node(interface=fsl.UnaryMaths(),
                         name='fsl_threshold_bin_contrast')
    thresh_bin.inputs.operation = 'bin'
    wf.connect(thresh, 'out_file', thresh_bin, 'in_file')
    wf.connect(thresh_bin, 'out_file', outputspec, 'thresh_bin_out')

    apply_mask = pe.Node(interface=fsl.ApplyMask(),
                         name='fsl_applymask_contrast')
    wf.connect(select_tstat_files, 'out_file', apply_mask, 'in_file')
    wf.connect(thresh_bin, 'out_file', apply_mask, 'mask_file')

    cluster = pe.Node(interface=fsl.Cluster(), name='cluster_contrast')
    cluster.inputs.threshold = 0.0001
    cluster.inputs.out_index_file = "index_file"
    cluster.inputs.out_localmax_txt_file = "lmax_contrast.txt"
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_size_file = "cluster_size_contrast"
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_mean_file = True
    cluster.inputs.out_pval_file = True

    wf.connect(apply_mask, 'out_file', cluster, 'in_file')

    wf.connect(cluster, 'index_file', outputspec, 'index_file')
    wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    wf.connect(cluster, 'localmax_txt_file', outputspec, 'localmax_txt_file')
    wf.connect(cluster, 'localmax_vol_file', outputspec, 'localmax_vol_file')
    wf.connect(cluster, 'max_file', outputspec, 'max_file')
    wf.connect(cluster, 'mean_file', outputspec, 'mean_file')
    wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    wf.connect(cluster, 'size_file', outputspec, 'size_file')

    return wf
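
# Hedged usage sketch for create_randomise; every value below is a
# placeholder.
# wf = create_randomise()
# wf.inputs.inputspec.subjects = 'all_copes_merged.nii.gz'
# wf.inputs.inputspec.design_matrix_file = 'design.mat'
# wf.inputs.inputspec.contrast_file = 'design.con'
# wf.inputs.inputspec.permutations = 5000
# wf.run()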
Example #7
def prep_randomise_workflow(c,
                            merged_file,
                            mask_file,
                            f_test,
                            mat_file,
                            con_file,
                            grp_file,
                            output_dir,
                            working_dir,
                            log_dir,
                            model_name,
                            fts_file=None):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as nio

    wf = pe.Workflow(name='randomise_workflow')
    wf.base_dir = c.work_dir

    randomise = pe.Node(interface=fsl.Randomise(),
                        name='fsl-randomise_{0}'.format(model_name))
    randomise.inputs.base_name = model_name
    randomise.inputs.in_file = merged_file
    randomise.inputs.mask = mask_file
    randomise.inputs.num_perm = c.randomise_permutation
    randomise.inputs.demean = c.randomise_demean
    randomise.inputs.c_thresh = c.randomise_thresh
    randomise.inputs.tfce = c.randomise_tfce

    randomise.inputs.design_mat = mat_file
    randomise.inputs.tcon = con_file

    if fts_file:
        randomise.inputs.fcon = fts_file

    select_tcorrp_files = pe.Node(util.Function(input_names=['input_list'],
                                                output_names=['out_file'],
                                                function=select),
                                  name='select_t_corrp')

    wf.connect(randomise, 't_corrected_p_files', select_tcorrp_files,
               'input_list')

    select_tstat_files = pe.Node(util.Function(input_names=['input_list'],
                                               output_names=['out_file'],
                                               function=select),
                                 name='select_t_stat')

    wf.connect(randomise, 'tstat_files', select_tstat_files, 'input_list')

    thresh = pe.Node(interface=fsl.Threshold(), name='fsl_threshold_contrast')
    thresh.inputs.thresh = 0.95
    thresh.inputs.out_file = 'randomise_pipe_thresh_tstat.nii.gz'
    wf.connect(select_tstat_files, 'out_file', thresh, 'in_file')

    thresh_bin = pe.Node(interface=fsl.UnaryMaths(),
                         name='fsl_threshold_bin_contrast')
    thresh_bin.inputs.operation = 'bin'
    wf.connect(thresh, 'out_file', thresh_bin, 'in_file')

    apply_mask = pe.Node(interface=fsl.ApplyMask(),
                         name='fsl_applymask_contrast')
    wf.connect(select_tstat_files, 'out_file', apply_mask, 'in_file')
    wf.connect(thresh_bin, 'out_file', apply_mask, 'mask_file')

    cluster = pe.Node(interface=fsl.Cluster(), name='cluster_contrast')
    cluster.inputs.threshold = 0.0001
    cluster.inputs.out_index_file = "index_file"
    cluster.inputs.out_localmax_txt_file = "lmax_contrast.txt"
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_size_file = "cluster_size_contrast"
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_mean_file = True
    cluster.inputs.out_pval_file = True

    wf.connect(apply_mask, 'out_file', cluster, 'in_file')

    ds = pe.Node(nio.DataSink(), name='fsl-randomise_sink')

    ds.inputs.base_directory = str(output_dir)
    ds.inputs.container = ''

    wf.connect(randomise, 'tstat_files', ds, 'tstat_files')
    wf.connect(randomise, 't_corrected_p_files', ds, 't_corrected_p_files')
    wf.connect(select_tcorrp_files, 'out_file', ds, 'out_tcorr_corrected')
    wf.connect(select_tstat_files, 'out_file', ds, 'out_tstat_corrected')
    wf.connect(thresh, 'out_file', ds, 'randomise_pipe_thresh_tstat')
    wf.connect(thresh_bin, 'out_file', ds, 'thresh_bin_out')
    wf.connect(cluster, 'index_file', ds, 'index_file')
    wf.connect(cluster, 'threshold_file', ds, 'threshold_file')
    wf.connect(cluster, 'localmax_txt_file', ds, 'localmax_txt_file')
    wf.connect(cluster, 'localmax_vol_file', ds, 'localmax_vol_file')
    wf.connect(cluster, 'max_file', ds, 'max_file')
    wf.connect(cluster, 'mean_file', ds, 'mean_file')
    wf.connect(cluster, 'pval_file', ds, 'pval_file')
    wf.connect(cluster, 'size_file', ds, 'size_file')

    wf.run()
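
# Hypothetical call of prep_randomise_workflow; the config object `c` and
# all paths are assumptions.
# prep_randomise_workflow(c, merged_file='all_copes.nii.gz',
#                         mask_file='group_mask.nii.gz', f_test=False,
#                         mat_file='design.mat', con_file='design.con',
#                         grp_file='design.grp', output_dir='/out',
#                         working_dir='/work', log_dir='/log',
#                         model_name='model1')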
#node for group-specific level 2 model
grp_l2model = Node(L2Model(), name='grp_l2model')
grp_l2model.inputs.ignore_exception = False
group_wf.connect(inputspec, ('copes', get_len), grp_l2model, 'num_copes')

#node to concatenate copes into single image across time
grp_merge_copes = Node(fsl.utils.Merge(), name='grp_merge_copes')
grp_merge_copes.inputs.dimension = 't'  #concatenate across time
grp_merge_copes.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_merge_copes.inputs.ignore_exception = False
grp_merge_copes.inputs.output_type = 'NIFTI_GZ'
grp_merge_copes.inputs.terminal_output = 'stream'
group_wf.connect(inputspec, 'copes', grp_merge_copes, 'in_files')

#node for Randomise
grp_randomise = Node(fsl.Randomise(), name='grp_randomise')
grp_randomise.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_randomise.inputs.ignore_exception = False
grp_randomise.inputs.tfce = True
grp_randomise.inputs.base_name = 'oneSampT'
grp_randomise.inputs.num_perm = 5000
grp_randomise.inputs.output_type = 'NIFTI_GZ'
grp_randomise.inputs.terminal_output = 'stream'
grp_randomise.plugin_args = {'bsub_args': '-m IB_40C_1.5T_1'}
group_wf.connect(grp_l2model, 'design_mat', grp_randomise, 'design_mat')
group_wf.connect(grp_l2model, 'design_con', grp_randomise, 'tcon')
group_wf.connect(grp_merge_copes, 'merged_file', grp_randomise, 'in_file')
group_wf.connect(inputspec, 'brain_mask', grp_randomise, 'mask')

#node for group_randomise.sinker
group_randomise_sinker = Node(DataSink(infields=None),
                              name='group_randomise_sinker')  # name assumed; original line truncated here
Example #9
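# assumed context for this example: `pe`, `fsl`, `niu`, `nio` and `os` are
# the usual nipype/stdlib imports, while `dtitk`, `niftyreg`, `niftyseg`
# and create_dtitk_groupwise_workflow come from the niftypipe-style
# modules imported at the top of the original file (not shown here)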
def create_cross_sectional_tbss_pipeline(in_files,
                                         output_dir,
                                         name='cross_sectional_tbss',
                                         skeleton_threshold=0.2,
                                         design_mat=None,
                                         design_con=None):
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Create the dtitk groupwise registration workflow
    groupwise_dtitk = create_dtitk_groupwise_workflow(in_files=in_files,
                                                      name="dtitk_groupwise",
                                                      rig_iteration=3,
                                                      aff_iteration=3,
                                                      nrr_iteration=6)

    # Create the average FA map
    mean_fa = pe.Node(interface=dtitk.TVtool(), name="mean_fa")
    workflow.connect(groupwise_dtitk, 'output_node.out_template', mean_fa,
                     'in_file')
    mean_fa.inputs.operation = 'fa'

    # Register the FMRIB58_FA_1mm.nii.gz atlas to the mean FA map
    reg_atlas = pe.Node(interface=niftyreg.RegAladin(), name='reg_atlas')
    workflow.connect(mean_fa, 'out_file', reg_atlas, 'ref_file')
    reg_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'FMRIB58_FA_1mm.nii.gz')

    # Apply the transformation to the lower cingulum image
    war_atlas = pe.Node(interface=niftyreg.RegResample(), name='war_atlas')
    workflow.connect(mean_fa, 'out_file', war_atlas, 'ref_file')
    war_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'LowerCingulum_1mm.nii.gz')
    workflow.connect(reg_atlas, 'aff_file', war_atlas, 'trans_file')
    war_atlas.inputs.inter_val = 'LIN'

    # Threshold the propagated lower cingulum
    thr_atlas = pe.Node(interface=niftyseg.BinaryMaths(), name='thr_atlas')
    workflow.connect(war_atlas, 'out_file', thr_atlas, 'in_file')
    thr_atlas.inputs.operation = 'thr'
    thr_atlas.inputs.operand_value = 0.5

    # Binarise the propagated lower cingulum
    bin_atlas = pe.Node(interface=niftyseg.UnaryMaths(), name='bin_atlas')
    workflow.connect(thr_atlas, 'out_file', bin_atlas, 'in_file')
    bin_atlas.inputs.operation = 'bin'

    # Create all the individual FA maps
    individual_fa = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_fa",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_fa,
                     'in_file')
    individual_fa.inputs.operation = 'fa'

    # Create all the individual MD maps
    individual_md = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_md",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_md,
                     'in_file')
    individual_md.inputs.operation = 'tr'

    # Create all the individual RD maps
    individual_rd = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_rd",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_rd,
                     'in_file')
    individual_rd.inputs.operation = 'rd'

    # Create all the individual AD maps
    individual_ad = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_ad",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_ad,
                     'in_file')
    individual_ad.inputs.operation = 'ad'

    # Combine all the warped FA images into a 4D image
    merged_4d_fa = pe.Node(interface=fsl.Merge(), name='merged_4d_fa')
    merged_4d_fa.inputs.dimension = 't'
    workflow.connect(individual_fa, 'out_file', merged_4d_fa, 'in_files')

    # Combine all the warped MD images into a 4D image
    merged_4d_md = pe.Node(interface=fsl.Merge(), name='merged_4d_md')
    merged_4d_md.inputs.dimension = 't'
    workflow.connect(individual_md, 'out_file', merged_4d_md, 'in_files')

    # Combine all the warped RD images into a 4D image
    merged_4d_rd = pe.Node(interface=fsl.Merge(), name='merged_4d_rd')
    merged_4d_rd.inputs.dimension = 't'
    workflow.connect(individual_rd, 'out_file', merged_4d_rd, 'in_files')

    # Combine all the warped AD images into a 4D image
    merged_4d_ad = pe.Node(interface=fsl.Merge(), name='merged_4d_ad')
    merged_4d_ad.inputs.dimension = 't'
    workflow.connect(individual_ad, 'out_file', merged_4d_ad, 'in_files')

    # Threshold the 4D FA image to 0
    merged_4d_fa_thresholded = pe.Node(interface=niftyseg.BinaryMaths(),
                                       name='merged_4d_fa_thresholded')
    merged_4d_fa_thresholded.inputs.operation = 'thr'
    merged_4d_fa_thresholded.inputs.operand_value = 0
    workflow.connect(merged_4d_fa, 'merged_file', merged_4d_fa_thresholded,
                     'in_file')

    # Extract the min value from the 4D FA image
    minimal_value_across_all_fa = pe.Node(interface=niftyseg.UnaryMaths(),
                                          name='minimal_value_across_all_fa')
    minimal_value_across_all_fa.inputs.operation = 'tmin'
    workflow.connect(merged_4d_fa_thresholded, 'out_file',
                     minimal_value_across_all_fa, 'in_file')

    # Create the mask image
    fa_mask = pe.Node(interface=niftyseg.UnaryMaths(), name='fa_mask')
    fa_mask.inputs.operation = 'bin'
    fa_mask.inputs.output_datatype = 'char'
    workflow.connect(minimal_value_across_all_fa, 'out_file', fa_mask,
                     'in_file')

    # Mask the mean FA image
    masked_mean_fa = pe.Node(interface=fsl.ApplyMask(), name='masked_mean_fa')
    workflow.connect(mean_fa, 'out_file', masked_mean_fa, 'in_file')
    workflow.connect(fa_mask, 'out_file', masked_mean_fa, 'mask_file')

    # Create the skeleton image
    skeleton = pe.Node(interface=fsl.TractSkeleton(), name='skeleton')
    skeleton.inputs.skeleton_file = True
    workflow.connect(masked_mean_fa, 'out_file', skeleton, 'in_file')

    # Threshold the skeleton image
    thresholded_skeleton = pe.Node(interface=niftyseg.BinaryMaths(),
                                   name='thresholded_skeleton')
    thresholded_skeleton.inputs.operation = 'thr'
    thresholded_skeleton.inputs.operand_value = skeleton_threshold
    workflow.connect(skeleton, 'skeleton_file', thresholded_skeleton,
                     'in_file')

    # Binarise the skeleton image
    binarised_skeleton = pe.Node(interface=niftyseg.UnaryMaths(),
                                 name='binarised_skeleton')
    binarised_skeleton.inputs.operation = 'bin'
    workflow.connect(thresholded_skeleton, 'out_file', binarised_skeleton,
                     'in_file')

    # Create skeleton distance map
    invert_mask1 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask1')
    invert_mask1.inputs.operation = 'mul'
    invert_mask1.inputs.operand_value = -1
    workflow.connect(fa_mask, 'out_file', invert_mask1, 'in_file')
    invert_mask2 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask2')
    invert_mask2.inputs.operation = 'add'
    invert_mask2.inputs.operand_value = 1
    workflow.connect(invert_mask1, 'out_file', invert_mask2, 'in_file')
    invert_mask3 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask3')
    invert_mask3.inputs.operation = 'add'
    workflow.connect(invert_mask2, 'out_file', invert_mask3, 'in_file')
    workflow.connect(binarised_skeleton, 'out_file', invert_mask3,
                     'operand_file')
    distance_map = pe.Node(interface=fsl.DistanceMap(), name='distance_map')
    workflow.connect(invert_mask3, 'out_file', distance_map, 'in_file')

    # Project the FA values onto the skeleton
    all_fa_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_fa_projected')
    all_fa_projected.inputs.threshold = skeleton_threshold
    all_fa_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_fa_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_fa_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_fa_projected,
                     'data_file')
    workflow.connect(bin_atlas, 'out_file', all_fa_projected,
                     'search_mask_file')

    # Project the MD values onto the skeleton
    all_md_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_md_projected')
    all_md_projected.inputs.threshold = skeleton_threshold
    all_md_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_md_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_md_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_md_projected,
                     'data_file')
    workflow.connect(merged_4d_md, 'merged_file', all_md_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_md_projected,
                     'search_mask_file')

    # Project the RD values onto the skeleton
    all_rd_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_rd_projected')
    all_rd_projected.inputs.threshold = skeleton_threshold
    all_rd_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_rd_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_rd_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_rd_projected,
                     'data_file')
    workflow.connect(merged_4d_rd, 'merged_file', all_rd_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_rd_projected,
                     'search_mask_file')

    # Project the AD values onto the skeleton
    all_ad_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_ad_projected')
    all_ad_projected.inputs.threshold = skeleton_threshold
    all_ad_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_ad_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_ad_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_ad_projected,
                     'data_file')
    workflow.connect(merged_4d_ad, 'merged_file', all_ad_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_ad_projected,
                     'search_mask_file')

    # Create an output node
    output_node = pe.Node(interface=niu.IdentityInterface(fields=[
        'mean_fa', 'all_fa_skeletonised', 'all_md_skeletonised',
        'all_rd_skeletonised', 'all_ad_skeletonised', 'skeleton',
        'skeleton_bin', 't_contrast_raw_stat', 't_contrast_uncorrected_pvalue',
        't_contrast_corrected_pvalue'
    ]),
                          name='output_node')

    # Connect the workflow to the output node
    workflow.connect(masked_mean_fa, 'out_file', output_node, 'mean_fa')
    workflow.connect(all_fa_projected, 'projected_data', output_node,
                     'all_fa_skeletonised')
    workflow.connect(all_md_projected, 'projected_data', output_node,
                     'all_md_skeletonised')
    workflow.connect(all_rd_projected, 'projected_data', output_node,
                     'all_rd_skeletonised')
    workflow.connect(all_ad_projected, 'projected_data', output_node,
                     'all_ad_skeletonised')
    workflow.connect(skeleton, 'skeleton_file', output_node, 'skeleton')
    workflow.connect(binarised_skeleton, 'out_file', output_node,
                     'skeleton_bin')

    # Run randomise if required and connect its output to the output node
    if design_mat is not None and design_con is not None:
        randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
        randomise.inputs.base_name = 'stats_tbss'
        randomise.inputs.tfce2D = True
        randomise.inputs.num_perm = 5000
        workflow.connect(all_fa_projected, 'projected_data', randomise,
                         'in_file')
        randomise.inputs.design_mat = design_mat
        randomise.inputs.design_con = design_con
        workflow.connect(binarised_skeleton, 'out_file', randomise, 'mask')

        workflow.connect(randomise, 'tstat_files', output_node,
                         't_contrast_raw_stat')
        workflow.connect(randomise, 't_p_files', output_node,
                         't_contrast_uncorrected_pvalue')
        workflow.connect(randomise, 't_corrected_p_files', output_node,
                         't_contrast_corrected_pvalue')

    # Create nodes to rename the outputs
    mean_fa_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa', keep_ext=True),
                              name='mean_fa_renamer')
    workflow.connect(output_node, 'mean_fa', mean_fa_renamer, 'in_file')

    mean_sk_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton', keep_ext=True),
                              name='mean_sk_renamer')
    workflow.connect(output_node, 'skeleton', mean_sk_renamer, 'in_file')

    bin_ske_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton_mask', keep_ext=True),
                              name='bin_ske_renamer')
    workflow.connect(output_node, 'skeleton_bin', bin_ske_renamer, 'in_file')

    fa_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_fa_skeletonised', keep_ext=True),
                              name='fa_skel_renamer')
    workflow.connect(output_node, 'all_fa_skeletonised', fa_skel_renamer,
                     'in_file')
    md_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_md_skeletonised', keep_ext=True),
                              name='md_skel_renamer')
    workflow.connect(output_node, 'all_md_skeletonised', md_skel_renamer,
                     'in_file')
    rd_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_rd_skeletonised', keep_ext=True),
                              name='rd_skel_renamer')
    workflow.connect(output_node, 'all_rd_skeletonised', rd_skel_renamer,
                     'in_file')
    ad_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_ad_skeletonised', keep_ext=True),
                              name='ad_skel_renamer')
    workflow.connect(output_node, 'all_ad_skeletonised', ad_skel_renamer,
                     'in_file')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    # Connect the data sink
    workflow.connect(mean_fa_renamer, 'out_file', ds, '@mean_fa')
    workflow.connect(mean_sk_renamer, 'out_file', ds, '@skel_fa')
    workflow.connect(bin_ske_renamer, 'out_file', ds, '@bkel_fa')
    workflow.connect(fa_skel_renamer, 'out_file', ds, '@all_fa')
    workflow.connect(md_skel_renamer, 'out_file', ds, '@all_md')
    workflow.connect(rd_skel_renamer, 'out_file', ds, '@all_rd')
    workflow.connect(ad_skel_renamer, 'out_file', ds, '@all_ad')

    if design_mat is not None and design_con is not None:
        workflow.connect(output_node, 't_contrast_raw_stat', ds,
                         '@t_contrast_raw_stat')
        workflow.connect(output_node, 't_contrast_uncorrected_pvalue', ds,
                         '@t_contrast_uncorrected_pvalue')
        workflow.connect(output_node, 't_contrast_corrected_pvalue', ds,
                         '@t_contrast_corrected_pvalue')

    return workflow
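
# Hypothetical invocation of the TBSS pipeline above; all paths are
# assumptions.
# wf = create_cross_sectional_tbss_pipeline(
#     in_files=['sub-01_tensor.nii.gz', 'sub-02_tensor.nii.gz'],
#     output_dir='/data/tbss_out',
#     design_mat='design.mat',
#     design_con='design.con')
# wf.run()

# imports assumed by the next snippet; `mean_mask` and `smooth_copes`
# come from earlier steps of the original script (not shown here)
import os
import nilearn.image
import nilearn.plotting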
nilearn.plotting.plot_stat_map(mean_mask)
group_mask = nilearn.image.math_img("a>=0.95", a=mean_mask)
nilearn.plotting.plot_roi(group_mask)


#%% Creating concatenated contrast (across subjects) and group mask
copes_concat = nilearn.image.concat_imgs(smooth_copes, auto_resample=True)
copes_concat.to_filename("/media/Data/work/custom_modelling_spm/negGainRisk_cope.nii.gz")

#group_mask = nilearn.image.resample_to_img(group_mask, copes_concat, interpolation='nearest')
#group_mask.to_filename(os.path.join("/media/Data/work/", "custom_modelling_spm", "group_mask.nii.gz"))

#%% Running randomization
from nipype.interfaces import fsl
import nipype.pipeline.engine as pe  # pypeline engine
randomize = pe.Node(interface=fsl.Randomise(),
                    base_dir='/media/Data/work/custom_modelling_spm/neg',
                    name='randomize')
randomize.inputs.in_file = '/media/Data/work/custom_modelling_spm/oppnegGainRisk_cope.nii.gz'  # choose which file to run permutation test on
randomize.inputs.mask = '/media/Data/work/custom_modelling_spm/group_mask.nii.gz'  # group mask file (was created earlier)
randomize.inputs.one_sample_group_mean = True
randomize.inputs.tfce = True
randomize.inputs.vox_p_values = True
randomize.inputs.num_perm = 1000
#randomize.inputs.var_smooth = 5

randomize.run()
#%% Graph it
fig = nilearn.plotting.plot_stat_map('/media/Data/work/custom_modelling_spm/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
fig.add_contours('/media/Data/work/custom_modelling_spm/randomize/randomise_tfce_corrp_tstat1.nii.gz', levels=[0.95], colors='w')
#%% opposite image run
fig = nilearn.plotting.plot_stat_map('/media/Data/work/custom_modelling_spm/neg/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
def create_lvl2tfce_wf(mask=False):
    '''
    Input [Mandatory]:
        ~~~~~~~~~~~ Set through inputs.inputspec
        proj_name: String, naming subdirectory to use to identify this instance of lvl2 modeling.
            e.g. 'nosmooth'
            The string will be used as a subdirectory in output_dir.
        copes_template: String, naming full path to the cope files. Use wildcards to grab all cope files wanted.
            contrast (below) will be used iteratively to grab only the appropriate con files from this glob list on each iteration.
            e.g. inputs.inputspec.copes_template = '/home/neuro/workdir/stress_lvl2/data/nosmooth/sub-*/model/sub-*/_modelestimate0/cope*nii.gz'
        contrast: Character defining contrast name.
            Name should match a dictionary entry in full_cons and con_regressors.
            ** Often you will want to input this with an iterable node.
        full_cons: dictionary of each contrast.
            Names should match con_regressors.
            Entries in format [('name', 'stat', [condition_list], [weight])]
            e.g. full_cons = {
                '1_instructions_Instructions': [('1_instructions_Instructions', 'T', ['1_instructions_Instructions'], [1])]
                }
        output_dir: string, representing directory of output.
            e.g. inputs.inputspec.output_dir ='/home/neuro/output'
            In the output directory, the data will be stored in a root dir, giving the time and date of processing.
            If a mask is used, the mask will also be included in the output folder name. wholebrain is used otherwise.
        subject_list: list of string, with BIDs-format IDs to identify subjects.
            Use this to drop high movement subjects, even if they are among other files that will be grabbed.
            e.g. inputs.inputspec.subject_list = ['sub-001', 'sub-002']
        con_regressors: dictionary of by-subject regressors for each contrast.
                Names should match full_cons.
                e.g. inputs.inputspec.con_regressors = {
                        '1_instructions_Instructions': {'1_instructions_Instructions': [1] * len(subject_list),
                        'reg2': [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1],
                        'reg3': [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0],
                        }
                    }
        Input [Optional]:
            mask: [default: False] path to mask file. Can have different dimensions from functional data, but should obviously be in the same reference space if anatomical (see jt_util.align_mask).
                e.g. inputs.inputspec.mask_file = '/home/neuro/atlases/FSMAP/stress/realigned_masks/amygdala_bl_flirt.nii.gz'
            sinker_subs: list of tuples, each containing a pair of strings.
                These will be sinker substitutions. They will change filenames in the output folder.
                Usually best to run the pipeline once, before deciding on these.
                e.g. inputs.inputspec.sinker_subs = [('tstat', 'raw_tstat'),
                       ('tfce_corrp_raw_tstat', 'tfce_corrected_p')]
        Output:
            lvl2tfce_wf: workflow to perform second-level modeling, using threshold free cluster estimation (tfce; see https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Randomise/UserGuide)
    '''
    import nipype.pipeline.engine as pe # pypeline engine
    import nipype.interfaces.fsl as fsl
    import os
    from nipype import IdentityInterface
    from nipype.interfaces.utility.wrappers import Function
    ##################  Setup workflow.
    lvl2tfce_wf = pe.Workflow(name='lvl2tfce_wf')

    inputspec = pe.Node(IdentityInterface(
        fields=['proj_name',
                'copes_template',
                'output_dir',
                'mask_file',
                'subject_list',
                'con_regressors',
                'full_cons',
                'sinker_subs',
                'contrast'
                ],
        mandatory_inputs=False),
                 name='inputspec')
    if mask:
        inputspec.inputs.mask_file = mask

    ################## Make output directory.
    def mk_outdir(output_dir, proj_name, mask=False):
        import os
        from time import gmtime, strftime
        time_prefix = strftime("%Y-%m-%d_%Hh-%Mm", gmtime())+'_'
        if mask:
            new_out_dir = os.path.join(output_dir, time_prefix + mask.split('/')[-1].split('.')[0])
        else:
            new_out_dir = os.path.join(output_dir, time_prefix + 'wholebrain', proj_name)
        if not os.path.isdir(new_out_dir):
            os.makedirs(new_out_dir)
        return new_out_dir

    make_outdir = pe.Node(Function(input_names=['output_dir', 'proj_name', 'mask'],
                                   output_names=['new_out_dir'],
                                   function=mk_outdir),
                          name='make_outdir')

    ################## Get contrast
    def get_con(contrast, full_cons, con_regressors):
        con_info = full_cons[contrast]
        reg_info = con_regressors[contrast]
        return con_info, reg_info

    get_model_info = pe.Node(Function(input_names=['contrast', 'full_cons', 'con_regressors'],
                                      output_names=['con_info', 'reg_info'],
                                      function=get_con),
                             name='get_model_info')
    # get_model_info.inputs.full_cons = From inputspec
    # get_model_info.inputs.full_regs = From inputspec
    # get_model_info.inputs.contrast = From inputspec

    ################## Get files
    def get_files(subject_list, copes_template, contrast):
        import glob
        temp_list = []
        for x in glob.glob(copes_template):
            if any(subj in x for subj in subject_list):
                temp_list.append(x)
        out_list = [x for x in temp_list if contrast in x]
        return out_list

    get_copes = pe.Node(Function(
        input_names=['subject_list', 'copes_template', 'contrast'],
        output_names=['out_list'],
        function=get_files),
                        name='get_copes')
    # get_copes.inputs.subject_list = # From inputspec
    # get_copes.inputs.copes_template = # From inputspec.
    # get_copes.inputs.contrast = # From inputspec.

    ################## Merge into 4d files.
    merge_copes = pe.Node(interface=fsl.Merge(dimension='t'),
                    name='merge_copes')
    # merge_copes.inputs.in_files = copes

    ################## Level 2 design.
    level2model = pe.Node(interface=fsl.MultipleRegressDesign(),
                        name='level2model')
    # level2model.inputs.contrasts # from get_con_info
    # level2model.inputs.regressors # from get_con_info

    ################## Fit the mask to the data, if one is given.
    if mask:
        def fit_mask(mask_file, ref_file):
            from nilearn.image import resample_img
            import nibabel as nib
            import os
            out_file = resample_img(nib.load(mask_file),
                                   target_affine=nib.load(ref_file).affine,
                                   target_shape=nib.load(ref_file).shape[0:3],
                                   interpolation='nearest')
            nib.save(out_file, os.path.join(os.getcwd(), mask_file.split('.nii')[0]+'_fit.nii.gz'))
            out_mask = os.path.join(os.getcwd(), mask_file.split('.nii')[0]+'_fit.nii.gz')
            return out_mask
        fit_mask = pe.Node(Function(
            input_names=['mask_file', 'ref_file'],
            output_names=['out_mask'],
            function=fit_mask),
                            name='fit_mask')

    ################## FSL Randomize.
    randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
    # randomise.inputs.in_file = #From merge_copes
    # randomise.inputs.design_mat = # From level2model design_mat
    # randomise.inputs.tcon = # From level2model design_con
    # randomise.inputs.cm_thresh = 2.49 # mass based cluster thresholding. Not used.
    # randomise.mask = # Provided from mask_reslice, if mask provided.
    randomise.inputs.tfce = True
    randomise.inputs.raw_stats_imgs = True
    randomise.inputs.vox_p_values = True
    # randomise.inputs.num_perm = 5000

    def adj_minmax(in_file):
        import nibabel as nib
        import numpy as np
        import os
        img = nib.load(in_file[0])
        data = img.get_fdata()
        img.header['cal_max'] = np.max(data)
        img.header['cal_min'] = np.min(data)
        nib.save(img, in_file[0])
        return in_file

    ################## Setup datasink.
    from nipype.interfaces.io import DataSink
    import os
    # sinker = pe.Node(DataSink(parameterization=False), name='sinker')
    sinker = pe.Node(DataSink(parameterization=True), name='sinker')

    ################## Setup Pipeline.
    lvl2tfce_wf.connect([
        (inputspec, make_outdir, [('output_dir', 'output_dir'),
                                 ('proj_name', 'proj_name')]),
        (inputspec, get_model_info, [('full_cons', 'full_cons'),
                                    ('con_regressors', 'con_regressors')]),
        (inputspec, get_model_info, [('contrast', 'contrast')]),
        (inputspec, get_copes, [('subject_list', 'subject_list'),
                               ('contrast', 'contrast'),
                               ('copes_template', 'copes_template')]),
        (get_copes, merge_copes, [('out_list', 'in_files')]),
        (get_model_info, level2model, [('con_info', 'contrasts')]),
        (get_model_info, level2model, [('reg_info', 'regressors')]),
        (merge_copes, randomise, [('merged_file', 'in_file')]),
        (level2model, randomise, [('design_mat', 'design_mat')]),
        (level2model, randomise, [('design_con', 'tcon')]),
        ])
    if mask:
        lvl2tfce_wf.connect([
            (inputspec, fit_mask, [('mask_file', 'mask_file')]),
            (merge_copes, fit_mask, [('merged_file', 'ref_file')]),
            (fit_mask, randomise, [('out_mask', 'mask')]),
            (inputspec, make_outdir, [('mask_file', 'mask')]),
            (fit_mask, sinker, [('out_mask', 'out.@mask')]),
            ])

    lvl2tfce_wf.connect([
        (inputspec, sinker, [('sinker_subs', 'substitutions')]),
        (make_outdir, sinker, [('new_out_dir', 'base_directory')]),
        (level2model, sinker, [('design_con', 'out.@con')]),
        (level2model, sinker, [('design_grp', 'out.@grp')]),
        (level2model, sinker, [('design_mat', 'out.@mat')]),
        (randomise, sinker, [(('t_corrected_p_files', adj_minmax), 'out.@t_cor_p')]),
        (randomise, sinker, [(('tstat_files', adj_minmax), 'out.@t_stat')]),
        ])
    return lvl2tfce_wf
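
# Hedged wiring sketch for create_lvl2tfce_wf, following its docstring;
# all values are placeholders.
# wf = create_lvl2tfce_wf()
# wf.inputs.inputspec.proj_name = 'nosmooth'
# wf.inputs.inputspec.copes_template = '/data/sub-*/model/_modelestimate0/cope*.nii.gz'
# wf.inputs.inputspec.output_dir = '/home/neuro/output'
# wf.inputs.inputspec.subject_list = ['sub-001', 'sub-002']
# wf.inputs.inputspec.contrast = '1_instructions_Instructions'
# wf.inputs.inputspec.full_cons = {...}        # see the docstring example
# wf.inputs.inputspec.con_regressors = {...}   # see the docstring example
# wf.run()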
Example #12
    print(file2)
    !fslmaths {file2} -sub {file1} {'hippRight_ses2-1_' + sub}

# now we can run the same SPM procedure as before - but with the diff images instead of actual ones
#%%
# lets try fsl randomise
import nilearn
func_list = glob.glob('/home/or/kpe_conn/leftAmg_seed_sub-*_z.nii.gz')
func_concat = nilearn.image.concat_imgs(func_list, auto_resample=True) # create a 4d image with subjects as the 4th dimension
# save to file
func_concat.to_filename("leftAmg_seed_concat.nii.gz")

from nipype.interfaces import fsl
import nipype.pipeline.engine as pe  # pypeline engine

randomize = pe.Node(interface=fsl.Randomise(), base_dir='/home/or/kpe_conn/fsl',
                    name='randomize')
randomize.inputs.in_file = '/home/or/kpe_conn/leftAmg_seed_concat.nii.gz' # choose which file to run permutation test on
#randomize.inputs.mask = '/media/Data/work/KPE_SPM/fslRandomize/group_mask.nii.gz' # group mask file (was created earlier)
randomize.inputs.one_sample_group_mean = True
randomize.inputs.tfce = True
randomize.inputs.vox_p_values = True
randomize.inputs.num_perm = 200

#randomize.inputs.var_smooth = 5

randomize.run()

#show graph
display = plotting.plot_stat_map('/home/or/kpe_conn/fsl/maths/hippRight_ses2-1_008.nii.gz',
                                 vmax=1)  # further arguments omitted (snippet truncated here)
Example #13
len(diff_list_con)

#%% Creating concatenated contrast (across subjects) and group mask
copes_concat = nilearn.image.concat_imgs(diff_list_con, auto_resample=True)
copes_concat.to_filename(
    os.path.join(work_dir, "con%s_%s.nii.gz" % (contrast, group)))

group_mask = nilearn.image.resample_to_img(group_mask,
                                           copes_concat,
                                           interpolation='nearest')
group_mask.to_filename(os.path.join(work_dir, "group_mask.nii.gz"))

#%% Running randomization
from nipype.interfaces import fsl
import nipype.pipeline.engine as pe  # pypeline engine
randomize = pe.Node(interface=fsl.Randomise(),
                    base_dir=work_dir,
                    name='randomize')
randomize.inputs.in_file = os.path.join(
    work_dir, "con%s_%s.nii.gz" %
    (contrast, group))  # choose which file to run permutation test on
randomize.inputs.mask = os.path.join(
    work_dir, 'group_mask.nii.gz')  # group mask file (was created earlier)
randomize.inputs.one_sample_group_mean = True
randomize.inputs.tfce = True
#randomize.inputs.vox_p_values = True
randomize.inputs.num_perm = 1000
#randomize.inputs.var_smooth = 5

randomize.run()
Example #14
def group_randomise_wf(
    input_dir,
    output_dir,
    subject_list,
    regressors_path,
    contrast_path,
    selected_cope=None,
    roi=None,
    oneSampleT=False,
    analysis_name="oneSampleT_PPI",
):
    """Group level non parametric test work flow

    Parameters
    ----------
    input_dir:
        BIDS derivative
    subject_list:
        subjects entering group level analysis
    roi:
        mask or coordinate (default: whole brain)
    """
    def wf_prep_files():
        prep_files = pe.Workflow(name="prep_files")
        prep_files.base_dir = input_dir + os.sep + "group_level"

        template = {"mask": "sub-{subject}/sub-{subject}.feat/mask.nii.gz"}
        whole_brain_mask = pe.MapNode(
            SelectFiles(templates=template),
            iterfield="subject",
            name="whole_brain_mask",
        )
        whole_brain_mask.inputs.base_directory = input_dir
        whole_brain_mask.inputs.subject = subject_list

        gen_groupmask = pe.Node(
            Function(
                function=_create_group_mask,
                input_names=["brain_masks", "base_dir"],
                output_names=["groupmask_path"],
            ),
            name="gen_groupmask",
        )
        gen_groupmask.inputs.base_dir = input_dir + os.sep + "group_level" + os.sep

        designs = pe.Node(
            Function(
                function=_groupmean_contrast,
                input_names=[
                    "subject_list", "regressors_path", "contrast_path"
                ],
                output_names=["groups", "regressors", "contrasts"],
            ),
            name="designs",
        )
        designs.inputs.subject_list = subject_list
        designs.inputs.regressors_path = regressors_path
        designs.inputs.contrast_path = contrast_path

        model = pe.Node(fsl.MultipleRegressDesign(), name="model")

        outputnode = pe.Node(
            interface=niu.IdentityInterface(
                fields=["mask", "group", "regressors", "contrasts"]),
            name="outputnode",
        )

        prep_files.connect([
            (whole_brain_mask, gen_groupmask, [("mask", "brain_masks")]),
            (
                designs,
                model,
                [
                    ("groups", "groups"),
                    ("regressors", "regressors"),
                    ("contrasts", "contrasts"),
                ],
            ),
            (gen_groupmask, outputnode, [("groupmask_path", "mask")]),
            (
                model,
                outputnode,
                [
                    ("design_grp", "group"),
                    ("design_mat", "regressors"),
                    ("design_con", "contrasts"),
                ],
            ),
        ])
        return prep_files

    meta_workflow = pe.Workflow(name=analysis_name)
    meta_workflow.base_dir = input_dir + os.sep + "group_level"
    # now run randomise...
    contrast_names = _cope_names(input_dir, selected_cope)
    for cope_id, contrast in contrast_names:
        node_name = contrast.replace(">", "_wrt_")
        wk = pe.Workflow(name=f"contrast_{node_name}")
        template = {
            "cope_file":
            "sub-{subject}/sub-{subject}.feat/stats/cope{cope}.nii.gz"
        }
        file_grabber = pe.MapNode(
            SelectFiles(template, base_directory=input_dir),
            iterfield="subject",
            name="file_grabber",
        )
        file_grabber.inputs.cope = cope_id
        file_grabber.inputs.subject = subject_list

        concat_copes = pe.Node(
            Function(
                function=_concat_copes,
                input_names=["cope_file", "mm", "output_dir"],
                output_names=["output_dir"],
            ),
            name="concat_copes",
        )
        concat_copes.inputs.mm = 6
        concat_copes.inputs.output_dir = (input_dir + os.sep + "group_level" +
                                          os.sep + f"cope_{node_name}.nii.gz")
        prep_files = wf_prep_files()

        # non-parametric permutation test on the concatenated copes
        randomise = pe.Node(fsl.Randomise(), name="stats_randomise")
        randomise.inputs.num_perm = 1000
        randomise.inputs.vox_p_values = True
        randomise.inputs.tfce = True

        import pandas as pd

        group_contrast_names = pd.read_csv(contrast_path,
                                           sep="\t",
                                           index_col=0).index
        group_contrast_names = group_contrast_names.tolist()

        # Create DataSink object
        sinker = pe.Node(DataSink(), name=f"sinker_{node_name}")
        sinker.inputs.base_directory = output_dir + os.sep + analysis_name
        t_test_new_name, p_new_name = [], []
        for i, name in enumerate(group_contrast_names):
            t_test_new_name.append(
                (f"randomise_tstat{i + 1}", f"{name}_tstat"))
            p_new_name.append((f"randomise_tfce_corrp_tstat{i + 1}",
                               f"{name}_tfce_corrp_tstat"))
        sinker.inputs.substitutions = t_test_new_name + p_new_name

        # connect the nodes
        wk.connect([
            (file_grabber, concat_copes, [("cope_file", "cope_file")]),
            (concat_copes, randomise, [("output_dir", "in_file")]),
            (
                prep_files,
                randomise,
                [
                    ("outputnode.mask", "mask"),
                    ("outputnode.contrasts", "tcon"),
                    ("outputnode.regressors", "design_mat"),
                ],
            ),
            (
                randomise,
                sinker,
                [
                    ("tstat_files", f"contrast_{node_name}.@tstat_files"),
                    (
                        "t_corrected_p_files",
                        f"contrast_{node_name}.@t_corrected_p_files",
                    ),
                ],
            ),
        ])

        if oneSampleT:
            # one sample T test
            onesampleT_randomise = pe.Node(fsl.Randomise(),
                                           name="onesampleT_randomise")
            onesampleT_randomise.inputs.num_perm = 1000
            onesampleT_randomise.inputs.vox_p_values = True
            onesampleT_randomise.inputs.tfce = True
            onesampleT_randomise.inputs.one_sample_group_mean = True

            # Create DataSink object
            gsinker = pe.Node(DataSink(), name=f"sinker_{node_name}_group")
            gsinker.inputs.base_directory = output_dir + os.sep + analysis_name
            gsinker.inputs.substitutions = [
                ("tstat1", "tstat"),
                ("randomise", "fullsample"),
            ]
            wk.connect([
                (concat_copes, onesampleT_randomise, [("output_dir", "in_file")
                                                      ]),
                (prep_files, onesampleT_randomise, [("outputnode.mask", "mask")
                                                    ]),
                (
                    onesampleT_randomise,
                    gsinker,
                    [
                        ("tstat_files",
                         f"contrast_{node_name}.@group_tstat_files"),
                        (
                            "t_corrected_p_files",
                            f"contrast_{node_name}.@group_t_corrected_p_files",
                        ),
                    ],
                ),
            ])

        meta_workflow.add_nodes([wk])
    return meta_workflow
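
# Hypothetical invocation of group_randomise_wf; all paths are assumptions.
# wf = group_randomise_wf('/data/derivatives/feat', '/data/group_out',
#                         ['01', '02', '03'], 'regressors.tsv',
#                         'contrasts.tsv', oneSampleT=True)
# wf.run()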
#copeImages = glob.glob('/media/Data/work/firstLevelKPE/_subject_id_*/feat_fit/run0.feat/stats/cope1.nii.gz')
#copemerge.inputs.in_files = copeImages

# Configure FSL 2nd level analysis
l2_model = pe.Node(fsl.L2Model(), name='l2_model')

flameo_ols = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo_ols')


def _len(inlist):
    print(len(inlist))
    return len(inlist)


### use randomize
rand = pe.Node(fsl.Randomise(), name="randomize")

rand.inputs.mask = '/home/oad4/scratch60/kpe_fsl/derivatives/fmriprep/sub-1369/ses-1/func/sub-1369_ses-1_task-Memory_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz'  # group mask file (was created earlier)
rand.inputs.one_sample_group_mean = True
rand.inputs.tfce = True
rand.inputs.vox_p_values = True
rand.inputs.num_perm = 5000
# Thresholding - FDR ################################################
# Calculate pvalues with ztop
fdr_ztop = pe.Node(fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='fdr_ztop')
# Find FDR threshold: fdr -i zstat1_pval -m <group_mask> -q 0.05
# fdr_th = <write Nipype interface for fdr>
# Apply threshold:
# fslmaths zstat1_pval -mul -1 -add 1 -thr <fdr_th> -mas <group_mask> \
#     zstat1_thresh_vox_fdr_pstat1
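
# Hedged sketch of the "apply threshold" step above as a node; the 0.95
# stands in for a value derived from FSL's `fdr` tool, which has to be
# run by hand since nipype has no dedicated interface for it, and the
# mask path is a placeholder.
apply_fdr = pe.Node(fsl.ImageMaths(suffix='_thresh_vox_fdr_pstat1'),
                    name='apply_fdr')
apply_fdr.inputs.op_string = '-mul -1 -add 1 -thr 0.95 -mas group_mask.nii.gz'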