def sort_pes(pes):
    from nipype import config, logging
    from nipype.interfaces.fsl import Merge
    from os.path import abspath
    config.enable_debug_mode()
    logging.update_logging(config)

    print(pes)

    # Split the input files by phase-encoding direction, then sort so that
    # runs pair up by index.
    pe0s = []
    pe1s = []
    for f in pes:
        if 'pe0' in f:
            pe0s.append(f)
        elif 'pe1' in f:
            pe1s.append(f)

    pe0s = sorted(pe0s)
    pe1s = sorted(pe1s)

    me = Merge()
    merged_pes = []

    for i in range(len(pe1s)):
        # The run number is the character just before the '_pe1.nii.gz' suffix.
        num = pe1s[i][-12:-11]
        me.inputs.in_files = [pe1s[i], pe0s[i]]
        me.inputs.dimension = 't'
        me.inputs.merged_file = 'merged_pes%s.nii.gz' % num
        me.run()
        merged_pes.append(abspath('merged_pes%s.nii.gz' % num))

    return merged_pes
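
A minimal usage sketch (hypothetical paths): the run number is read from the character twelve places before the end of each pe1 filename, i.e. just before the '_pe1.nii.gz' suffix.

pes = ['/data/sub-01_run1_pe0.nii.gz', '/data/sub-01_run1_pe1.nii.gz',
       '/data/sub-01_run2_pe0.nii.gz', '/data/sub-01_run2_pe1.nii.gz']
merged = sort_pes(pes)  # -> absolute paths to merged_pes1.nii.gz, merged_pes2.nii.gz
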
Example #2
def test_Merge_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        dimension=dict(
            argstr='-%s',
            mandatory=True,
            position=0,
        ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_files=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        merged_file=dict(
            argstr='%s',
            hash_files=False,
            name_source='in_files',
            name_template='%s_merged',
            position=1,
        ),
        output_type=dict(),
        terminal_output=dict(nohash=True, ),
        tr=dict(
            argstr='%.2f',
            position=-1,
        ),
    )
    inputs = Merge.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
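
These yield-style tests target the legacy nose runner; pytest collects but no longer executes yielded assertions. A plain-assert equivalent, assuming the same input_map is in scope, might look like:

def test_Merge_inputs_asserts():
    inputs = Merge.input_spec()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            assert getattr(inputs.traits()[key], metakey) == value
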
Example #3
def test_Merge_outputs():
    output_map = dict(merged_file=dict(), )
    outputs = Merge.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #4
def mni_tmplt(db_path, img_list):
    merger = pe.Node(Merge(), name='merger')
    # merger = Merge()
    # merger.inputs.merged_file = os.path.join(db_path, 'extras', 'merged.nii')
    merger.inputs.in_files = img_list
    merger.inputs.dimension = 't'
    merger.inputs.output_type = 'NIFTI'
    # merger.run()
    mean = pe.Node(MeanImage(), name='mean')
    mean.inputs.output_type = 'NIFTI'
    sm = pe.Node(Smooth(), name='sm')
    sm.inputs.fwhm = 8
    # sm.inputs.output_type = 'NIFTI'
    mean.inputs.out_file = os.path.join(db_path, 'extra', 'mean.nii')

    ppln = pe.Workflow(name='ppln')
    ppln.connect([
        (merger, mean, [('merged_file', 'in_file')]),
        (mean, sm, [('out_file', 'in_file')]),  # fsl.Smooth expects 'in_file'
    ])
    ppln.run()

    img = nib.load(os.path.join(db_path, 'extra', 'mean.nii'))
    scld_vox = (img.get_data() / img.get_data().max())
    new_img = nib.Nifti1Image(scld_vox, img.affine, img.header)
    nib.save(new_img, os.path.join(db_path, 'extra', 'st_sp_tmpl.nii'))
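
Note that img.get_data() is deprecated in recent nibabel releases (and removed in 5.0). A sketch of the same rescaling step with the current API, assuming nibabel >= 3.0:

img = nib.load(os.path.join(db_path, 'extra', 'mean.nii'))
data = img.get_fdata()  # float array; replaces img.get_data()
new_img = nib.Nifti1Image(data / data.max(), img.affine, img.header)
nib.save(new_img, os.path.join(db_path, 'extra', 'st_sp_tmpl.nii'))
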
Example #5
def test_Merge_outputs():
    output_map = dict(merged_file=dict(),
    )
    outputs = Merge.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #6
def test_Merge_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        dimension=dict(
            argstr='-%s',
            mandatory=True,
            position=0,
        ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_files=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        merged_file=dict(
            argstr='%s',
            hash_files=False,
            name_source='in_files',
            name_template='%s_merged',
            position=1,
        ),
        output_type=dict(),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
        tr=dict(
            argstr='%.2f',
            position=-1,
        ),
    )
    inputs = Merge.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #7
File: rs.py Project: NBCLab/niconn
def rs_grouplevel(copes, varcopes, dofs, output_dir, work_dir):

    from nipype.interfaces.fsl.model import MultipleRegressDesign
    from nipype.interfaces.utility import Function
    from nipype.interfaces.fsl.model import FLAMEO
    from nipype.interfaces.fsl.model import SmoothEstimate
    from connectivity.interfaces import Cluster
    from nipype.interfaces.fsl.utils import Merge
    from nipype.interfaces.fsl import Info
    from connectivity.interfaces import PtoZ

    grplevelworkflow = pe.Workflow(name="grplevelworkflow")
    grplevelworkflow.base_dir = work_dir

    # A single Merge instance is reused for the copes, varcopes, and DOF
    # files; each run() executes with whatever inputs are set at that point.
    merger = Merge()
    merger.inputs.dimension = 't'
    merger.inputs.in_files = copes
    merger.inputs.merged_file = op.join(work_dir, 'cope.nii.gz')
    merger.run()

    merger.inputs.in_files = varcopes
    merger.inputs.merged_file = op.join(work_dir, 'varcope.nii.gz')
    merger.run()

    merger.inputs.in_files = dofs
    merger.inputs.merged_file = op.join(work_dir, 'dof.nii.gz')
    merger.run()

    model = pe.Node(interface=MultipleRegressDesign(), name='model')
    model.inputs.contrasts = [['mean', 'T', ['roi'], [1]]]
    model.inputs.regressors = dict(roi=np.ones(len(copes)).tolist())

    flameo = pe.Node(interface=FLAMEO(), name='flameo')
    flameo.inputs.cope_file = op.join(work_dir, 'cope.nii.gz')
    flameo.inputs.var_cope_file = op.join(work_dir, 'varcope.nii.gz')
    flameo.inputs.dof_var_cope_file = op.join(work_dir, 'dof.nii.gz')
    flameo.inputs.run_mode = 'flame1'
    flameo.inputs.mask_file = Info.standard_image(
        'MNI152_T1_2mm_brain_mask.nii.gz')

    grplevelworkflow.connect(model, 'design_con', flameo, 't_con_file')
    grplevelworkflow.connect(model, 'design_grp', flameo, 'cov_split_file')
    grplevelworkflow.connect(model, 'design_mat', flameo, 'design_file')

    smoothest = pe.Node(SmoothEstimate(), name='smooth_estimate')
    grplevelworkflow.connect(flameo, 'zstats', smoothest, 'residual_fit_file')
    smoothest.inputs.mask_file = Info.standard_image(
        'MNI152_T1_2mm_brain_mask.nii.gz')
    smoothest.inputs.dof = len(dofs) - 1

    cluster = pe.Node(Cluster(), name='cluster')
    ptoz = pe.Node(PtoZ(), name='ptoz')
    ptoz.inputs.pvalue = 0.001
    calculate_resels = pe.Node(Function(input_names=["volume", "resels"],
                                        output_names=["resels"],
                                        function=calcres),
                               name="calcres")
    grplevelworkflow.connect(smoothest, 'resels', cluster, 'resels')
    grplevelworkflow.connect(smoothest, 'resels', calculate_resels, 'resels')
    grplevelworkflow.connect(smoothest, 'volume', calculate_resels, 'volume')
    grplevelworkflow.connect(calculate_resels, 'resels', ptoz, 'resels')
    grplevelworkflow.connect(ptoz, 'zstat', cluster, 'threshold')
    cluster.inputs.connectivity = 26
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True
    cluster.inputs.voxthresh = True

    grplevelworkflow.connect(flameo, 'zstats', cluster, 'in_file')

    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.base_directory = work_dir

    grplevelworkflow.connect(flameo, 'zstats', datasink, 'z')
    grplevelworkflow.connect(cluster, 'threshold_file', datasink, 'z_thresh')

    grplevelworkflow.run()

    shutil.rmtree(op.join(work_dir, 'grplevelworkflow'))
    # copy results to the output directory
    shutil.copyfile(op.join(work_dir, 'z', 'zstat1.nii.gz'),
                    op.join(output_dir, 'z.nii.gz'))
    shutil.copyfile(op.join(work_dir, 'z_thresh', 'zstat1_threshold.nii.gz'),
                    op.join(output_dir, 'z_level-voxel_corr-FWE.nii.gz'))

    shutil.rmtree(work_dir)
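
The calculate_resels node above wraps a calcres helper that is not shown in this snippet. Judging from its wiring (SmoothEstimate's volume and resels in, a resel count out), a plausible sketch is:

def calcres(volume, resels):
    # Number of resolution elements (RESELs) in the search volume:
    # total volume in voxels divided by the voxel volume of one resel.
    return int(volume / resels)
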
Example #8
def secondlevel_wf(subject_id, sink_directory, name='GLM1_scndlvl_wf'):
    scndlvl_wf = Workflow(name='scndlvl_wf')
    base_dir = os.path.abspath('/home/data/madlab/data/mri/wmaze/')

    contrasts = [
        'all_before_B_corr', 'all_before_B_incorr', 'all_remaining',
        'all_corr_minus_all_incorr', 'all_incorr_minus_all_corr'
    ]

    cnt_file_list = []
    for curr_contrast in contrasts:
        cnt_file_list.append(
            glob(
                os.path.join(
                    base_dir,
                    'frstlvl/model_GLM1/{0}/modelfit/contrasts/_estimate_model*/cope??_{1}.nii.gz'
                    .format(subject_id, curr_contrast))))

    dof_runs = [[], [], [], [], []]
    for i, curr_file_list in enumerate(cnt_file_list):
        if not isinstance(curr_file_list, list):
            curr_file_list = [curr_file_list]
        for curr_file in curr_file_list:
            dof_runs[i].append(
                curr_file.split('/')[-2][-1])  # grabs the estimate_model number

    info = dict(copes=[['subject_id', contrasts]],
                varcopes=[['subject_id', contrasts]],
                mask_file=[['subject_id', 'aparc+aseg_thresh']],
                dof_files=[['subject_id', dof_runs, 'dof']])

    #datasource node to get task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=info.keys()),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/')
    datasource.inputs.field_template = dict(
        copes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/cope*_%s.nii.gz',
        varcopes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/varcope*_%s.nii.gz',
        mask_file='preproc/%s/ref/_fs_threshold20/%s*_thresh.nii',
        dof_files='frstlvl/model_GLM1/%s/modelfit/dofs/_estimate_model%s/%s')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    #inputspec to deal with copes and varcopes doublelist issues
    fixedfx_inputspec = Node(IdentityInterface(
        fields=['copes', 'varcopes', 'dof_files'], mandatory_inputs=True),
                             name='fixedfx_inputspec')
    scndlvl_wf.connect(datasource, ('copes', doublelist), fixedfx_inputspec,
                       'copes')
    scndlvl_wf.connect(datasource, ('varcopes', doublelist), fixedfx_inputspec,
                       'varcopes')
    scndlvl_wf.connect(datasource, ('dof_files', doublelist),
                       fixedfx_inputspec, 'dof_files')

    #merge all of copes into a single matrix across subject runs
    copemerge = MapNode(Merge(), iterfield=['in_files'], name='copemerge')
    copemerge.inputs.dimension = 't'
    copemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    copemerge.inputs.ignore_exception = False
    copemerge.inputs.output_type = 'NIFTI_GZ'
    copemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'copes', copemerge, 'in_files')

    #generate DOF volume for second level
    gendofvolume = Node(Function(input_names=['dof_files', 'cope_files'],
                                 output_names=['dof_volumes'],
                                 function=get_dofvolumes),
                        name='gendofvolume')
    gendofvolume.inputs.ignore_exception = False
    scndlvl_wf.connect(fixedfx_inputspec, 'dof_files', gendofvolume,
                       'dof_files')
    scndlvl_wf.connect(copemerge, 'merged_file', gendofvolume, 'cope_files')

    #merge all of the varcopes into a single matrix across subject runs per voxel
    varcopemerge = MapNode(Merge(),
                           iterfield=['in_files'],
                           name='varcopemerge')
    varcopemerge.inputs.dimension = 't'
    varcopemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    varcopemerge.inputs.ignore_exception = False
    varcopemerge.inputs.output_type = 'NIFTI_GZ'
    varcopemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'varcopes', varcopemerge, 'in_files')

    #define contrasts from the names of the copes
    getcontrasts = Node(Function(input_names=['data_inputs'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', doublelist), getcontrasts,
                       'data_inputs')

    #rename output files to be more descriptive
    getsubs = Node(Function(input_names=['subject_id', 'cons'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    scndlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    #l2model node for fixed effects analysis (aka within subj across runs)
    l2model = MapNode(L2Model(), iterfield=['num_copes'], name='l2model')
    l2model.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', num_copes), l2model, 'num_copes')

    #FLAMEO Node to run the fixed effects analysis
    flameo_fe = MapNode(FLAMEO(),
                        iterfield=[
                            'cope_file', 'var_cope_file', 'dof_var_cope_file',
                            'design_file', 't_con_file', 'cov_split_file'
                        ],
                        name='flameo_fe')
    flameo_fe.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    flameo_fe.inputs.ignore_exception = False
    flameo_fe.inputs.log_dir = 'stats'
    flameo_fe.inputs.output_type = 'NIFTI_GZ'
    flameo_fe.inputs.run_mode = 'fe'
    flameo_fe.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(varcopemerge, 'merged_file', flameo_fe, 'var_cope_file')
    scndlvl_wf.connect(l2model, 'design_mat', flameo_fe, 'design_file')
    scndlvl_wf.connect(l2model, 'design_con', flameo_fe, 't_con_file')
    scndlvl_wf.connect(l2model, 'design_grp', flameo_fe, 'cov_split_file')
    scndlvl_wf.connect(gendofvolume, 'dof_volumes', flameo_fe,
                       'dof_var_cope_file')
    scndlvl_wf.connect(datasource, 'mask_file', flameo_fe, 'mask_file')
    scndlvl_wf.connect(copemerge, 'merged_file', flameo_fe, 'cope_file')

    #outputspec node
    scndlvl_outputspec = Node(IdentityInterface(
        fields=['res4d', 'copes', 'varcopes', 'zstats', 'tstats'],
        mandatory_inputs=True),
                              name='scndlvl_outputspec')
    scndlvl_wf.connect(flameo_fe, 'res4d', scndlvl_outputspec, 'res4d')
    scndlvl_wf.connect(flameo_fe, 'copes', scndlvl_outputspec, 'copes')
    scndlvl_wf.connect(flameo_fe, 'var_copes', scndlvl_outputspec, 'varcopes')
    scndlvl_wf.connect(flameo_fe, 'zstats', scndlvl_outputspec, 'zstats')
    scndlvl_wf.connect(flameo_fe, 'tstats', scndlvl_outputspec, 'tstats')

    #datasink node
    sinkd = Node(DataSink(), name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    scndlvl_wf.connect(scndlvl_outputspec, 'copes', sinkd, 'fixedfx.@copes')
    scndlvl_wf.connect(scndlvl_outputspec, 'varcopes', sinkd,
                       'fixedfx.@varcopes')
    scndlvl_wf.connect(scndlvl_outputspec, 'tstats', sinkd, 'fixedfx.@tstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'zstats', sinkd, 'fixedfx.@zstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'res4d', sinkd, 'fixedfx.@pvals')
    scndlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')

    return scndlvl_wf
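
The connections above rely on helper functions (doublelist, num_copes, get_dofvolumes, get_contrasts, get_subs) defined elsewhere in the project. As an illustration only, the two list-shaping helpers might look like this sketch (not the project's actual code):

def doublelist(x):
    # DataGrabber may return a flat list; the downstream MapNodes expect a
    # list of lists, one inner list of runs per contrast.
    if isinstance(x, list) and x and not isinstance(x[0], list):
        return [x]
    return x

def num_copes(files):
    # One cope count per contrast, for the L2Model MapNode.
    return [len(f) for f in files]
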
Example #9
datagrabber = Node(DataGrabber(infields=['roi'],
                               outfields=['roi'],
                               sort_filelist=True,
                               base_directory=output_dir,
                               template='glm_seed_copes/%s_*/cope.nii',
                               field_template=templates,
                               template_args=dict(roi=[['roi']])),
                   name='datagrabber')

# In[ ]:

## Level 2

# merge param estimates across all subjects per seed
merge = Node(Merge(dimension='t'), name='merge')

# FSL randomise for higher level analysis
highermodel = Node(Randomise(tfce=True,
                             raw_stats_imgs=True,
                             design_mat=group_mat,
                             tcon=group_con),
                   name='highermodel')

## Cluster results

# make binary masks of sig clusters
binarize = MapNode(Binarize(min=0.95, max=1.0),
                   name='binarize',
                   iterfield=['in_file'])  # iterfield requires a MapNode, not a Node
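
This excerpt declares the nodes but never connects them; a plausible wiring for the level-2 pipeline, assuming the standard nipype Workflow class and Randomise's t_corrected_p_files output, would be:

from nipype.pipeline.engine import Workflow

l2 = Workflow(name='l2')
l2.connect([
    (datagrabber, merge, [('roi', 'in_files')]),
    (merge, highermodel, [('merged_file', 'in_file')]),
    (highermodel, binarize, [('t_corrected_p_files', 'in_file')]),
])
l2.run()
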
Example #10
    conditions = conditions.append(temp, ignore_index=True)

#conditions.to_csv(output_dir + '/featureset_key.csv')
conditions.describe()

# In[3]:

## Concatenate all the parameter estimates from preproc to create a feature set
from glob import glob
from nipype.interfaces.fsl.utils import Merge
files = glob(preproc_dir + '/*/betas.nii.gz')
files = sorted(files)

bold_feature_data = output_dir + '/featureset.nii.gz'

merge = Merge()
merge.inputs.in_files = files
merge.inputs.dimension = 't'
merge.inputs.merged_file = bold_feature_data
#merge.run()
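
The merge.run() call is left commented out above; when the feature set needs to be (re)built, one might execute the merge and load the 4D result for the analyses below (a sketch, assuming nibabel is installed):

merge.run()  # writes featureset.nii.gz
import nibabel as nib
features = nib.load(bold_feature_data).get_fdata()  # 4D array: x, y, z, runs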

# In[23]:

# determine which analysis to run
for analysis in [
        'all_conditions', 'allConds_predAge', 'negative', 'positive', 'neutral'
]:

    if analysis == 'all_conditions':
        mask = conditions['labels'].isin(['negative', 'positive', 'neutral'])
        labels = conditions['labels']
Example #11
def rs_grouplevel(copes, varcopes, output_dir, work_dir):

    from nipype.interfaces.fsl.model import MultipleRegressDesign
    from nipype.interfaces.fsl.model import FLAMEO
    from nipype.interfaces.fsl.model import SmoothEstimate
    from interfaces import Cluster
    from nipype.interfaces.fsl.utils import Merge
    from nipype.interfaces.fsl import Info
    from interfaces import PtoZ

    def calcres(smoothest_input):
        return int(smoothest_input[0] / smoothest_input[1])

    grplevelworkflow = pe.Workflow(name="grplevelworkflow")

    merger = Merge()
    merger.inputs.dimension = 't'
    merger.inputs.in_files = copes
    merger.inputs.merged_file = op.join(work_dir, 'cope.nii.gz')
    merger.run()

    merger.inputs.in_files = varcopes
    merger.inputs.merged_file = op.join(work_dir, 'varcope.nii.gz')
    merger.run()

    model = MultipleRegressDesign()
    model.inputs.contrasts = [['mean', 'T', ['roi'], [1]]]
    model.inputs.regressors = dict(roi=np.ones(len(copes)).tolist())

    flameo = pe.Node(interface=FLAMEO(), name='flameo')
    flameo.inputs.cope_file = op.join(work_dir, 'cope.nii.gz')