Code Example #1
File: test_auto_FLAMEO.py  Project: wanderine/nipype
from nipype.testing import assert_equal
from nipype.interfaces.fsl.model import FLAMEO


def test_FLAMEO_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        burnin=dict(argstr='--burnin=%d', ),
        cope_file=dict(
            argstr='--copefile=%s',
            mandatory=True,
        ),
        cov_split_file=dict(
            argstr='--covsplitfile=%s',
            mandatory=True,
        ),
        design_file=dict(
            argstr='--designfile=%s',
            mandatory=True,
        ),
        dof_var_cope_file=dict(argstr='--dofvarcopefile=%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        f_con_file=dict(argstr='--fcontrastsfile=%s', ),
        fix_mean=dict(argstr='--fixmean', ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        infer_outliers=dict(argstr='--inferoutliers', ),
        log_dir=dict(
            argstr='--ld=%s',
            usedefault=True,
        ),
        mask_file=dict(
            argstr='--maskfile=%s',
            mandatory=True,
        ),
        n_jumps=dict(argstr='--njumps=%d', ),
        no_pe_outputs=dict(argstr='--nopeoutput', ),
        outlier_iter=dict(argstr='--ioni=%d', ),
        output_type=dict(),
        run_mode=dict(
            argstr='--runmode=%s',
            mandatory=True,
        ),
        sample_every=dict(argstr='--sampleevery=%d', ),
        sigma_dofs=dict(argstr='--sigma_dofs=%d', ),
        t_con_file=dict(
            argstr='--tcontrastsfile=%s',
            mandatory=True,
        ),
        terminal_output=dict(nohash=True, ),
        var_cope_file=dict(argstr='--varcopefile=%s', ),
    )
    inputs = FLAMEO.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
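The auto-generated test above enumerates FLAMEO's input traits and flags which ones are mandatory (cope_file, cov_split_file, design_file, mask_file, run_mode, t_con_file). A minimal standalone sketch of driving the interface directly, assuming FSL is installed and the input files exist; all file names below are placeholders:

from nipype.interfaces.fsl import FLAMEO

flameo = FLAMEO()
flameo.inputs.cope_file = 'cope_merged.nii.gz'         # 4D stack of copes (placeholder)
flameo.inputs.var_cope_file = 'varcope_merged.nii.gz'  # optional 4D varcopes (placeholder)
flameo.inputs.design_file = 'design.mat'               # from L2Model / MultipleRegressDesign
flameo.inputs.t_con_file = 'design.con'
flameo.inputs.cov_split_file = 'design.grp'
flameo.inputs.mask_file = 'mask.nii.gz'
flameo.inputs.run_mode = 'fe'                          # one of 'fe', 'ols', 'flame1', 'flame12'
result = flameo.run()                                  # invokes the flameo command line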
Code Example #2
File: test_auto_FLAMEO.py  Project: adamatus/nipype
def test_FLAMEO_outputs():
    output_map = dict(mrefvars=dict(),
    tstats=dict(),
    pes=dict(),
    stats_dir=dict(),
    zstats=dict(),
    weights=dict(),
    tdof=dict(),
    var_copes=dict(),
    copes=dict(),
    fstats=dict(),
    zfstats=dict(),
    res4d=dict(),
    )
    outputs = FLAMEO.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Code Example #3
File: test_auto_FLAMEO.py  Project: wanderine/nipype
def test_FLAMEO_outputs():
    output_map = dict(
        copes=dict(),
        fstats=dict(),
        mrefvars=dict(),
        pes=dict(),
        res4d=dict(),
        stats_dir=dict(),
        tdof=dict(),
        tstats=dict(),
        var_copes=dict(),
        weights=dict(),
        zfstats=dict(),
        zstats=dict(),
    )
    outputs = FLAMEO.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Code Example #4
File: rs.py  Project: NBCLab/niconn
import os.path as op
import shutil

import numpy as np

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

# calcres is a helper defined elsewhere in the niconn project.


def rs_grouplevel(copes, varcopes, dofs, output_dir, work_dir):

    from nipype.interfaces.fsl.model import MultipleRegressDesign
    from nipype.interfaces.utility import Function
    from nipype.interfaces.fsl.model import FLAMEO
    from nipype.interfaces.fsl.model import SmoothEstimate
    from connectivity.interfaces import Cluster
    from nipype.interfaces.fsl.utils import Merge
    from nipype.interfaces.fsl import Info
    from connectivity.interfaces import PtoZ

    grplevelworkflow = pe.Workflow(name="grplevelworkflow")
    grplevelworkflow.base_dir = work_dir

    merger = Merge()
    merger.inputs.dimension = 't'
    merger.inputs.in_files = copes
    merger.inputs.merged_file = op.join(work_dir, 'cope.nii.gz')
    merger.run()

    merger.inputs.in_files = varcopes
    merger.inputs.merged_file = op.join(work_dir, 'varcope.nii.gz')
    merger.run()

    merger.inputs.in_files = dofs
    merger.inputs.merged_file = op.join(work_dir, 'dof.nii.gz')
    merger.run()

    model = pe.Node(interface=MultipleRegressDesign(), name='model')
    model.inputs.contrasts = [['mean', 'T', ['roi'], [1]]]
    model.inputs.regressors = dict(roi=np.ones(len(copes)).tolist())

    flameo = pe.Node(interface=FLAMEO(), name='flameo')
    flameo.inputs.cope_file = op.join(work_dir, 'cope.nii.gz')
    flameo.inputs.var_cope_file = op.join(work_dir, 'varcope.nii.gz')
    flameo.inputs.dof_var_cope_file = op.join(work_dir, 'dof.nii.gz')
    flameo.inputs.run_mode = 'flame1'
    flameo.inputs.mask_file = Info.standard_image(
        'MNI152_T1_2mm_brain_mask.nii.gz')

    grplevelworkflow.connect(model, 'design_con', flameo, 't_con_file')
    grplevelworkflow.connect(model, 'design_grp', flameo, 'cov_split_file')
    grplevelworkflow.connect(model, 'design_mat', flameo, 'design_file')

    smoothest = pe.Node(SmoothEstimate(), name='smooth_estimate')
    grplevelworkflow.connect(flameo, 'zstats', smoothest, 'residual_fit_file')
    smoothest.inputs.mask_file = Info.standard_image(
        'MNI152_T1_2mm_brain_mask.nii.gz')
    smoothest.inputs.dof = len(dofs) - 1

    cluster = pe.Node(Cluster(), name='cluster')
    ptoz = pe.Node(PtoZ(), name='ptoz')
    ptoz.inputs.pvalue = 0.001
    calculate_resels = pe.Node(Function(input_names=["volume", "resels"],
                                        output_names=["resels"],
                                        function=calcres),
                               name="calcres")
    grplevelworkflow.connect(smoothest, 'resels', cluster, 'resels')
    grplevelworkflow.connect(smoothest, 'resels', calculate_resels, 'resels')
    grplevelworkflow.connect(smoothest, 'volume', calculate_resels, 'volume')
    grplevelworkflow.connect(calculate_resels, 'resels', ptoz, 'resels')
    grplevelworkflow.connect(ptoz, 'zstat', cluster, 'threshold')
    cluster.inputs.connectivity = 26
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True
    cluster.inputs.voxthresh = True

    grplevelworkflow.connect(flameo, 'zstats', cluster, 'in_file')

    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.base_directory = work_dir

    grplevelworkflow.connect(flameo, 'zstats', datasink, 'z')
    grplevelworkflow.connect(cluster, 'threshold_file', datasink, 'z_thresh')

    grplevelworkflow.run()

    shutil.rmtree(op.join(work_dir, 'grplevelworkflow'))
    #copy data to the output directory
    shutil.copyfile(op.join(work_dir, 'z', 'zstat1.nii.gz'),
                    op.join(output_dir, 'z.nii.gz'))
    shutil.copyfile(op.join(work_dir, 'z_thresh', 'zstat1_threshold.nii.gz'),
                    op.join(output_dir, 'z_level-voxel_corr-FWE.nii.gz'))

    shutil.rmtree(work_dir)
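The calcres function handed to the Function node above is defined elsewhere in the niconn project and is not part of this excerpt. A hypothetical sketch, assuming PtoZ needs the number of resels in the search volume, i.e. the volume reported by SmoothEstimate divided by the resel size it reports:

def calcres(volume, resels):
    # Hypothetical reconstruction, not the project's actual code:
    # 'volume' is the search volume in voxels and 'resels' is the size of one
    # resel in voxels, so their ratio is the resel count used for GRF thresholding.
    return int(volume / resels)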
Code Example #5
import os
from glob import glob

from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.io import DataGrabber, DataSink
from nipype.interfaces.utility import IdentityInterface, Function
from nipype.interfaces.fsl.model import FLAMEO, L2Model
from nipype.interfaces.fsl.utils import Merge

# doublelist, num_copes, get_contrasts, get_subs, and get_dofvolumes are
# project helpers defined elsewhere in the original module.


def secondlevel_wf(subject_id, sink_directory, name='GLM1_scndlvl_wf'):
    scndlvl_wf = Workflow(name='scndlvl_wf')
    base_dir = os.path.abspath('/home/data/madlab/data/mri/wmaze/')

    contrasts = [
        'all_before_B_corr', 'all_before_B_incorr', 'all_remaining',
        'all_corr_minus_all_incorr', 'all_incorr_minus_all_corr'
    ]

    cnt_file_list = []
    for curr_contrast in contrasts:
        cnt_file_list.append(
            glob(
                os.path.join(
                    base_dir,
                    'frstlvl/model_GLM1/{0}/modelfit/contrasts/_estimate_model*/cope??_{1}.nii.gz'
                    .format(subject_id, curr_contrast))))

    dof_runs = [[], [], [], [], []]
    for i, curr_file_list in enumerate(cnt_file_list):
        if not isinstance(curr_file_list, list):
            curr_file_list = [curr_file_list]
        for curr_file in curr_file_list:
            dof_runs[i].append(
                curr_file.split('/')[-2][-1])  #grabs the estimate_model number

    info = dict(copes=[['subject_id', contrasts]],
                varcopes=[['subject_id', contrasts]],
                mask_file=[['subject_id', 'aparc+aseg_thresh']],
                dof_files=[['subject_id', dof_runs, 'dof']])

    #datasource node to grab the copes, varcopes, mask, and dof files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=info.keys()),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/')
    datasource.inputs.field_template = dict(
        copes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/cope*_%s.nii.gz',
        varcopes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/varcope*_%s.nii.gz',
        mask_file='preproc/%s/ref/_fs_threshold20/%s*_thresh.nii',
        dof_files='frstlvl/model_GLM1/%s/modelfit/dofs/_estimate_model%s/%s')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    #inputspec to deal with copes and varcopes doublelist issues
    fixedfx_inputspec = Node(IdentityInterface(
        fields=['copes', 'varcopes', 'dof_files'], mandatory_inputs=True),
                             name='fixedfx_inputspec')
    scndlvl_wf.connect(datasource, ('copes', doublelist), fixedfx_inputspec,
                       'copes')
    scndlvl_wf.connect(datasource, ('varcopes', doublelist), fixedfx_inputspec,
                       'varcopes')
    scndlvl_wf.connect(datasource, ('dof_files', doublelist),
                       fixedfx_inputspec, 'dof_files')

    #merge all of the copes into a single 4D file across subject runs
    copemerge = MapNode(Merge(), iterfield=['in_files'], name='copemerge')
    copemerge.inputs.dimension = 't'
    copemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    copemerge.inputs.ignore_exception = False
    copemerge.inputs.output_type = 'NIFTI_GZ'
    copemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'copes', copemerge, 'in_files')

    #generate DOF volume for second level
    gendofvolume = Node(Function(input_names=['dof_files', 'cope_files'],
                                 output_names=['dof_volumes'],
                                 function=get_dofvolumes),
                        name='gendofvolume')
    gendofvolume.inputs.ignore_exception = False
    scndlvl_wf.connect(fixedfx_inputspec, 'dof_files', gendofvolume,
                       'dof_files')
    scndlvl_wf.connect(copemerge, 'merged_file', gendofvolume, 'cope_files')

    #merge all of the varcopes into a single 4D file across subject runs
    varcopemerge = MapNode(Merge(),
                           iterfield=['in_files'],
                           name='varcopemerge')
    varcopemerge.inputs.dimension = 't'
    varcopemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    varcopemerge.inputs.ignore_exception = False
    varcopemerge.inputs.output_type = 'NIFTI_GZ'
    varcopemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'varcopes', varcopemerge, 'in_files')

    #define contrasts from the names of the copes
    getcontrasts = Node(Function(input_names=['data_inputs'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', doublelist), getcontrasts,
                       'data_inputs')

    #rename output files to be more descriptive
    getsubs = Node(Function(input_names=['subject_id', 'cons'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    scndlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    #l2model node for fixed effects analysis (aka within subj across runs)
    l2model = MapNode(L2Model(), iterfield=['num_copes'], name='l2model')
    l2model.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', num_copes), l2model, 'num_copes')

    #FLAMEO Node to run the fixed effects analysis
    flameo_fe = MapNode(FLAMEO(),
                        iterfield=[
                            'cope_file', 'var_cope_file', 'dof_var_cope_file',
                            'design_file', 't_con_file', 'cov_split_file'
                        ],
                        name='flameo_fe')
    flameo_fe.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    flameo_fe.inputs.ignore_exception = False
    flameo_fe.inputs.log_dir = 'stats'
    flameo_fe.inputs.output_type = 'NIFTI_GZ'
    flameo_fe.inputs.run_mode = 'fe'
    flameo_fe.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(varcopemerge, 'merged_file', flameo_fe, 'var_cope_file')
    scndlvl_wf.connect(l2model, 'design_mat', flameo_fe, 'design_file')
    scndlvl_wf.connect(l2model, 'design_con', flameo_fe, 't_con_file')
    scndlvl_wf.connect(l2model, 'design_grp', flameo_fe, 'cov_split_file')
    scndlvl_wf.connect(gendofvolume, 'dof_volumes', flameo_fe,
                       'dof_var_cope_file')
    scndlvl_wf.connect(datasource, 'mask_file', flameo_fe, 'mask_file')
    scndlvl_wf.connect(copemerge, 'merged_file', flameo_fe, 'cope_file')

    #outputspec node
    scndlvl_outputspec = Node(IdentityInterface(
        fields=['res4d', 'copes', 'varcopes', 'zstats', 'tstats'],
        mandatory_inputs=True),
                              name='scndlvl_outputspec')
    scndlvl_wf.connect(flameo_fe, 'res4d', scndlvl_outputspec, 'res4d')
    scndlvl_wf.connect(flameo_fe, 'copes', scndlvl_outputspec, 'copes')
    scndlvl_wf.connect(flameo_fe, 'var_copes', scndlvl_outputspec, 'varcopes')
    scndlvl_wf.connect(flameo_fe, 'zstats', scndlvl_outputspec, 'zstats')
    scndlvl_wf.connect(flameo_fe, 'tstats', scndlvl_outputspec, 'tstats')

    #datasink node
    sinkd = Node(DataSink(), name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    scndlvl_wf.connect(scndlvl_outputspec, 'copes', sinkd, 'fixedfx.@copes')
    scndlvl_wf.connect(scndlvl_outputspec, 'varcopes', sinkd,
                       'fixedfx.@varcopes')
    scndlvl_wf.connect(scndlvl_outputspec, 'tstats', sinkd, 'fixedfx.@tstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'zstats', sinkd, 'fixedfx.@zstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'res4d', sinkd, 'fixedfx.@pvals')
    scndlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')

    return scndlvl_wf
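The helpers referenced above (doublelist, num_copes, get_contrasts, get_subs, get_dofvolumes) are defined elsewhere in the original project and are not shown in this excerpt. Hypothetical sketches of the two simplest ones, assuming the usual conventions for this kind of fixed-effects workflow:

def doublelist(x):
    # Hypothetical helper: ensure the DataGrabber output is a list of lists,
    # so each MapNode iteration receives one file list per contrast.
    if x and not isinstance(x[0], list):
        return [x]
    return x


def num_copes(files):
    # Hypothetical helper: number of runs contributing to each contrast,
    # handed to L2Model to build one fixed-effects design per contrast.
    return [len(f) for f in files]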
Code Example #6
    merger = Merge()
    merger.inputs.dimension = 't'
    merger.inputs.in_files = copes
    merger.inputs.merged_file = op.join(work_dir, 'cope.nii.gz')
    merger.run()

    merger.inputs.in_files = varcopes
    merger.inputs.merged_file = op.join(work_dir, 'varcope.nii.gz')
    merger.run()

    model = pe.Node(interface=MultipleRegressDesign(), name='model')
    model.inputs.contrasts = [['mean', 'T', ['roi'], [1]]]
    model.inputs.regressors = dict(roi=np.ones(len(copes)).tolist())

    flameo = pe.Node(interface=FLAMEO(), name='flameo')
    flameo.inputs.cope_file = op.join(work_dir, 'cope.nii.gz')
    flameo.inputs.var_cope_file = op.join(work_dir, 'varcope.nii.gz')
    flameo.inputs.run_mode = 'flame1'
    flameo.inputs.mask_file = standard_image('MNI152_T1_2mm_brain_mask.nii.gz')

    grplevelworkflow.connect(model, 'design_con', flameo, 't_con_file')
    grplevelworkflow.connect(model, 'design_grp', flameo, 'cov_split_file')
    grplevelworkflow.connect(model, 'design_mat', flameo, 'design_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    grplevelworkflow.connect(flameo, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    grplevelworkflow.connect(smoothest, 'resels', cluster, 'resels')