示例#1
0
def test_modelgen_sparse():
    """Exercise SpecifySparseModel on two synthetic 4D runs (nose yield-style).

    Builds two random 10x10x10x50 NIfTI files, configures a sparse model
    spec, and yields equality checks on the resulting ``session_info``.
    Then toggles ``stimuli_as_impulses``, ``model_hrf`` and
    ``use_temporal_deriv`` in turn, pinning selected regressor values.
    """
    tempdir = mkdtemp()
    try:
        filename1 = os.path.join(tempdir, 'test1.nii')
        filename2 = os.path.join(tempdir, 'test2.nii')
        Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename1)
        Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename2)
        s = SpecifySparseModel()
        s.inputs.input_units = 'secs'
        s.inputs.functional_runs = [filename1, filename2]
        s.inputs.time_repetition = 6
        info = [Bunch(conditions=['cond1'], onsets=[[0, 50, 100, 180]], durations=[[2]]),
                Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])]
        s.inputs.subject_info = info
        s.inputs.volumes_in_cluster = 1
        s.inputs.time_acquisition = 2
        # np.inf disables high-pass filtering of the generated regressors.
        s.inputs.high_pass_filter_cutoff = np.inf
        res = s.run()
        yield assert_equal, len(res.outputs.session_info), 2
        yield assert_equal, len(res.outputs.session_info[0]['regress']), 1
        yield assert_equal, len(res.outputs.session_info[0]['cond']), 0
        s.inputs.stimuli_as_impulses = False
        res = s.run()
        yield assert_equal, res.outputs.session_info[0]['regress'][0]['val'][0], 1.0
        s.inputs.model_hrf = True
        res = s.run()
        yield assert_almost_equal, res.outputs.session_info[0]['regress'][0]['val'][0], 0.016675298129743384
        yield assert_equal, len(res.outputs.session_info[0]['regress']), 1
        s.inputs.use_temporal_deriv = True
        res = s.run()
        # Temporal derivative adds a second regressor per condition.
        yield assert_equal, len(res.outputs.session_info[0]['regress']), 2
        yield assert_almost_equal, res.outputs.session_info[0]['regress'][0]['val'][0], 0.016675298129743384
        yield assert_almost_equal, res.outputs.session_info[1]['regress'][1]['val'][5], 0.007671459162258378
    finally:
        # BUGFIX: rmtree previously ran only on full success, leaking the
        # temporary directory whenever s.run() or a yielded check raised.
        rmtree(tempdir)
def test_SpecifySparseModel_inputs():
    """Verify the trait metadata declared on SpecifySparseModel's input spec."""
    expected = {
        'event_files': {'mandatory': True, 'xor': ["subject_info", "event_files"]},
        'functional_runs': {'copyfile': False, 'mandatory': True},
        'high_pass_filter_cutoff': {'mandatory': True},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'input_units': {'mandatory': True},
        'model_hrf': {},
        'outlier_files': {'copyfile': False},
        'realignment_parameters': {'copyfile': False},
        'save_plot': {},
        'scale_regressors': {'usedefault': True},
        'scan_onset': {'usedefault': True},
        'stimuli_as_impulses': {'usedefault': True},
        'subject_info': {'mandatory': True, 'xor': ["subject_info", "event_files"]},
        'time_acquisition': {'mandatory': True},
        'time_repetition': {'mandatory': True},
        'use_temporal_deriv': {'requires': ["model_hrf"]},
        'volumes_in_cluster': {'usedefault': True},
    }
    spec_traits = SpecifySparseModel.input_spec().traits()
    # One yielded check per (trait, metadata key) pair, nose-style.
    for trait_name, meta in expected.items():
        for meta_key, want in meta.items():
            yield assert_equal, getattr(spec_traits[trait_name], meta_key), want
def test_SpecifySparseModel_outputs():
    """Verify the trait metadata declared on SpecifySparseModel's output spec."""
    expected = {'session_info': {}, 'sparse_png_file': {}, 'sparse_svg_file': {}}
    spec = SpecifySparseModel.output_spec()

    for trait_name, meta in expected.items():
        for meta_key, want in meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], meta_key), want
示例#4
0
def test_SpecifySparseModel_outputs():
    """Check that each declared output trait exists with empty metadata."""
    output_traits = ('session_info', 'sparse_png_file', 'sparse_svg_file')
    expected = {name: {} for name in output_traits}
    spec = SpecifySparseModel.output_spec()

    for trait_name, meta in expected.items():
        for meta_key, want in meta.items():
            yield assert_equal, getattr(spec.traits()[trait_name], meta_key), want
示例#5
0
def test_modelgen_sparse(tmpdir):
    """Exercise SpecifySparseModel on two synthetic 4D runs (pytest style).

    Verifies the ``session_info`` structure, then toggles
    ``stimuli_as_impulses``, ``model_hrf`` and ``use_temporal_deriv``,
    pinning selected regressor values at each stage.
    """
    workdir = str(tmpdir)
    func_files = [os.path.join(workdir, fname)
                  for fname in ('test1.nii', 'test2.nii')]
    for path in func_files:
        Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(path)

    spec = SpecifySparseModel()
    spec.inputs.input_units = 'secs'
    spec.inputs.functional_runs = func_files
    spec.inputs.time_repetition = 6
    spec.inputs.subject_info = [
        Bunch(conditions=['cond1'], onsets=[[0, 50, 100, 180]], durations=[[2]]),
        Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]),
    ]
    spec.inputs.volumes_in_cluster = 1
    spec.inputs.time_acquisition = 2
    # np.inf disables high-pass filtering of the generated regressors.
    spec.inputs.high_pass_filter_cutoff = np.inf

    outputs = spec.run().outputs
    assert len(outputs.session_info) == 2
    assert len(outputs.session_info[0]['regress']) == 1
    assert len(outputs.session_info[0]['cond']) == 0

    spec.inputs.stimuli_as_impulses = False
    outputs = spec.run().outputs
    assert outputs.session_info[0]['regress'][0]['val'][0] == 1.0

    spec.inputs.model_hrf = True
    outputs = spec.run().outputs
    npt.assert_almost_equal(outputs.session_info[0]['regress'][0]['val'][0],
                            0.016675298129743384)
    assert len(outputs.session_info[0]['regress']) == 1

    spec.inputs.use_temporal_deriv = True
    outputs = spec.run().outputs

    # Temporal derivative adds a second regressor per condition.
    assert len(outputs.session_info[0]['regress']) == 2
    npt.assert_almost_equal(outputs.session_info[0]['regress'][0]['val'][0],
                            0.016675298129743384)
    npt.assert_almost_equal(outputs.session_info[1]['regress'][1]['val'][5],
                            0.007671459162258378)
示例#6
0
def test_SpecifySparseModel_inputs():
    """Verify trait metadata on SpecifySparseModel's input spec.

    Here ``event_files`` and ``subject_info`` each list only the *other*
    trait in their ``xor`` metadata.
    """
    expected = {
        'event_files': {'mandatory': True, 'xor': ['subject_info']},
        'functional_runs': {'copyfile': False, 'mandatory': True},
        'high_pass_filter_cutoff': {'mandatory': True},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'input_units': {'mandatory': True},
        'model_hrf': {},
        'outlier_files': {'copyfile': False},
        'realignment_parameters': {'copyfile': False},
        'save_plot': {},
        'scale_regressors': {'usedefault': True},
        'scan_onset': {'usedefault': True},
        'stimuli_as_impulses': {'usedefault': True},
        'subject_info': {'mandatory': True, 'xor': ['event_files']},
        'time_acquisition': {'mandatory': True},
        'time_repetition': {'mandatory': True},
        'use_temporal_deriv': {'requires': ['model_hrf']},
        'volumes_in_cluster': {'usedefault': True},
    }
    spec_traits = SpecifySparseModel.input_spec().traits()

    # One yielded check per (trait, metadata key) pair, nose-style.
    for trait_name, meta in expected.items():
        for meta_key, want in meta.items():
            yield assert_equal, getattr(spec_traits[trait_name], meta_key), want
def combine_wkflw(c, prep_c=foo, name='work_dir'):
    """Assemble a first-level fMRI modelling workflow.

    Grabs preprocessed data via ``c.datagrabber``, builds the per-subject
    design (``SpecifyModel`` or ``SpecifySparseModel`` depending on
    ``c.is_sparse``), augments the subject info with traditional motion
    and noise-component regressors, runs the first-level fit produced by
    ``create_first``, and sinks all results with filename substitutions.

    Parameters
    ----------
    c : configuration object with modelling settings (working_dir,
        subjectinfo/contrasts function strings, is_sparse, tr, ta, etc.)
    prep_c : preprocessing configuration (fwhm, num_noise_components,
        ...). NOTE(review): default ``foo`` must be defined at module
        level elsewhere in this file — confirm.
    name : str, name (and base-dir subfolder) for the returned workflow.

    Returns
    -------
    modelflow : nipype ``pe.Workflow`` ready to run.
    """
    import nipype.interfaces.utility as util  # utility
    import nipype.pipeline.engine as pe  # pypeline engine
    import nipype.interfaces.io as nio  # input/output
    from nipype.algorithms.modelgen import SpecifyModel, SpecifySparseModel
    import numpy as np
    modelflow = pe.Workflow(name=name)
    modelflow.base_dir = os.path.join(c.working_dir)

    # Datagrabber sub-workflow delivering the preprocessed files.
    preproc = c.datagrabber.create_dataflow()  #preproc_datagrabber(prep_c)

    #infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
    #                     name='subject_names')

    #if c.test_mode:
    #    infosource.iterables = ('subject_id', [c.subjects[0]])
    #else:
    #    infosource.iterables = ('subject_id', c.subjects)

    # Reuse the subject iterable node embedded in the datagrabber flow
    # rather than creating a fresh IdentityInterface (see commented code).
    infosource = preproc.get_node('subject_id_iterable')
    #modelflow.connect(infosource,'subject_id',preproc,'subject_id')
    #preproc.iterables = ('fwhm', prep_c.fwhm)

    # Evaluates the user-supplied subjectinfo function string per subject.
    subjectinfo = pe.Node(util.Function(input_names=['subject_id'],
                                        output_names=['output']),
                          name='subjectinfo')
    subjectinfo.inputs.function_str = c.subjectinfo

    # Builds the DataSink substitution list that maps nipype's generated
    # node/iterable directory names to human-readable output paths.
    # Serialized by util.Function below, so it must be self-contained.
    def getsubs(subject_id, cons, info, fwhm):
        #from config import getcontrasts, get_run_numbers, subjectinfo, fwhm
        subs = [('_subject_id_%s/' % subject_id, ''), ('_plot_type_', ''),
                ('_fwhm', 'fwhm'), ('_dtype_mcf_mask_mean', '_mean'),
                ('_dtype_mcf_mask_smooth_mask_gms_tempfilt',
                 '_smoothed_preprocessed'),
                ('_dtype_mcf_mask_gms_tempfilt', '_unsmoothed_preprocessed'),
                ('_dtype_mcf', '_mcf')]

        # NOTE(review): hard-coded 4 presumably caps the per-run node
        # indices cleaned up here — confirm against expected run counts.
        for i in range(4):
            subs.append(('_plot_motion%d' % i, ''))
            subs.append(('_highpass%d/' % i, ''))
            subs.append(('_realign%d/' % i, ''))
            subs.append(('_meanfunc2%d/' % i, ''))
        runs = range(len(info))
        for i, run in enumerate(runs):
            subs.append(('_modelestimate%d/' % i, '_run_%d_%02d_' % (i, run)))
            subs.append(('_modelgen%d/' % i, '_run_%d_%02d_' % (i, run)))
            subs.append(('_conestimate%d/' % i, '_run_%d_%02d_' % (i, run)))
        # Rename stats files to include the 1-based index and contrast name.
        for i, con in enumerate(cons):
            subs.append(
                ('cope%d.' % (i + 1), 'cope%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('varcope%d.' % (i + 1), 'varcope%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('zstat%d.' % (i + 1), 'zstat%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('tstat%d.' % (i + 1), 'tstat%02d_%s.' % (i + 1, con[0])))
        """for i, name in enumerate(info[0].conditions):
            subs.append(('pe%d.'%(i+1), 'pe%02d_%s.'%(i+1,name)))
        for i in range(len(info[0].conditions), 256):
            subs.append(('pe%d.'%(i+1), 'others/pe%02d.'%(i+1)))"""
        for i in fwhm:
            subs.append(('_register%d/' % (i), ''))

        return subs

    get_substitutions = pe.Node(util.Function(
        input_names=['subject_id', 'cons', 'info', 'fwhm'],
        output_names=['subs'],
        function=getsubs),
                                name='getsubs')

    # create a node to create the subject info
    if not c.is_sparse:
        s = pe.Node(SpecifyModel(), name='s')
    else:
        s = pe.Node(SpecifySparseModel(
            model_hrf=c.model_hrf,
            stimuli_as_impulses=c.stimuli_as_impulses,
            use_temporal_deriv=c.use_temporal_deriv,
            volumes_in_cluster=c.volumes_in_cluster,
            scan_onset=c.scan_onset,
            scale_regressors=c.scale_regressors),
                    name='s')
        # time_acquisition is only meaningful for sparse designs.
        s.inputs.time_acquisition = c.ta
    s.inputs.input_units = c.input_units
    s.inputs.time_repetition = c.tr
    # A negative cutoff is the config's convention for "no filtering".
    if c.hpcutoff < 0:
        c.hpcutoff = np.inf
    s.inputs.high_pass_filter_cutoff = c.hpcutoff
    #subjinfo =                                          subjectinfo(subj)

    # create a node to add the traditional (MCFLIRT-derived) motion regressors to
    # the subject info
    trad_motn = pe.Node(util.Function(input_names=['subinfo', 'files'],
                                      output_names=['subinfo'],
                                      function=trad_mot),
                        name='trad_motn')

    #subjinfo = pe.Node(interface=util.Function(input_names=['subject_id','get_run_numbers'], output_names=['output'], function = c.subjectinfo), name='subjectinfo')
    #subjinfo.inputs.get_run_numbers = c.get_run_numbers
    #modelflow.connect(infosource,'subject_id',
    #                  subjinfo,'subject_id' )
    #modelflow.connect(subjinfo, 'output',
    #                  trad_motn, 'subinfo')

    #modelflow.connect(infosource, ('subject_id',subjectinfo), trad_motn, 'subinfo')
    modelflow.connect(infosource, 'subject_id', subjectinfo, 'subject_id')
    modelflow.connect(subjectinfo, 'output', trad_motn, 'subinfo')

    # create a node to add the principle components of the noise regressors to
    # the subject info
    noise_motn = pe.Node(util.Function(input_names=[
        'subinfo', 'files', 'num_noise_components', "use_compcor"
    ],
                                       output_names=['subinfo'],
                                       function=noise_mot),
                         name='noise_motn')
    noise_motn.inputs.use_compcor = c.use_compcor
    # generate first level analysis workflow
    modelfit = create_first()
    modelfit.inputs.inputspec.interscan_interval = c.interscan_interval
    modelfit.inputs.inputspec.film_threshold = c.film_threshold

    # Evaluates the user-supplied contrasts function string per subject.
    contrasts = pe.Node(util.Function(input_names=['subject_id'],
                                      output_names=['contrasts']),
                        name='getcontrasts')
    contrasts.inputs.function_str = c.contrasts

    modelflow.connect(infosource, 'subject_id', contrasts, 'subject_id')
    modelflow.connect(contrasts, 'contrasts', modelfit, 'inputspec.contrasts')

    modelfit.inputs.inputspec.bases = c.bases
    modelfit.inputs.inputspec.model_serial_correlations = True
    noise_motn.inputs.num_noise_components = prep_c.num_noise_components

    # make a data sink
    sinkd = pe.Node(nio.DataSink(), name='sinkd')
    sinkd.inputs.base_directory = os.path.join(c.sink_dir)

    modelflow.connect(infosource, 'subject_id', sinkd, 'container')
    #modelflow.connect(infosource, ('subject_id',getsubs, getcontrasts, subjectinfo, prep_c.fwhm), sinkd, 'substitutions')
    modelflow.connect(infosource, 'subject_id', get_substitutions,
                      'subject_id')
    modelflow.connect(contrasts, 'contrasts', get_substitutions, 'cons')
    modelflow.connect(subjectinfo, 'output', get_substitutions, 'info')
    get_substitutions.inputs.fwhm = prep_c.fwhm
    modelflow.connect(get_substitutions, 'subs', sinkd, 'substitutions')

    # Regex substitutions: collapse fwhm-specific mask dirs, then strip
    # iteration suffixes from realigned/motion/bbreg paths.
    sinkd.inputs.regexp_substitutions = [
        ('mask/fwhm_%d/_threshold([0-9]*)/.*nii' % x,
         'mask/fwhm_%d/funcmask.nii' % x) for x in prep_c.fwhm
    ]
    sinkd.inputs.regexp_substitutions.append(
        ('realigned/fwhm_([0-9])/_copy_geom([0-9]*)/', 'realigned/'))
    sinkd.inputs.regexp_substitutions.append(
        ('motion/fwhm_([0-9])/', 'motion/'))
    sinkd.inputs.regexp_substitutions.append(('bbreg/fwhm_([0-9])/', 'bbreg/'))

    # make connections
    modelflow.connect(preproc, 'datagrabber.motion_parameters', trad_motn,
                      'files')
    modelflow.connect(preproc, 'datagrabber.noise_components', noise_motn,
                      'files')
    modelflow.connect(preproc, 'datagrabber.highpassed_files', s,
                      'functional_runs')
    modelflow.connect(preproc, 'datagrabber.highpassed_files', modelfit,
                      'inputspec.functional_data')
    modelflow.connect(preproc, 'datagrabber.outlier_files', s, 'outlier_files')
    # Chain: subject info -> +motion regressors -> +noise regressors -> model.
    modelflow.connect(trad_motn, 'subinfo', noise_motn, 'subinfo')
    modelflow.connect(noise_motn, 'subinfo', s, 'subject_info')
    modelflow.connect(s, 'session_info', modelfit, 'inputspec.session_info')
    modelflow.connect(modelfit, 'outputspec.parameter_estimates', sinkd,
                      'modelfit.estimates')
    modelflow.connect(modelfit, 'outputspec.sigmasquareds', sinkd,
                      'modelfit.estimates.@sigsq')
    modelflow.connect(modelfit, 'outputspec.dof_file', sinkd, 'modelfit.dofs')
    modelflow.connect(modelfit, 'outputspec.copes', sinkd,
                      'modelfit.contrasts.@copes')
    modelflow.connect(modelfit, 'outputspec.varcopes', sinkd,
                      'modelfit.contrasts.@varcopes')
    modelflow.connect(modelfit, 'outputspec.zstats', sinkd,
                      'modelfit.contrasts.@zstats')
    modelflow.connect(modelfit, 'outputspec.tstats', sinkd,
                      'modelfit.contrasts.@tstats')
    modelflow.connect(modelfit, 'outputspec.design_image', sinkd,
                      'modelfit.design')
    modelflow.connect(modelfit, 'outputspec.design_cov', sinkd,
                      'modelfit.design.@cov')
    modelflow.connect(modelfit, 'outputspec.design_file', sinkd,
                      'modelfit.design.@matrix')
    modelflow.connect(modelfit, 'outputspec.pfiles', sinkd,
                      'modelfit.contrasts.@pstats')
    return modelflow