Exemple #1
0
def run(args):
    """Print diagnostic information about the PyMVPA installation.

    Depending on the parsed command-line flags this reports external
    dependencies, registered debug targets, the classifier warehouse,
    or the full ``wtf()`` environment summary.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed arguments carrying ``externals``, ``debug`` and
        ``learner_warehouse`` attributes.
    """
    if args.externals:
        print(mvpa2.wtf(include=['externals']))
    elif args.debug:
        mvpa2.debug.print_registered()
    elif args.learner_warehouse is not False:
        # Identity test against False (not truthiness): an empty list of
        # warehouse filters must still trigger the full listing.
        from mvpa2.clfs.warehouse import clfswh
        clfswh.print_registered(*args.learner_warehouse)
    else:
        print(mvpa2.wtf())
Exemple #2
0
def run(args):
    """Print diagnostic information about the PyMVPA installation.

    Depending on the parsed command-line flags this reports external
    dependencies, registered debug targets, the classifier warehouse,
    or the full ``wtf()`` environment summary.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed arguments carrying ``externals``, ``debug`` and
        ``learner_warehouse`` attributes.
    """
    # Python 3 print() calls (the original used Python 2 print statements).
    if args.externals:
        print(mvpa2.wtf(include=['externals']))
    elif args.debug:
        mvpa2.debug.print_registered()
    elif args.learner_warehouse is not False:
        # Identity test against False (not truthiness): an empty list of
        # warehouse filters must still trigger the full listing.
        from mvpa2.clfs.warehouse import clfswh
        clfswh.print_registered(*args.learner_warehouse)
    else:
        print(mvpa2.wtf())
Exemple #3
0
def generate_testing_fmri_dataset(filename=None):
    """Helper to generate a dataset for regression testing of mvpa2/nibabel

    Parameters
    ----------
    filename : str
       Filename of a dataset file to store.  If not provided, it is composed
       using :func:`get_testing_fmri_dataset_filename`

    Returns
    -------
    Dataset, string
       Generated dataset, filename to the HDF5 where it was stored
    """
    import mvpa2
    from mvpa2.base.hdf5 import h5save
    from mvpa2.datasets.sources import load_example_fmri_dataset

    # Start from the bundled sample dataset and carve out a tiny "ROI"
    # so the regression fixture stays small.
    full_ds = load_example_fmri_dataset(name='1slice', literal=False)
    roi_ds = full_ds[20:23, 10:14]

    # Embed the full environment description (and all dependency versions)
    # so a later regression failure can be troubleshot against the exact
    # software stack that produced the fixture.
    roi_ds.a['wtf'] = mvpa2.wtf()
    roi_ds.a['versions'] = mvpa2.externals.versions

    # Store under the explicit filename, or one derived from the PyMVPA and
    # nibabel versions plus a hash of all other dependency versions.
    target = filename if filename else get_testing_fmri_dataset_filename()
    h5save(target, roi_ds, compression=9)

    # NOTE: the resulting .hdf5 exceeds 700kB, dominated by ds.a.mapper
    # (Flatten and StaticFeatureSelection at >190kB each, ~570kB overall);
    # .ca alone takes ~130KB for the FlattenMapper.  The bulk is hdf5
    # meta-information describing our objects, not actual payload.
    return roi_ds, target
Exemple #4
0
def generate_testing_fmri_dataset(filename=None):
    """Helper to generate a dataset for regression testing of mvpa2/nibabel

    Parameters
    ----------
    filename : str
       Filename of a dataset file to store.  If not provided, it is composed
       using :func:`get_testing_fmri_dataset_filename`

    Returns
    -------
    Dataset, string
       Generated dataset, filename to the HDF5 where it was stored
    """
    import mvpa2
    from mvpa2.base.hdf5 import h5save
    from mvpa2.datasets.sources import load_example_fmri_dataset

    # Take a small slice of the example dataset -- a compact fixture is
    # all the regression tests need.
    sample = load_example_fmri_dataset(name='1slice', literal=False)
    subset = sample[20:23, 10:14]

    # Record versions/dependencies of the producing environment in the
    # dataset attributes for later troubleshooting.
    subset.a['wtf'] = mvpa2.wtf()
    subset.a['versions'] = mvpa2.externals.versions

    # Resolve the output path: caller-provided, or composed from PyMVPA and
    # nibabel versions plus a hash of the remaining dependency versions.
    if filename:
        out_path = filename
    else:
        out_path = get_testing_fmri_dataset_filename()
    h5save(out_path, subset, compression=9)

    # NOTE: file size is >700kB even compressed; ds.a.mapper alone yields a
    # ~570kB file (Flatten and StaticFeatureSelection each >190kB, with .ca
    # ~130KB for the FlattenMapper).  Almost all of it is hdf5
    # meta-information embedded to describe our objects.
    return subset, out_path
Exemple #5
0
def teardown_module(module, verbosity=None):
    """Tear down test fixtures installed by the module setup."""
    verbosity = _get_verbosity(verbosity)

    # The warning counter limit is always restored.
    warning.maxcount = _sys_settings['maxcount']

    # Warning handlers and numpy error settings were only replaced at the
    # lower verbosity levels -- undo exactly what setup touched.
    if verbosity < 3:
        warning.handlers = _sys_settings['handlers']
    if verbosity < 4:
        np.seterr(**_sys_settings['np_errsettings'])

    # Optionally dump the full environment summary for post-run debugging.
    if cfg.getboolean('tests', 'wtf', default='no'):
        sys.stderr.write(str(wtf()))
Exemple #6
0
def teardown_module(module, verbosity=None):
    """Undo the test-fixture changes made by the corresponding setup."""
    level = _get_verbosity(verbosity)

    # Put back the saved warning counter limit unconditionally.
    warning.maxcount = _sys_settings['maxcount']

    if level < 3:
        # Setup swapped out the warning handlers at this level; restore them.
        warning.handlers = _sys_settings['handlers']

    if level < 4:
        # Likewise restore the numpy floating-point error settings.
        np.seterr(**_sys_settings['np_errsettings'])

    if cfg.getboolean('tests', 'wtf', default='no'):
        # When configured, write the full environment report to stderr.
        sys.stderr.write(str(wtf()))