Example #1

import shutil
import tempfile

import h5py

from os.path import join as pathjoin
from mvpa2.base.hdf5 import obj2hdf, hdf2obj


def saveload_warehouse():
    """Store all warehouse datasets in one HDF5 file and reload them."""
    tempdir = tempfile.mkdtemp()

    # store the whole datasets warehouse in one hdf5 file
    hdf = h5py.File(pathjoin(tempdir, 'myhdf5.hdf5'), 'w')
    for d in datasets:
        obj2hdf(hdf, datasets[d], d)
    hdf.close()

    hdf = h5py.File(pathjoin(tempdir, 'myhdf5.hdf5'), 'r')
    rc_ds = {}
    for d in hdf:
        rc_ds[d] = hdf2obj(hdf[d])
    hdf.close()

    #cleanup temp dir
    shutil.rmtree(tempdir, ignore_errors=True)

    # return the reconstructed datasets (for use in datasets warehouse)
    return rc_ds


datasets = generate_testing_datasets(specs)

if cfg.getboolean('tests', 'use hdf datasets', False):
    if not externals.exists('h5py'):
        raise RuntimeError(
            "Cannot perform HDF5 dump of all datasets in the warehouse, "
            "because 'h5py' is not available")

    datasets = saveload_warehouse()
    print "Replaced all dataset warehouse for HDF5 loaded alternative."
Example #2

    def _fit_model(self, ds, X, reg_names):
        # to be implemented by subclasses: fit the actual model and return
        # the fitted model instance together with
        # an output dataset (something x nfeatures of input ds)
        raise NotImplementedError

    def _forward_dataset(self, ds):
        reg_names, X = self._build_design(ds)
        model, out = self._fit_model(ds, X, reg_names)
        out.fa.update(ds.fa)
        out.a.update(ds.a)  # this last one might be a bit too opportunistic
        if self.params.return_design:
            if len(out) != len(X.T):
                raise ValueError("cannot include GLM regressors as sample "
                                 "attributes (dataset probably contains "
                                 "something other than parameter estimates)")
            out.sa['regressors'] = X.T
        if self.params.return_model:
            out.a['model'] = model
        return out


    # TODO: this is not unreasonable, forward+reverse cycle throws away residuals...
    #def _reverse_dataset(self, ds):
    #    # reconstruct timeseries from model fit

from mvpa2 import externals
if externals.exists('nipy'):
    from .nipy_glm import NiPyGLMMapper
    __all__.append('NiPyGLMMapper')
if externals.exists('statsmodels'):
    from .statsmodels_glm import StatsmodelsGLMMapper
    __all__.append('StatsmodelsGLMMapper')
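
Only the design construction and model fitting are left to subclasses; the two conditional imports at the bottom pull in exactly such implementations when nipy or statsmodels are available. As a rough illustration of that contract (not code from the original module; the class name and Dataset layout are assumptions), a hypothetical subclass could fit an ordinary least-squares model with plain NumPy:

import numpy as np

from mvpa2.datasets import Dataset
from mvpa2.mappers.glm import GLMMapper  # the base class shown above


class NumpyOLSGLMMapper(GLMMapper):
    # hypothetical subclass for illustration; the nipy/statsmodels mappers
    # referenced above are the real implementations
    def _fit_model(self, ds, X, reg_names):
        # ordinary least squares: one row of betas per regressor,
        # one column per input feature
        betas = np.dot(np.linalg.pinv(X), ds.samples)
        out = Dataset(betas, sa={'regressor_names': reg_names})
        # no model instance worth keeping in this sketch, hence None
        return None, out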
Example #3

import numpy as np


def get_random_rotation(ns, nt=None, data=None):
    """Return a random rotation (or rotation + dim. reduction) matrix.

    Parameters
    ----------
    ns : int
      Dimensionality of source space
    nt : int, optional
      Dimensionality of target space
    data : array, optional
      Some data (should have rank high enough) to derive
      rotation
    """
    if nt is None:
        nt = ns
    # figure out some "random" rotation
    d = max(ns, nt)
    if data is None:
        data = np.random.normal(size=(d*10, d))
    _u, _s, _vh = np.linalg.svd(data[:, :d])
    R = _vh[:ns, :nt]
    if ns == nt:
        # Test if it is indeed a rotation matrix ;)
        # Let's flip the first axis if necessary
        if np.linalg.det(R) < 0:
            R[:, 0] *= -1.0
    return R

datasets = generate_testing_datasets(specs)

if cfg.getboolean('tests', 'use hdf datasets', False):
    if not externals.exists('h5py'):
        raise RuntimeError(
            "Cannot perform HDF5 dump of all datasets in the warehouse, "
            "because 'h5py' is not available")

    datasets = saveload_warehouse()
    print "Replaced all dataset warehouse for HDF5 loaded alternative."