Example #1
# assumed imports for this excerpt (PyMVPA test-suite context):
#   import os, tempfile, h5py
#   from mvpa2.base.dataset import AttrDataset
#   from mvpa2.base.hdf5 import h5save, obj2hdf
#   from mvpa2.testing.datasets import datasets
#   from mvpa2.testing.tools import assert_raises, assert_array_equal
def test_h5py_dataset_typecheck():
    ds = datasets['uni2small']

    fd, fpath = tempfile.mkstemp('mvpa', 'test')
    os.close(fd)
    fd, fpath2 = tempfile.mkstemp('mvpa', 'test')
    os.close(fd)

    h5save(fpath2, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2)
    # this one additionally checks that a missing group name is rejected
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2, name='bogus')

    hdf = h5py.File(fpath, 'w')
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name='non-bogus')
    obj2hdf(hdf, [1, 2, 3], name='bogus')
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')
    ds_loaded = AttrDataset.from_hdf5(fpath, name='non-bogus')
    assert_array_equal(ds, ds_loaded)  # verify the round trip preserved the data

    # clean up the temporary files
    os.remove(fpath)
    os.remove(fpath2)
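
The test above pins down two failure modes of AttrDataset.from_hdf5: the stored object is not a dataset at all, and the requested group name does not exist. For contrast, a minimal sketch of the successful path, built from the same calls the test uses (the file path and group name are hypothetical):

import h5py
hdf = h5py.File('/tmp/ds.h5', 'w')                 # hypothetical path
obj2hdf(hdf, AttrDataset([[1, 2, 3]]), name='payload')
hdf.close()
restored = AttrDataset.from_hdf5('/tmp/ds.h5', name='payload')
assert (restored.samples == [[1, 2, 3]]).all()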
Example #2
# fname is supplied by the test harness (presumably a tempfile decorator)
def test_dataset_without_chunks(fname):
    # regression test for:
    #   ValueError: All chunk dimensions must be positive (Invalid arguments to routine: Out of range)
    # MH: This is not about Dataset chunks, but about an empty samples array
    ds = AttrDataset([8], a=dict(custom=1))
    save(ds, fname, compression='gzip')
    ds_loaded = h5load(fname)
    ok_(ds_loaded.a.custom == ds.a.custom)
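
Anything passed via a= ends up as a dataset-level attribute on ds.a and must survive the save/load cycle. A minimal sketch reusing the calls from the excerpt (the path is hypothetical; save is presumably a dataset-level wrapper around the HDF5 backend):

ds = AttrDataset([8], a=dict(custom=1))        # .a holds dataset attributes
save(ds, '/tmp/demo.h5', compression='gzip')   # hypothetical path
restored = h5load('/tmp/demo.h5')
assert restored.a.custom == ds.a.custom        # attributes survive the round trip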
Example #3
    def query_byid(self, vertexid):
        """Given a vertex ID give us indices of dataset features (voxels)

        Parameters
        ----------
        vertexid: int
            Index of searchlight center vertex on the surface.
            This value should be an element in self.ids

        Returns
        -------
        voxel_ids: list of int or AttrDataset
            The linear indices of voxels near the vertex with index vertexid.
            If the instance was constructed with add_fa=None, then voxel_ids
            is a list; otherwise it is an AttrDataset with additional feature
            attributes stored in voxel_ids.fa.

        """
        if self._map_voxel_coord is None:
            raise ValueError("No voxel mapping - did you train?")

        voxel_unmasked_ids = self.voxsel.get(vertexid)

        # map unmasked voxel indices to feature indices within the dataset
        voxel_dataset_ids = [
            self._map_voxel_coord[i] for i in voxel_unmasked_ids
        ]
        # each entry is itself a list of feature ids; flatten into one list
        voxel_dataset_ids_flat = sum(voxel_dataset_ids, [])

        if self._add_fa is not None:
            # optionally add additional information from voxsel
            ds = AttrDataset(np.asarray(voxel_dataset_ids_flat)[np.newaxis])
            for n in self._add_fa:
                fa_values = self.voxsel.get_aux(vertexid, n)
                assert len(fa_values) == len(voxel_dataset_ids)
                ds.fa[n] = sum(
                    [[x] * len(ids)
                     for x, ids in zip(fa_values, voxel_dataset_ids)], [])
            return ds
        return voxel_dataset_ids_flat
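
The return type depends on how the instance was constructed, so callers typically branch on it. A hedged usage sketch (the helper name and the qe argument are hypothetical; qe stands for a trained query engine exposing ids and query_byid as above):

from mvpa2.base.dataset import AttrDataset    # assumed import path

def collect_center_features(qe, center):
    """Return (linear_ids, extra_fa) for one searchlight center."""
    result = qe.query_byid(center)
    if isinstance(result, AttrDataset):       # constructed with add_fa=[...]
        linear_ids = list(result.samples[0])  # ids form the single sample row
        extra_fa = {n: result.fa[n].value for n in result.fa.keys()}
    else:                                     # constructed with add_fa=None
        linear_ids = result                   # already a plain list of int
        extra_fa = {}
    return linear_ids, extra_fa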
Example #4
    def test_flat_surface_plotting_exception_wrong_size(self):
        s = surf.generate_plane((0, 0, 0), (0, 0, 1), (0, 1, 0), 6, 6)

        for offset in (-1, 0, 1):
            # feature count one below, equal to, and one above the vertex count
            nfeatures = s.nvertices + offset
            ds = AttrDataset(samples=np.random.normal(size=(1, nfeatures)))
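
Only the dataset whose feature count matches the number of vertices can be mapped onto the surface for plotting; the other two are expected to raise. A small sketch of the size relation being exercised (assuming numpy as np, surf from mvpa2.support.nibabel, and that generate_plane(origin, dx, dy, nx, ny) yields nx * ny vertices):

s = surf.generate_plane((0, 0, 0), (0, 0, 1), (0, 1, 0), 6, 6)
print(s.nvertices)   # presumably 36 for a 6 x 6 grid
ds_ok = AttrDataset(samples=np.random.normal(size=(1, s.nvertices)))
ds_bad = AttrDataset(samples=np.random.normal(size=(1, s.nvertices + 1)))
# ds_ok maps one value per vertex; ds_bad cannot be matched to the surface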
Example #5
    # simple 1d -- would have worked before as well
    np.array([{'d': np.empty(shape=(2, 3))}], dtype=object),
    # 2d -- before the fix it would be reconstructed incorrectly
    np.array([[{'d': np.empty(shape=(2, 3))}]], dtype=object),
    # a bit more elaborate storage
    np.array([[{'d': np.empty(shape=(2, 3)), 'k': 33}] * 2] * 3, dtype=object),
    # Swaroop's use-case
    AttrDataset(np.array([{'d': np.empty(shape=(2, 3))}], dtype=object)),
    # as it would be reconstructed before the fix -- an object array of object arrays
    np.array([np.array([{'d': np.empty(shape=(2, 3))}], dtype=object)],
             dtype=object),
    # degenerate empty array
    np.array([], dtype='int64'),
))
def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest(
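
At its core, the property under test is that an object-dtype ndarray keeps its dtype, its shape, and its elements across a save/load cycle. A minimal sketch of the 2d case called out above, assuming the same h5save/h5load helpers as in the earlier examples (the path is hypothetical):

import numpy as np
obj = np.array([[{'d': np.empty(shape=(2, 3))}]], dtype=object)
h5save('/tmp/objarr.h5', obj)                  # hypothetical path
restored = h5load('/tmp/objarr.h5')
assert restored.dtype == np.dtype(object)
assert restored.shape == obj.shape             # the 2d shape used to come back wrong
assert restored[0, 0]['d'].shape == (2, 3)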