Example #1
0
def save(where, name, obj, overwrite=True):
    """
    hdf5pickle an object to a given node in an HDF5 file.

    :Parameters:
      where : tables.Group : The parent node under which to save
      name : str : Sub-path where to save
      obj : anything : The object to save

      overwrite : bool : Whether to overwrite the object, if it already is there

    :raises: tables.NodeError, if overwrite==False and node exists
    """
    # Accept an already-open File directly by starting from its root group.
    if isinstance(where, _tbl.File):
        where = where.root

    # Renamed from 'file', which shadowed the builtin of the same name.
    h5file = where._v_file
    # Collapse the doubled slash that appears when 'where' is the root ('/').
    path = '/'.join([where._v_pathname, name]).replace('//', '/')

    create_space(where, name, overwrite=overwrite)

    # Normalize the array's dtype before pickling (project-specific mapping).
    if isinstance(obj, _n.ndarray):
        obj = _typeconvert(obj)

    hdf5pickle.dump(obj, h5file, path, type_map=_type_map)
Example #2
0
    def dumps(self, arg, proto=0, fast=0):
        """Dump *arg* to a fresh HDF5 file via the module pickler and return the file's raw bytes."""
        # Remove any stale file from a previous run; missing-file errors are
        # expected and ignored (IOError is an alias of OSError on Python 3,
        # a sibling class on Python 2 — catch both in one clause).
        try:
            os.unlink('hdf5test.h5')
        except (IOError, OSError):
            pass
        p.dump(arg, 'hdf5test.h5')

        # Binary mode: the HDF5 container is not text.
        with open('hdf5test.h5', 'rb') as f:
            return f.read()
Example #3
0
def test_1():
    """Round-trip an object array through dump/load and compare element-wise."""
    try:
        # dtype=object is explicit: the mixed str/dict elements must be kept
        # as Python objects, never coerced.
        a = np.array(['sdf', {'a': 'b'}], dtype=object)
        dump(a, 'test.hdf5')
        a2 = load('test.hdf5')
        assert a[0] == a2[0]
        assert a[1] == a2[1]
    finally:
        # Only unlink when dump got far enough to create the file; otherwise
        # the unlink's FileNotFoundError would mask the original failure.
        if os.path.exists('test.hdf5'):
            os.unlink('test.hdf5')
Example #4
0
def test_masked_array_persistence():
    # Saving a masked_array is not implemented by the special-case pickler,
    # so it fails over and simply delegates to the standard pickler.
    rng = np.random.RandomState(0)
    masked = np.ma.masked_greater(rng.random_sample(10), 0.5)
    target = env['filename'] + str(random.randint(0, 1000))
    p.dump(masked, target)
    restored = p.load(target)
    nose.tools.assert_true(isinstance(restored, np.ma.masked_array))
Example #5
0
 def save_and_redo(self):
     """Save the current data to a cluster-named HDF5 file, record the cluster id, and restart the UI loop."""
     newfn = self.entry1.get()
     newclus = self.entry2.get()
     savefn = (self.savedir + os.sep + newfn + '_' + 'cluster_' + newclus +
               '.h5')
     hdf5pickle.dump(self.data, savefn, '/data')
     # Context manager guarantees the file is closed even if write() fails;
     # the original open/write/close sequence leaked the handle on error.
     with open('curr_clus.dat', 'w') as fid:
         fid.write('%s' % newclus)
     self.redo = True
     self.frame.quit()
     self.frame.destroy()
     # 'del self' removed: it only unbound the local name and had no effect
     # on the object's lifetime.
Example #6
0
def test_standard_types():
    # Exercise pickling and saving over every standard-type fixture,
    # both with and without compression.
    filename = env['filename']
    for compress in (0, 1):
        for member in typelist:
            # Randomize the file name so tests cannot interfere with
            # one another through leftover files.
            this_filename = '%s%d' % (filename, random.randint(0, 1000))
            p.dump(member, this_filename, compress=compress)
            _member = p.load(this_filename)
            # Only compare the reloaded value to the original when the
            # original survives a deep copy unchanged.
            if member == copy.deepcopy(member):
                yield nose.tools.assert_equal, member, _member
Example #7
0
File: test.py  Project: pv/hdf5pickle
 def dumps(self, arg, proto=0, fast=0):
     """Dump *arg* into an HDF5 file at node '/obj' and return the file's raw bytes."""
     # Remove any stale file from a previous run; missing-file errors are
     # expected and ignored (one clause covers both Python 2 and 3).
     try:
         os.unlink('hdf5test.h5')
     except (IOError, OSError):
         pass
     f = tables.openFile('hdf5test.h5', 'w')
     try:
         p.dump(arg, f, '/obj')
     finally:
         f.close()
     # Read back in BINARY mode: mode 'r' (text) would try to decode the
     # HDF5 container bytes and fail/mangle them on Python 3.
     with open('hdf5test.h5', 'rb') as f:
         return f.read()
Example #8
0
def test_numpy_persistence():
    """Round-trip plain arrays and array subclasses through dump/load."""
    filename = env['filename']
    rnd = np.random.RandomState(0)
    a = rnd.random_sample((10, 2))
    for compress in (0, 1, 9):
        # We use 'a.T' to have a non C-contiguous array.
        for index, obj in enumerate(((a,), (a.T,), (a, a), [a, a, a])):
            # Change the file name to avoid side effects between tests
            this_filename = filename + str(random.randint(0, 1000))
            p.dump(obj, this_filename, compress=compress)

            # Unpickle the object
            obj_ = p.load(this_filename)
            # Check that the items are indeed arrays
            for item in obj_:
                nose.tools.assert_true(isinstance(item, np.ndarray))
            # And finally, check that all the values are equal.
            nose.tools.assert_true(np.all(np.array(obj) ==
                                                np.array(obj_)))

        # Now test with array subclasses
        for obj in (
                    np.matrix(np.zeros(10)),
                    # builtin 'float' replaces 'np.float': the alias was
                    # deprecated in NumPy 1.20 and removed in 1.24, and it
                    # was exactly the builtin float type.
                    np.core.multiarray._reconstruct(np.memmap, (), float)
                   ):
            this_filename = filename + str(random.randint(0, 1000))
            p.dump(obj, this_filename, compress=compress)
            obj_ = p.load(this_filename)
            if (type(obj) is not np.memmap
                        and hasattr(obj, '__array_prepare__')):
                # We don't reconstruct memmaps
                nose.tools.assert_true(isinstance(obj_, type(obj)))

    # Finally smoke test the warning in case of compress + mmap_mode
    this_filename = filename + str(random.randint(0, 1000))
    p.dump(a, this_filename, compress=1)
    p.load(this_filename)
Example #9
0
def test_numpy_subclass():
    # A SubArray instance must come back as a SubArray, not a plain
    # ndarray, after a dump/load cycle.
    target = env['filename']
    original = SubArray((10,))
    p.dump(original, target)
    restored = p.load(target)
    nose.tools.assert_true(isinstance(restored, SubArray))