Example #1
0
 def test_write_dataset_iterable_multidimensional_array(self):
     """Round-trip a 3-D array written through a buffered DataChunkIterator."""
     data = np.arange(30).reshape(5, 2, 3)
     chunk_iter = DataChunkIterator.from_iterable(iter(data), buffer_size=2)
     builder = DatasetBuilder('test_dataset', chunk_iter, attributes={})
     self.io.write_dataset(self.f, builder)
     # Read back from the open HDF5 group and compare element-wise.
     written = self.f['test_dataset']
     self.assertListEqual(written[:].tolist(), data.tolist())
Example #2
0
 def test_write_dataset_datachunkiterator(self):
     """Write a DataChunkIterator-backed TimeSeries and verify the stored data.

     The acquisition data is streamed through DataChunkIterator with a
     buffer of 2 rows, written via NWBHDF5IO, then re-read with h5py and
     compared against the source array.
     """
     a = np.arange(30).reshape(5, 2, 3)
     aiter = iter(a)
     daiter = DataChunkIterator.from_iterable(aiter, buffer_size=2)
     ts = TimeSeries('ts_name', daiter, 'A', timestamps=np.arange(5))
     self.nwbfile.add_acquisition(ts)
     with NWBHDF5IO(self.path, 'w') as io:
         io.write(self.nwbfile)
     # Fix: the verification file was previously opened without ever being
     # closed (leaked HDF5 handle); a with-block guarantees closure even
     # when an assertion fails.
     with File(self.path, 'r') as infile:
         dset = infile['/acquisition/ts_name/data']
         self.assertListEqual(dset[:].tolist(), a.tolist())
Example #3
0
 def test_write_dataset_iterable_multidimensional_array_compression(self):
     """Round-trip a 3-D array via DataChunkIterator wrapped in H5DataIO.

     Verifies both the data contents and that the gzip/shuffle/fletcher32
     filter settings survive the write.
     """
     data = np.arange(30).reshape(5, 2, 3)
     chunk_iter = DataChunkIterator.from_iterable(iter(data), buffer_size=2)
     wrapped = H5DataIO(data=chunk_iter,
                        compression='gzip',
                        compression_opts=5,
                        shuffle=True,
                        fletcher32=True)
     builder = DatasetBuilder('test_dataset', wrapped, attributes={})
     self.io.write_dataset(self.f, builder)
     written = self.f['test_dataset']
     # Shape and contents must match the source array.
     self.assertEqual(written.shape, data.shape)
     self.assertListEqual(written[:].tolist(), data.tolist())
     # Filter pipeline settings must be applied to the created dataset.
     self.assertEqual(written.compression, 'gzip')
     self.assertEqual(written.compression_opts, 5)
     self.assertEqual(written.shuffle, True)
     self.assertEqual(written.fletcher32, True)
Example #4
0
 def test_write_dataset_datachunkiterator_with_compression(self):
     """Write a compressed DataChunkIterator-backed TimeSeries and verify it.

     The data is wrapped in H5DataIO with gzip/shuffle/fletcher32, written
     via NWBHDF5IO, then re-read with h5py to check both contents and the
     persisted filter settings.
     """
     a = np.arange(30).reshape(5, 2, 3)
     aiter = iter(a)
     daiter = DataChunkIterator.from_iterable(aiter, buffer_size=2)
     wrapped_daiter = H5DataIO(data=daiter,
                               compression='gzip',
                               compression_opts=5,
                               shuffle=True,
                               fletcher32=True)
     ts = TimeSeries('ts_name',
                     wrapped_daiter,
                     'A',
                     timestamps=np.arange(5))
     self.nwbfile.add_acquisition(ts)
     with NWBHDF5IO(self.path, 'w') as io:
         io.write(self.nwbfile)
     # Fix: the verification file was previously opened without ever being
     # closed (leaked HDF5 handle); a with-block guarantees closure even
     # when an assertion fails.
     with File(self.path, 'r') as infile:
         dset = infile['/acquisition/ts_name/data']
         self.assertEqual(dset.shape, a.shape)
         self.assertListEqual(dset[:].tolist(), a.tolist())
         self.assertEqual(dset.compression, 'gzip')
         self.assertEqual(dset.compression_opts, 5)
         self.assertEqual(dset.shuffle, True)
         self.assertEqual(dset.fletcher32, True)
Example #5
0
 def test_maxshape(self):
     """maxshape: first axis unlimited (None), trailing axes taken from data."""
     source = np.arange(30).reshape(5, 2, 3)
     it = DataChunkIterator.from_iterable(iter(source), buffer_size=2)
     self.assertEqual(it.maxshape, (None, 2, 3))
Example #6
0
 def test_dtype(self):
     """The iterator reports the dtype of the wrapped numpy array."""
     source = np.arange(30, dtype='int32').reshape(5, 2, 3)
     it = DataChunkIterator.from_iterable(iter(source), buffer_size=2)
     self.assertEqual(it.dtype, source.dtype)