class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore on the child datastore,
        # without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        # raw string: '\.' and '\d' in a plain literal raise an
        # invalid-escape DeprecationWarning on modern Pythons
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer
    (variant keeping the leading '/' in the HDF5 key names)."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # store and retrieve a callable
        self.dstore['key1_upper'] = key1_upper
        self.assertEqual(self.dstore['key1_upper'], 'VALUE1')

    def test_hdf5(self):
        # optional test, run only if h5py is available
        try:
            import h5py  # noqa: F401 -- only probing for availability
        except ImportError:
            # give the skip a reason so test reports are self-explaining
            raise unittest.SkipTest('h5py is not available')
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['/key1', '/key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['/key1'])
        numpy.testing.assert_equal(self.dstore['/key1'], value1)
        self.assertGreater(self.dstore.getsize('/key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('/dset', shape=(4, 2),
                                               dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['/dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])
        # notice: it is not possible to store non-arrays
        with self.assertRaises(ValueError):
            self.dstore['/key1'] = 'value1'
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer
    (h5py-optional variant with normalized key names)."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # optional test, run only if h5py is available
        try:
            import h5py  # noqa: F401 -- only probing for availability
        except ImportError:
            # give the skip a reason so test reports are self-explaining
            raise unittest.SkipTest('h5py is not available')
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer and the
    module-level `read` helper."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'], dtype=bytes)
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore on the child datastore,
        # without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        # raw string: '\.' and '\d' in a plain literal raise an
        # invalid-escape DeprecationWarning on modern Pythons
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        # use a context manager so the empty file is flushed and closed
        with open(fname, 'w') as f:
            f.write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn('Permission denied:', str(ctx.exception))
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer and the
    module-level `read` helper (double-quoted variant)."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore["key1"] = "value1"
        self.assertEqual(len(self.dstore), 1)
        self.dstore["key2"] = "value2"
        self.assertEqual(list(self.dstore), ["key1", "key2"])
        del self.dstore["key2"]
        self.assertEqual(list(self.dstore), ["key1"])
        self.assertEqual(self.dstore["key1"], "value1")

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore["/key1"] = value1 = numpy.array(["a", "b"], dtype=bytes)
        self.dstore["/key2"] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ["key1", "key2"])
        del self.dstore["/key2"]
        self.assertEqual(list(self.dstore), ["key1"])
        numpy.testing.assert_equal(self.dstore["key1"], value1)
        self.assertGreater(self.dstore.getsize("key1"), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset("dset", shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore["dset"][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore["key1"] = "value1"
        # test `in` functionality with composite keys
        self.dstore["a/b"] = 42
        self.assertTrue("a/b" in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore on the child datastore,
        # without overriding the attributes with the same name
        self.dstore.attrs["a"] = 2
        parent = DataStore(params=[("a", 1), ("b", 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [("a", 2), ("b", 2)])

    def test_export_path(self):
        path = self.dstore.export_path("hello.txt")
        # raw string: '\.' and '\d' in a plain literal raise an
        # invalid-escape DeprecationWarning on modern Pythons
        mo = re.match(r"\./hello_\d+", path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir="/fake/directory")
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir="/tmp")
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, "calc_42.hdf5")
        # use a context manager so the empty file is flushed and closed
        with open(fname, "w") as f:
            f.write("")
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn("Permission denied:", str(ctx.exception))
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer and the
    module-level `read` helper."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore on the child datastore,
        # without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt')
        # raw string: '\.' and '\d' in a plain literal raise an
        # invalid-escape DeprecationWarning on modern Pythons
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(IOError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        # use a context manager so the empty file is flushed and closed
        with open(fname, 'w') as f:
            f.write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        # str(), not unicode(): `unicode` is a Python 2 builtin and
        # raises NameError on Python 3
        self.assertIn('Permission denied:', str(ctx.exception))
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer
    (variant exporting a path with no file extension)."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')
        # test a datastore view
        self.assertEqual(view('key1_upper', self.dstore), 'VALUE1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'])
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(
            self.dstore['dset'][:], [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_parent(self):
        # copy the attributes of the parent datastore on the child datastore,
        # without overriding the attributes with the same name
        self.dstore.attrs['a'] = 2
        parent = DataStore(params=[('a', 1), ('b', 2)])
        self.dstore.set_parent(parent)
        attrs = sorted(self.dstore.attrs.items())
        self.assertEqual(attrs, [('a', 2), ('b', 2)])

    def test_export_path(self):
        path = self.dstore.export_path('hello')
        # raw string: '\.' and '\d' in a plain literal raise an
        # invalid-escape DeprecationWarning on modern Pythons
        mo = re.match(r'\./hello_\d+', path)
        self.assertIsNotNone(mo)
class DataStoreTestCase(unittest.TestCase):
    """Tests for the DataStore pickle/HDF5 persistence layer and the
    module-level `read` helper (OSError/lowercase-message variant)."""

    def setUp(self):
        self.dstore = DataStore()

    def tearDown(self):
        self.dstore.clear()

    def test_pik(self):
        # store pickleable Python objects
        self.dstore['key1'] = 'value1'
        self.assertEqual(len(self.dstore), 1)
        self.dstore['key2'] = 'value2'
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['key2']
        self.assertEqual(list(self.dstore), ['key1'])
        self.assertEqual(self.dstore['key1'], 'value1')

    def test_hdf5(self):
        # store numpy arrays as hdf5 files
        self.assertEqual(len(self.dstore), 0)
        self.dstore['/key1'] = value1 = numpy.array(['a', 'b'], dtype=bytes)
        self.dstore['/key2'] = numpy.array([1, 2])
        self.assertEqual(list(self.dstore), ['key1', 'key2'])
        del self.dstore['/key2']
        self.assertEqual(list(self.dstore), ['key1'])
        numpy.testing.assert_equal(self.dstore['key1'], value1)
        self.assertGreater(self.dstore.getsize('key1'), 0)
        self.assertGreater(self.dstore.getsize(), 0)
        # test creating and populating a dset
        dset = self.dstore.hdf5.create_dataset('dset', shape=(4, 2), dtype=int)
        dset[0] = [1, 2]
        dset[3] = [4, 5]
        numpy.testing.assert_equal(self.dstore['dset'][:],
                                   [[1, 2], [0, 0], [0, 0], [4, 5]])
        # it is possible to store twice the same key (work around a bug)
        self.dstore['key1'] = 'value1'
        # test `in` functionality with composite keys
        self.dstore['a/b'] = 42
        self.assertTrue('a/b' in self.dstore)

    def test_export_path(self):
        path = self.dstore.export_path('hello.txt', tempfile.mkdtemp())
        # raw string: '\d' in a plain literal raises an invalid-escape
        # DeprecationWarning on modern Pythons
        mo = re.search(r'hello_\d+', path)
        self.assertIsNotNone(mo)

    def test_read(self):
        # case of a non-existing directory
        with self.assertRaises(OSError):
            read(42, datadir='/fake/directory')
        # case of a non-existing file
        with self.assertRaises(IOError):
            read(42, datadir='/tmp')
        # case of no read permission
        tmp = tempfile.mkdtemp()
        fname = os.path.join(tmp, 'calc_42.hdf5')
        # use a context manager so the empty file is flushed and closed
        with open(fname, 'w') as f:
            f.write('')
        os.chmod(fname, 0)
        with self.assertRaises(IOError) as ctx:
            read(42, datadir=tmp)
        self.assertIn('permission denied', str(ctx.exception).lower())