def test_append2(self):
    """Appending nested rows to a 2-D persistent array round-trips intact."""
    persist = blaze.Storage(self.rooturi, format="blz")
    arr = blaze.empty('0 * 2 * float64', storage=persist)
    self.assertTrue(isinstance(arr, blaze.Array))
    # Build the expected payload first, then feed it to the array.
    rows = [[i, i * 2] for i in range(10)]
    append(arr, rows)
    self.assertEqual(dd_as_py(arr._data), rows)
def test_create(self):
    """Creating an empty persistent array yields a zero-length blaze.Array."""
    persist = blaze.Storage(self.rooturi, format="blz")
    a = blaze.array([], 'float64', storage=persist)
    self.assertTrue(isinstance(a, blaze.Array))
    # assertEqual reports both values on failure, unlike assertTrue(x == y);
    # also drops the leftover debug print of a.dshape.shape.
    self.assertEqual(a.dshape.shape, (0,))
    self.assertEqual(dd_as_py(a._data), [])
def test_open(self):
    """Data appended to a persisted array survives a close/re-open cycle."""
    persist = blaze.Storage(self.rooturi, format="blz")
    created = blaze.ones('0 * float64', storage=persist)
    append(created, range(10))
    # Re-open the dataset in URI
    reopened = blaze.open(persist)
    self.assertTrue(isinstance(reopened, blaze.Array))
    self.assertEqual(dd_as_py(reopened._data), list(range(10)))
del describe_array

# --------------------------------------------------------------------

print_section('Persisted arrays')

def maybe_remove(persist):
    """Drop a previously persisted dataset so the example starts clean."""
    import os.path
    if os.path.exists(persist.path):
        # Remove every directory starting with rootdir
        blaze.drop(persist)

# Create an empty array on-disk
dname = 'blz://persisted.blz'
store = blaze.Storage(dname)
maybe_remove(store)
p = blaze.zeros('0, float64', storage=store)
# Feed it with some data
blaze.append(p, range(10))
print('Before re-opening:', p)

# Re-open the dataset in URI
p2 = blaze.open(store)
print('After re-opening:', p2)

# Clean up.  Pass the Storage object, consistent with maybe_remove() above
# (previously the raw URI string was passed to blaze.drop).
blaze.drop(store)
def test_append(self):
    """Values appended to a fresh zero-length array are read back verbatim."""
    persist = blaze.Storage(self.rooturi, format="blz")
    arr = blaze.zeros('0 * float64', storage=persist)
    self.assertTrue(isinstance(arr, blaze.Array))
    expected = list(range(10))
    append(arr, expected)
    self.assertEqual(dd_as_py(arr._data), expected)
def test_deprecated_open(self):
    """Opening via a "json://" URL string still yields the expected data."""
    url = "json://" + self.fname
    store = blaze.Storage(url, mode='r')
    a = blaze.open(store, schema=json_schema)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # use assertTrue instead.
    self.assertTrue(isinstance(a, blaze.Array))
    self.assertEqual(dd_as_py(a._data), [1, 2, 3, 4, 5])
fname = "sample.h5"

print_section('building basic hdf5 files')
# Create a simple HDF5 file
a1 = np.array([[1, 2, 3], [4, 5, 6]], dtype="int32")
a2 = np.array([[1, 2, 3], [3, 2, 1]], dtype="int64")
t1 = np.array([(1, 2, 3), (3, 2, 1)], dtype="i4,i8,f8")
with tb.open_file(fname, "w") as f:
    f.create_array(f.root, 'a1', a1)
    f.create_table(f.root, 't1', t1)
    f.create_group(f.root, 'g')
    f.create_array(f.root.g, 'a2', a2)
    print("Created HDF5 file with the next contents:\n%s" % str(f))

print_section('opening and handling datasets in hdf5 files')
# Open an homogeneous dataset there; a single Storage handle is enough for
# every datapath in the same file (previously an identical Storage was
# redundantly re-created for the second open).
store = blaze.Storage(fname, format='hdf5')
a = blaze.open(store, datapath="/a1")
# Print it
print("/a1 contents:", a)
# Print the datashape
print("datashape for /a1:", a.dshape)

# Open another homogeneous dataset there
a = blaze.open(store, datapath="/g/a2")
# Print it
print("/g/a2 contents:", a)
# Print the datashape
print("datashape for /g/a2:", a.dshape)

# Now, get an heterogeneous dataset
def test_open(self):
    """A read-only open of the JSON store yields the expected data."""
    store = blaze.Storage(self.url, mode='r')
    a = blaze.open(store, schema=json_schema)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # use assertTrue instead.
    self.assertTrue(isinstance(a, blaze.Array))
    self.assertEqual(dd_as_py(a._data), [1, 2, 3, 4, 5])
def test_deprecated_open(self):
    """Opening via a "csv://" URL string still yields the expected data."""
    url = "csv://" + self.fname
    store = blaze.Storage(url, mode='r')
    a = blaze.open(store, schema=csv_schema)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # use assertTrue instead.
    self.assertTrue(isinstance(a, blaze.Array))
    self.assertEqual(dd_as_py(a._data), csv_ldict)
def test_append(self):
    """A row appended through blaze.append() shows up when read back."""
    store = blaze.Storage(self.url, mode='r+')
    arr = blaze.open(store, schema=csv_schema)
    blaze.append(arr, ["k4", "v4", 4, True])
    expected = csv_ldict + [
        {u'f0': u'k4', u'f1': u'v4', u'f2': 4, u'f3': True}]
    self.assertEqual(dd_as_py(arr._data), expected)
def test_open_has_header(self):
    """Opening with has_header=False still reads the expected rows."""
    store = blaze.Storage(self.url, mode='r')
    a = blaze.open(store, schema=csv_schema, has_header=False)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # use assertTrue instead.
    self.assertTrue(isinstance(a, blaze.Array))
    self.assertEqual(dd_as_py(a._data), csv_ldict)
def test_open_dialect(self):
    """Opening with an explicit CSV dialect still reads the expected rows."""
    store = blaze.Storage(self.url, mode='r')
    a = blaze.open(store, schema=csv_schema, dialect='excel')
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # use assertTrue instead.
    self.assertTrue(isinstance(a, blaze.Array))
    self.assertEqual(dd_as_py(a._data), csv_ldict)