def test_removes_files(self):
    """remove() deletes the partition's backing file from the filesystem.

    NOTE(review): an identical definition of this test appears on the
    adjacent source line — only the last one defined survives at runtime;
    confirm the duplication is intentional.
    """
    # In-memory filesystem so no real files are touched.
    memfs = fsopendir("temp://")
    memfs.createfile("temp.h5")
    partition = HDFPartition(memfs, path="temp.h5")
    # Sanity check: the file exists before removal.
    self.assertTrue(memfs.exists("temp.h5"))
    partition.remove()
    # The backing file must be gone afterwards.
    self.assertFalse(memfs.exists("temp.h5"))
def test_removes_files(self):
    """Verify that HDFPartition.remove() drops the file it wraps.

    NOTE(review): this duplicates the test defined on the adjacent source
    line; in one class body the later definition shadows the earlier one —
    confirm whether one copy should be renamed or deleted.
    """
    fs_temp = fsopendir('temp://')
    fs_temp.createfile('temp.h5')
    part = HDFPartition(fs_temp, path='temp.h5')
    self.assertTrue(fs_temp.exists('temp.h5'))  # precondition: file present
    part.remove()
    self.assertFalse(fs_temp.exists('temp.h5'))  # postcondition: file removed
def write_small_blocks():
    """Benchmark one-row-at-a-time inserts into an HDFPartition.

    Uses the module-level ``fs``, ``headers``, ``rows`` and ``N`` fixtures,
    then prints the achieved write rate (rows/second) and the row count.
    """
    part = HDFPartition(fs, path='foobar')
    # Start from a clean slate if a previous run left the partition behind.
    # NOTE(review): ``exists`` is used as a property here, not called — confirm
    # that matches the HDFPartition API.
    if part.exists:
        part.remove()
    with Timer() as timer, part.writer as writer:
        writer.headers = headers
        schema = writer.meta['schema']
        # Header row of the schema table gives the column offsets.
        type_idx = schema[0].index('type')
        pos_idx = schema[0].index('pos')
        # Derive each column's type name from the first data row
        # (``pos`` is 1-based, hence the -1).
        for col in schema[1:]:
            col[type_idx] = type(rows[0][col[pos_idx] - 1]).__name__
        for row_num in range(N):
            writer.insert_row(rows[row_num])
    # Reported after the context exits so the timer's elapsed value is final.
    print('HDF write small', float(N) / timer.elapsed, writer.n_rows)
def write_small_blocks():
    """Time per-row writes to an HDFPartition and print rows/second.

    NOTE(review): this duplicates the function defined on the adjacent
    source line; the later definition shadows the earlier one — confirm
    the duplication is intentional.
    """
    target = HDFPartition(fs, path='foobar')
    if target.exists:  # NOTE(review): property-style access — verify API
        target.remove()
    with Timer() as t, target.writer as w:
        w.headers = headers
        # First schema row holds the column names of the schema table itself.
        type_col = w.meta['schema'][0].index('type')
        pos_col = w.meta['schema'][0].index('pos')
        # Fill in each data column's type from the first sample row;
        # ``pos`` is 1-based, so subtract one when indexing the row.
        for column in w.meta['schema'][1:]:
            sample_value = rows[0][column[pos_col] - 1]
            column[type_col] = type(sample_value).__name__
        for idx in range(N):
            w.insert_row(rows[idx])
    # Printed outside the ``with`` so ``t.elapsed`` reflects the full run.
    print('HDF write small', float(N) / t.elapsed, w.n_rows)