def test_integration():
    f = NamedTemporaryFile()

    # set up a new hdf5 table to work with
    h5file = tables.open_file(f.name, mode="w", title="Test file")
    h5file.create_group('/', 'testgroup', 'Test Group')
    h5file.create_table('/testgroup', 'testtable', FooBar, 'Test Table')
    h5file.flush()
    h5file.close()

    # load some initial data via tohdf5()
    table1 = etl.wrap((('foo', 'bar'),
                       (1, b'asdfgh'),
                       (2, b'qwerty'),
                       (3, b'zxcvbn')))
    table1.tohdf5(f.name, '/testgroup', 'testtable')
    ieq(table1, etl.fromhdf5(f.name, '/testgroup', 'testtable'))

    # append some more data
    table1.appendhdf5(f.name, '/testgroup', 'testtable')
    ieq(chain(table1, table1[1:]),
        etl.fromhdf5(f.name, '/testgroup', 'testtable'))
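# The test above relies on a PyTables row description named FooBar and a handful
# of imports that live elsewhere in the test module.  A minimal sketch of what
# they could look like (column types and import paths are assumptions chosen to
# match the (int, 6-byte bytes) rows in the test data, not taken from this file):
from itertools import chain
from tempfile import NamedTemporaryFile

import tables
import petl as etl
from petl.test.helpers import ieq


class FooBar(tables.IsDescription):
    foo = tables.Int32Col(pos=0)      # integer 'foo' column
    bar = tables.StringCol(6, pos=1)  # fixed-width 6-byte 'bar' column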
h5table = h5file.create_table('/testgroup', 'testtable', FooBar, 'Test Table')

# load some data into the table
table1 = (('foo', 'bar'),
          (1, b'asdfgh'),
          (2, b'qwerty'),
          (3, b'zxcvbn'))
for row in table1[1:]:
    for i, f in enumerate(table1[0]):
        h5table.row[f] = row[i]
    h5table.row.append()
h5file.flush()
h5file.close()

# now demonstrate use of fromhdf5
table1 = etl.fromhdf5('example.h5', '/testgroup', 'testtable')
table1

# alternatively just specify path to table node
table1 = etl.fromhdf5('example.h5', '/testgroup/testtable')

# ...or use an existing tables.File object
h5file = tables.open_file('example.h5')
table1 = etl.fromhdf5(h5file, '/testgroup/testtable')

# ...or use an existing tables.Table object
h5tbl = h5file.get_node('/testgroup/testtable')
table1 = etl.fromhdf5(h5tbl)

# use a condition to filter data
table2 = etl.fromhdf5(h5tbl, condition='foo < 3')
table2

h5file.close()

# fromhdf5sorted()
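# The comment above marks the start of the fromhdf5sorted() example.  A minimal
# sketch of how it might be used with the table created above, assuming
# fromhdf5sorted() accepts a 'sortby' column name and that the column carries a
# completely sorted (CSI) index; the indexing step is an assumption, not taken
# from this file:
h5file = tables.open_file('example.h5', mode='a')
h5tbl = h5file.get_node('/testgroup/testtable')
h5tbl.cols.foo.create_csindex()  # a completely sorted index enables sorted reads
h5file.flush()

# read rows back sorted by the indexed 'foo' column
table3 = etl.fromhdf5sorted(h5tbl, sortby='foo')
table3
h5file.close()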