def main():
    # No environment is used here, so logging must be configured manually.
    logging.basicConfig(level=logging.INFO)
    filename = os.path.join('hdf5', 'example_16.hdf5')
    traj = Trajectory(filename=filename, overwrite_file=True)
    # Result that every worker process will overwrite in turn.
    traj.f_add_result('last_process_name', 'N/A',
                      comment='Name of the last process that manipulated the trajectory')
    with MultiprocContext(trajectory=traj, wrap_mode='LOCK') as mc:
        # The context manager wraps the trajectory's storage service with a
        # lock-protected one and restores the original service on exit.
        # The wrapped lock is pickled and sent to the pool together with the
        # trajectory for every function execution.
        jobs = (traj for _ in range(50))
        workers = mp.Pool(processes=4)
        # Hand the trajectory plus function to the pool; run it 50 times.
        workers.map_async(manipulate_multiproc_safe, jobs)
        workers.close()
        # Block until every worker has finished.
        workers.join()
    # Reload from disk, overwriting the stale in-memory copy of the result.
    traj.results.f_load(load_data=3)
    # Report which process touched the trajectory last.
    print('The last process to manipulate the trajectory was: `%s`' % traj.last_process_name)
def test_errors(self):
    """Exercise error paths of shared pytables results.

    Verifies that creating shared data before the trajectory is stored
    raises ``TypeError``, that row-iterating a plain shared array raises,
    and that nesting storage context managers or re-opening an already
    open store raises ``RuntimeError``.
    """
    filename = make_temp_dir("hdf5errors.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    # FIX: `np.float` was removed in NumPy 1.24; the builtin `float` is the
    # exact same dtype (float64).
    npearray = np.ones((2, 10, 3), dtype=float)
    thevlarray = np.array([compat.tobytes("j"), 22.2, compat.tobytes("gutter")])
    # Shared data cannot be created before the trajectory itself is stored.
    with self.assertRaises(TypeError):
        traj.f_add_result(SharedResult, "arrays.vlarray",
                          SharedVLArray()).create_shared_data(obj=thevlarray)
    traj.f_store()
    traj.arrays.vlarray.create_shared_data(obj=thevlarray)
    traj.f_add_result(SharedResult, "arrays.array",
                      SharedArray()).create_shared_data(data=npearray)
    traj.arrays.f_add_result(SharedResult, "super.carray", SharedCArray(),
                             comment="carray").create_shared_data(
        shape=(10, 10), atom=pt.atom.FloatAtom())
    traj.arrays.f_add_result(SharedResult, "earray",
                             SharedEArray()).create_shared_data("earray", obj=npearray)
    traj.f_store()
    # Row iteration is a table operation, not valid on a plain array.
    with self.assertRaises(TypeError):
        traj.arrays.array.iter_rows()
    with StorageContextManager(traj) as cm:
        # Nested context managers on the same trajectory are forbidden.
        with self.assertRaises(RuntimeError):
            with StorageContextManager(traj) as cm2:
                pass
        self.assertTrue(traj.v_storage_service.is_open)
        # Opening a store that is already open must also fail.
        with self.assertRaises(RuntimeError):
            StorageContextManager(traj).f_open_store()
    self.assertFalse(traj.v_storage_service.is_open)
def test_removal_of_error_parameter(self):
    """A result whose storage fails leaves no trace and can be replaced."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    traj.f_add_result('iii', 42)
    traj.f_add_result(FakeResult, 'j.j.josie', 43)
    h5file = traj.v_storage_service.filename
    traj.f_store(only_init=True)
    # Storing the fake result is expected to blow up ...
    with self.assertRaises(RuntimeError):
        traj.f_store()
    # ... and must not leave a partial node behind in the HDF5 file.
    with ptcompat.open_file(h5file, mode='r') as handle:
        group_node = ptcompat.get_node(handle, where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie' not in group_node)
    traj.j.j.f_remove_child('josie')
    traj.j.j.f_add_result(FakeResult2, 'josie2', 444)
    traj.f_store()
    with self.assertRaises(pex.NoSuchServiceError):
        traj.f_store_child('results', recursive=True)
    # The replacement stores its 'hey' entry but not the failing 'fail' one.
    with ptcompat.open_file(h5file, mode='r') as handle:
        group_node = ptcompat.get_node(handle, where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie2' in group_node)
        josie2 = ptcompat.get_child(group_node, 'josie2')
        self.assertTrue('hey' in josie2)
        self.assertTrue('fail' not in josie2)
def test_version_mismatch(self):
    """Loading data stamped by another pypet version needs `force=True`."""
    traj = Trajectory(name='TestVERSION',
                      filename=make_temp_dir('testversionmismatch.hdf5'),
                      add_time=True)
    traj.f_add_parameter('group1.test', 42)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    traj._version = '0.1a.1'  # fake an old version stamp before storing
    traj.f_store()
    reloaded = Trajectory(name=traj.v_name, add_time=False,
                          filename=make_temp_dir('testversionmismatch.hdf5'))
    # A plain load must refuse the mismatching version ...
    with self.assertRaises(pex.VersionMismatchError):
        reloaded.f_load(load_parameters=2, load_results=2)
    # ... while forcing it goes through and yields identical data.
    reloaded.f_load(load_parameters=2, load_results=2, force=True)
    self.compare_trajectories(traj, reloaded)
    get_root_logger().info('Mismatch testing done!')
def test_migrations(self):
    """f_migrate moves a trajectory to a new file and keeps its data."""
    traj = Trajectory(name='Testmigrate', filename=make_temp_dir('migrate.hdf5'),
                      add_time=True)
    traj.f_add_result('I.am.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    new_file = make_temp_dir('migrate2.hdf5')
    traj.f_migrate(filename=new_file)
    traj.f_store()
    # A fresh trajectory migrated onto the stored name must auto-load the data.
    fresh = Trajectory()
    fresh.f_migrate(new_name=traj.v_name, filename=new_file, in_store=True)
    fresh.v_auto_load = True
    self.assertTrue(fresh.results.I.am.a.mean.resu == 42)
def test_store_and_load_large_dictionary(self):
    """Large and small dict results both survive a store/load round trip."""
    traj = Trajectory(name='Testlargedict', filename=make_temp_dir('large_dict.hdf5'))
    # One dict well above a thousand entries, one well below.
    large_dict = {'item_%d' % idx: idx for idx in range(1025)}
    large_dict2 = {'item_%d' % idx: idx for idx in range(33)}
    traj.f_add_result('large_dict', large_dict, comment='Huge_dict!')
    traj.f_add_result('large_dict2', large_dict2, comment='Not so large dict!')
    traj.f_store()
    traj_name = traj.v_name
    reloaded = Trajectory(filename=make_temp_dir('large_dict.hdf5'))
    reloaded.f_load(name=traj_name, load_data=2)
    self.compare_trajectories(traj, reloaded)
def test_store_items_and_groups(self):
    """Selective f_store_items / f_store_child round-trips correctly."""
    traj = Trajectory(name='testtraj', filename=make_temp_dir('teststoreitems.hdf5'))
    traj.f_store()
    # Comment is deliberately longer than the HDF5 string-column limit.
    traj.f_add_parameter('group1.test', 42,
                         comment='TooLong' * pypetconstants.HDF5_STRCOL_MAX_COMMENT_LENGTH)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    traj.f_store_items(['test', 'testres', 'group1'])
    reloaded = Trajectory(name=traj.v_name, add_time=False,
                          filename=make_temp_dir('teststoreitems.hdf5'))
    reloaded.f_load(load_parameters=2, load_results=2)
    # A child stored along a deep path must be loadable by the same path.
    traj.f_add_result('Im.stored.along.a.path', 43)
    traj.Im.stored.along.v_annotations['wtf'] = 4444
    traj.res.f_store_child('Im.stored.along.a.path')
    reloaded.res.f_load_child('Im.stored.along.a.path', load_data=2)
    self.compare_trajectories(traj, reloaded)
def test_get_default(self):
    """f_get_default returns fallbacks and auto-loads stored values."""
    traj = Trajectory(name='Testgetdefault', filename=make_temp_dir('autoload.hdf5'))
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    # Unknown name -> the supplied default.
    self.assertTrue(traj.f_get_default('jjjjjjjjjj', 555) == 555)
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    # Missing leaf even with auto_load -> still the default.
    fallback = traj.f_get_default('res.I.am.crun.a.mean.answ', 444, auto_load=True)
    self.assertTrue(fallback == 444)
    # Existing stored leaf -> auto-loaded actual value.
    loaded = traj.f_get_default('res.I.am.crun.a.mean.resu', auto_load=True,
                                fast_access=True)
    self.assertTrue(loaded == 42)
    # Plain attribute access on an unknown name keeps failing.
    with self.assertRaises(Exception):
        traj.kdsfdsf
def test_store_items_and_groups(self):
    """Selective f_store_items / f_store_child round-trips correctly."""
    traj = Trajectory(name='testtraj', filename=make_temp_dir('teststoreitems.hdf5'),
                      add_time=True)
    traj.f_store()
    # Comment is deliberately longer than the HDF5 string-column limit.
    traj.f_add_parameter('group1.test', 42,
                         comment='TooLong' * pypetconstants.HDF5_STRCOL_MAX_COMMENT_LENGTH)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    traj.f_store_items(['test', 'testres', 'group1'])
    reloaded = Trajectory(name=traj.v_name, add_time=False,
                          filename=make_temp_dir('teststoreitems.hdf5'))
    reloaded.f_load(load_parameters=2, load_results=2)
    # A child stored along a deep path must be loadable by the same path.
    traj.f_add_result('Im.stored.along.a.path', 43)
    traj.Im.stored.along.v_annotations['wtf'] = 4444
    traj.res.f_store_child('Im.stored.along.a.path')
    reloaded.res.f_load_child('Im.stored.along.a.path', load_data=2)
    self.compare_trajectories(traj, reloaded)
def test_version_mismatch(self):
    """Loading data stamped by another pypet version needs `force=True`."""
    traj = Trajectory(name='TestVERSION',
                      filename=make_temp_dir('testversionmismatch.hdf5'),
                      add_time=True)
    traj.f_add_parameter('group1.test', 42)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    traj._version = '0.1a.1'  # fake an old version stamp before storing
    traj.f_store()
    reloaded = Trajectory(name=traj.v_name, add_time=False,
                          filename=make_temp_dir('testversionmismatch.hdf5'))
    # A plain load must refuse the mismatching version ...
    with self.assertRaises(pex.VersionMismatchError):
        reloaded.f_load(load_parameters=2, load_results=2)
    # ... while forcing it goes through and yields identical data.
    reloaded.f_load(load_parameters=2, load_results=2, force=True)
    self.compare_trajectories(traj, reloaded)
    get_root_logger().info('Mismatch testing done!')
def main():
    # No environment is used here, so logging must be configured manually.
    logging.basicConfig(level=logging.INFO)
    filename = os.path.join('hdf5', 'example_16.hdf5')
    traj = Trajectory(filename=filename, overwrite_file=True)
    # Result that every worker process will overwrite in turn.
    traj.f_add_result('last_process_name', 'N/A',
                      comment='Name of the last process that manipulated the trajectory')
    with MultiprocContext(trajectory=traj, wrap_mode='LOCK') as mc:
        # The context manager wraps the trajectory's storage service with a
        # lock-protected one and restores the original service on exit.
        # The wrapped lock is pickled and sent to the pool together with the
        # trajectory for every function execution.
        jobs = (traj for _ in range(20))
        workers = mp.Pool(processes=4)
        # Hand the trajectory plus function to the pool; run it 20 times.
        workers.map_async(manipulate_multiproc_safe, jobs)
        workers.close()
        # Block until every worker has finished.
        workers.join()
    # Reload from disk, overwriting the stale in-memory copy of the result.
    traj.results.f_load(load_data=3)
    # Report which process touched the trajectory last.
    print('The last process to manipulate the trajectory was: `%s`' % traj.last_process_name)
def test_auto_load(self):
    """Emptied or removed items are transparently re-loaded on access."""
    traj = Trajectory(name='Testautoload', filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    # Empty the derived parameter in RAM (it stays on disk).
    ffa = traj.f_get('ffa')
    ffa.f_unlock()
    ffa.f_empty()
    self.assertTrue(ffa.f_is_empty())
    traj.f_remove_child('results', recursive=True)
    # Attribute access now triggers loading from disk.
    self.assertTrue(traj.res.I.am.crun.a.mean.resu == 42)
    self.assertTrue(traj.ffa == 42)
    # Names that were never stored still raise.
    with self.assertRaises(pex.DataNotInStorageError):
        traj.kdsfdsf
def test_get_default(self):
    """f_get_default returns fallbacks and auto-loads stored values."""
    traj = Trajectory(name='Testgetdefault', filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    # Unknown name -> the supplied default.
    self.assertTrue(traj.f_get_default('jjjjjjjjjj', 555) == 555)
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    # Missing leaf even with auto_load -> still the default.
    fallback = traj.f_get_default('res.I.am.crun.a.mean.answ', 444, auto_load=True)
    self.assertTrue(fallback == 444)
    # Existing stored leaf -> auto-loaded actual value.
    loaded = traj.f_get_default('res.I.am.crun.a.mean.resu', auto_load=True,
                                fast_access=True)
    self.assertTrue(loaded == 42)
    # Plain attribute access on an unknown name keeps failing.
    with self.assertRaises(Exception):
        traj.kdsfdsf
def test_store_and_load_large_dictionary(self):
    """Large and small dict results both survive a store/load round trip."""
    traj = Trajectory(name='Testlargedict', filename=make_temp_dir('large_dict.hdf5'),
                      add_time=True)
    # One dict well above a thousand entries, one well below.
    large_dict = {'item_%d' % idx: idx for idx in range(1025)}
    large_dict2 = {'item_%d' % idx: idx for idx in range(33)}
    traj.f_add_result('large_dict', large_dict, comment='Huge_dict!')
    traj.f_add_result('large_dict2', large_dict2, comment='Not so large dict!')
    traj.f_store()
    traj_name = traj.v_name
    reloaded = Trajectory(filename=make_temp_dir('large_dict.hdf5'), add_time=True)
    reloaded.f_load(name=traj_name, load_data=2)
    self.compare_trajectories(traj, reloaded)
def test_removal_of_error_parameter(self):
    """A result whose storage fails leaves no trace and can be replaced."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    traj.f_add_result('iii', 42)
    traj.f_add_result(FakeResult, 'j.j.josie', 43)
    h5file = traj.v_storage_service.filename
    traj.f_store(only_init=True)
    # Storing the fake result is expected to blow up ...
    with self.assertRaises(RuntimeError):
        traj.f_store()
    # ... and must not leave a partial node behind in the HDF5 file.
    with pt.open_file(h5file, mode='r') as handle:
        group_node = handle.get_node(where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie' not in group_node)
    traj.j.j.f_remove_child('josie')
    traj.j.j.f_add_result(FakeResult2, 'josie2', 444)
    traj.f_store()
    with self.assertRaises(pex.NoSuchServiceError):
        traj.f_store_child('results', recursive=True)
    # The replacement stores its 'hey' entry but not the failing 'fail' one.
    with pt.open_file(h5file, mode='r') as handle:
        group_node = handle.get_node(where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie2' in group_node)
        josie2 = group_node._f_get_child('josie2')
        self.assertTrue('hey' in josie2)
        self.assertTrue('fail' not in josie2)
def test_auto_load(self):
    """Emptied or removed items are transparently re-loaded on access."""
    traj = Trajectory(name='Testautoload', filename=make_temp_dir('autoload.hdf5'))
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    # Empty the derived parameter in RAM (it stays on disk).
    ffa = traj.f_get('ffa')
    ffa.f_unlock()
    ffa.f_empty()
    self.assertTrue(ffa.f_is_empty())
    traj.f_remove_child('results', recursive=True)
    # Attribute access now triggers loading from disk.
    self.assertTrue(traj.res.I.am.crun.a.mean.resu == 42)
    self.assertTrue(traj.ffa == 42)
    # Names that were never stored still raise.
    with self.assertRaises(pex.DataNotInStorageError):
        traj.kdsfdsf
def test_loading_and_storing_empty_containers(self):
    """Empty tuples, arrays, sparse matrices and containers survive storage."""
    filename = make_temp_dir('empty_containers.hdf5')
    traj = Trajectory(filename=filename, add_time=True)
    # traj.f_add_parameter('empty.dict', {})
    # traj.f_add_parameter('empty.list', [])
    traj.f_add_parameter(ArrayParameter, 'empty.tuple', ())
    traj.f_add_parameter(ArrayParameter, 'empty.array', np.array([], dtype=float))
    # Empty sparse matrices of every supported layout.
    spsparse_csc = spsp.csc_matrix((2, 10))
    spsparse_csr = spsp.csr_matrix((6660, 660))
    spsparse_bsr = spsp.bsr_matrix((3330, 2220))
    spsparse_dia = spsp.dia_matrix((1230, 1230))
    traj.f_add_parameter(SparseParameter, 'empty.csc', spsparse_csc)
    traj.f_add_parameter(SparseParameter, 'empty.csr', spsparse_csr)
    traj.f_add_parameter(SparseParameter, 'empty.bsr', spsparse_bsr)
    traj.f_add_parameter(SparseParameter, 'empty.dia', spsparse_dia)
    # NOTE(review): `pd.Panel` was removed in pandas 0.25; this fixture
    # presumes an old pinned pandas — confirm before upgrading pandas.
    traj.f_add_result(SparseResult, 'empty.all', dict={}, list=[],
                      series=pd.Series(), frame=pd.DataFrame(), panel=pd.Panel(),
                      **traj.par.f_to_dict(short_names=True, fast_access=True))
    traj.f_store()
    newtraj = load_trajectory(index=-1, filename=filename)
    newtraj.f_load(load_data=2)
    epg = newtraj.par.empty
    self.assertTrue(type(epg.tuple) is tuple)
    self.assertTrue(len(epg.tuple) == 0)
    self.assertTrue(type(epg.array) is np.ndarray)
    self.assertTrue(epg.array.size == 0)
    self.assertTrue(spsp.isspmatrix_csr(epg.csr))
    self.assertTrue(epg.csr.size == 0)
    self.assertTrue(spsp.isspmatrix_csc(epg.csc))
    self.assertTrue(epg.csc.size == 0)
    self.assertTrue(spsp.isspmatrix_bsr(epg.bsr))
    self.assertTrue(epg.bsr.size == 0)
    self.assertTrue(spsp.isspmatrix_dia(epg.dia))
    self.assertTrue(epg.dia.size == 0)
    self.compare_trajectories(traj, newtraj)
def test_compacting(self):
    """Deleting rows/items and running ptrepack must shrink the file."""
    filename = make_temp_dir('hdf5compacting.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    traj.v_storage_service.complevel = 7
    first_row = {'ha': compat.tobytes('hi'), 'haha': np.zeros((3, 3))}
    traj.f_store(only_init=True)
    traj.f_add_result('My.Tree.Will.Be.Deleted', 42)
    traj.f_add_result('Mine.Too.HomeBoy', 42, comment='Don`t cry for me!')
    res = traj.f_add_result(SharedResult, 'myres')
    res['myres'] = SharedTable()
    res['myres'].create_shared_data(first_row=first_row)
    # Append ten thousand copies of the same row to the shared table.
    with StorageContextManager(traj):
        traj.myres
        for _ in range(10000):
            row = traj.myres.row
            for key in first_row:
                row[key] = first_row[key]
            row.append()
    traj.f_store()
    del traj
    traj = load_trajectory(name=trajname, filename=filename, load_all=2)
    # Drop most of the rows, then delete two whole subtrees.
    with StorageContextManager(traj) as cm:
        tb = traj.myres.get_data_node()
        ptcompat.remove_rows(tb, 1000, 10000)
        cm.flush_store()
        self.assertTrue(traj.myres.nrows == 1001)
    traj.f_delete_item(traj.My, recursive=True)
    traj.f_delete_item(traj.Mine, recursive=True)
    size = os.path.getsize(filename)
    get_root_logger().info('Filesize is %s' % str(size))
    name_wo_ext, ext = os.path.splitext(filename)
    backup_file_name = name_wo_ext + '_backup' + ext
    code = compact_hdf5_file(filename, keep_backup=True)
    if code != 0:
        raise RuntimeError('ptrepack fail')
    # The backup is the pre-compaction file; the new file must be smaller.
    backup_size = os.path.getsize(backup_file_name)
    self.assertTrue(backup_size == size)
    new_size = os.path.getsize(filename)
    get_root_logger().info('New filesize is %s' % str(new_size))
    self.assertTrue(new_size < size, "%s > %s" % (str(new_size), str(size)))
def test_loading_and_storing_empty_containers(self):
    """Empty tuples, arrays, sparse matrices and containers survive storage."""
    filename = make_temp_dir('empty_containers.hdf5')
    traj = Trajectory(filename=filename)
    # traj.f_add_parameter('empty.dict', {})
    # traj.f_add_parameter('empty.list', [])
    traj.f_add_parameter(ArrayParameter, 'empty.tuple', ())
    traj.f_add_parameter(ArrayParameter, 'empty.array', np.array([], dtype=float))
    # Empty sparse matrices of every supported layout.
    spsparse_csc = spsp.csc_matrix((2, 10))
    spsparse_csr = spsp.csr_matrix((6660, 660))
    spsparse_bsr = spsp.bsr_matrix((3330, 2220))
    spsparse_dia = spsp.dia_matrix((1230, 1230))
    traj.f_add_parameter(SparseParameter, 'empty.csc', spsparse_csc)
    traj.f_add_parameter(SparseParameter, 'empty.csr', spsparse_csr)
    traj.f_add_parameter(SparseParameter, 'empty.bsr', spsparse_bsr)
    traj.f_add_parameter(SparseParameter, 'empty.dia', spsparse_dia)
    # NOTE(review): `pd.Panel` was removed in pandas 0.25; this fixture
    # presumes an old pinned pandas — confirm before upgrading pandas.
    traj.f_add_result(SparseResult, 'empty.all', dict={}, list=[],
                      series=pd.Series(), frame=pd.DataFrame(), panel=pd.Panel(),
                      **traj.par.f_to_dict(short_names=True, fast_access=True))
    traj.f_store()
    newtraj = load_trajectory(index=-1, filename=filename)
    newtraj.f_load(load_data=2)
    epg = newtraj.par.empty
    self.assertTrue(type(epg.tuple) is tuple)
    self.assertTrue(len(epg.tuple) == 0)
    self.assertTrue(type(epg.array) is np.ndarray)
    self.assertTrue(epg.array.size == 0)
    self.assertTrue(spsp.isspmatrix_csr(epg.csr))
    self.assertTrue(epg.csr.size == 0)
    self.assertTrue(spsp.isspmatrix_csc(epg.csc))
    self.assertTrue(epg.csc.size == 0)
    self.assertTrue(spsp.isspmatrix_bsr(epg.bsr))
    self.assertTrue(epg.bsr.size == 0)
    self.assertTrue(spsp.isspmatrix_dia(epg.dia))
    self.assertTrue(epg.dia.size == 0)
    self.compare_trajectories(traj, newtraj)
def test_compacting(self):
    """Deleting rows/items and running ptrepack must shrink the file."""
    filename = make_temp_dir("hdf5compacting.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    traj.v_storage_service.complevel = 7
    first_row = {"ha": compat.tobytes("hi"), "haha": np.zeros((3, 3))}
    traj.f_store(only_init=True)
    traj.f_add_result("My.Tree.Will.Be.Deleted", 42)
    traj.f_add_result("Mine.Too.HomeBoy", 42, comment="Don`t cry for me!")
    res = traj.f_add_result(SharedResult, "myres")
    res["myres"] = SharedTable()
    res["myres"].create_shared_data(first_row=first_row)
    # Append ten thousand copies of the same row to the shared table.
    with StorageContextManager(traj):
        traj.myres
        for _ in range(10000):
            row = traj.myres.row
            for key in first_row:
                row[key] = first_row[key]
            row.append()
    traj.f_store()
    del traj
    traj = load_trajectory(name=trajname, filename=filename, load_all=2)
    # Drop most of the rows, then delete two whole subtrees.
    with StorageContextManager(traj) as cm:
        tb = traj.myres.get_data_node()
        ptcompat.remove_rows(tb, 1000, 10000)
        cm.f_flush_store()
        self.assertTrue(traj.myres.nrows == 1001)
    traj.f_delete_item(traj.My, recursive=True)
    traj.f_delete_item(traj.Mine, recursive=True)
    size = os.path.getsize(filename)
    get_root_logger().info("Filesize is %s" % str(size))
    name_wo_ext, ext = os.path.splitext(filename)
    backup_file_name = name_wo_ext + "_backup" + ext
    code = compact_hdf5_file(filename, keep_backup=True)
    if code != 0:
        raise RuntimeError("ptrepack fail")
    # The backup is the pre-compaction file; the new file must be smaller.
    backup_size = os.path.getsize(backup_file_name)
    self.assertTrue(backup_size == size)
    new_size = os.path.getsize(filename)
    get_root_logger().info("New filesize is %s" % str(new_size))
    self.assertTrue(new_size < size, "%s > %s" % (str(new_size), str(size)))
def test_storing_and_loading_groups(self):
    """Group data, annotations, and links store/load recursively."""
    filename = make_temp_dir('grpgrp.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    res = traj.f_add_result('aaa.bbb.ccc.iii', 42, 43, comment=7777 * '6')
    traj.ccc.v_annotations['gg'] = 4
    res = traj.f_add_result('aaa.ddd.eee.jjj', 42, 43, comment=777 * '6')
    traj.ccc.v_annotations['j'] = 'osajdsojds'
    traj.f_store(only_init=True)
    traj.f_store_item('aaa', recursive=True)
    newtraj = load_trajectory(traj.v_name, filename=filename, load_all=2)
    self.compare_trajectories(traj, newtraj)
    # A changed value is only equal again after re-storing and re-loading.
    traj.iii.f_set(55)
    self.assertFalse(results_equal(traj.iii, newtraj.iii))
    traj.aaa.f_store(recursive=True, store_data=3)
    newtraj.bbb.f_load(recursive=True, load_data=3)
    self.compare_trajectories(traj, newtraj)
    # load_data=3 overwrites in-RAM annotations with the stored ones.
    traj.ccc.v_annotations['gg'] = 5
    traj.f_load(load_data=3)
    self.assertTrue(traj.ccc.v_annotations['gg'] == 4)
    traj.ccc.v_annotations['gg'] = 5
    traj.f_store(store_data=3)
    newtraj.f_load(load_data=2)
    self.assertTrue(newtraj.ccc.v_annotations['gg'] == 4)
    newtraj.f_load(load_data=3)
    self.assertTrue(newtraj.ccc.v_annotations['gg'] == 5)
    # Links travel only when `with_links` is enabled.
    traj.ccc.f_add_link('link', res)
    traj.f_store_item(traj.ccc, store_data=3, with_links=False)
    newtraj.f_load(load_data=3)
    self.assertTrue('link' not in newtraj.ccc)
    traj.f_store_item(traj.ccc, store_data=3, with_links=True, recursive=True)
    newtraj.f_load_item(newtraj.ccc, with_links=False, recursive=True)
    self.assertTrue('link' not in newtraj.ccc)
    newtraj.f_load_item(newtraj.ccc, recursive=True)
    self.assertTrue('link' in newtraj.ccc)
def test_partially_delete_stuff(self):
    """delete_only removes single entries; delete_item removes the node."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'))
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    self.assertTrue('a' in res)
    traj.f_delete_item(res, delete_only=['a'], remove_from_item=True)
    self.assertTrue('c' in res)
    self.assertTrue('a' not in res)
    # Re-adding locally does not resurrect the deleted entry on disk.
    res['a'] = 'offf'
    self.assertTrue('a' in res)
    traj.f_load(load_results=3)
    self.assertTrue('a' not in res)
    self.assertTrue('c' in res)
    # Deleting the whole item keeps the results group but drops the node.
    traj.f_delete_item(res, remove_from_trajectory=True)
    self.assertTrue('results' in traj)
    self.assertTrue(res not in traj)
def test_overwrite_stuff(self):
    """`overwrite` replaces stored entries; unknown names only warn."""
    traj = Trajectory(name='TestOverwrite', filename=make_temp_dir('testowrite.hdf5'))
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    res['a'] = np.array([1, 2, 3])
    res['c'] = 123445
    traj.f_store_item(res, overwrite='a', complevel=4)
    # 'b' does not exist in the result, so this merely emits a warning.
    traj.f_store_item(res, overwrite=['a', 'b'])
    traj.f_load(load_results=3)
    res = traj.test
    # 'a' was overwritten, 'c' was not.
    self.assertTrue((res['a'] == np.array([1, 2, 3])).all())
    self.assertTrue(res['c'] == 'd')
    # store_data=3 overwrites everything.
    res['c'] = 123445
    traj.f_store_item(res, store_data=3)
    res.f_empty()
    traj.f_load(load_results=3)
    self.assertTrue(traj.test['c'] == 123445)
def test_delete_links(self):
    """Stored links can be deleted from the file individually."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_add_link('x.y', res)
    traj.f_add_link('x.g.h', res)
    traj.f_store()
    # Links come back after removal and reload ...
    traj.f_remove_child('x', recursive=True)
    traj.f_load()
    self.assertEqual(traj.x.y.a, traj.test.a)
    self.assertEqual(traj.x.g.h.a, traj.test.a)
    # ... unless they are deleted from the file as well.
    traj.f_delete_link('x.y', remove_from_trajectory=True)
    traj.f_delete_link((traj.x.g, 'h'), remove_from_trajectory=True)
    traj.f_load()
    with self.assertRaises(AttributeError):
        traj.x.g.h
def test_partial_loading(self):
    """load_only / load_except restrict which entries are loaded."""
    traj = Trajectory(name='TestPartial', filename=make_temp_dir('testpartially.hdf5'))
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    # load_only silently ignores names that do not exist ('x').
    traj.f_load_item(traj.test, load_only=['a', 'x'])
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    load_except = ['c', 'd']
    traj.f_load_item(traj.test, load_except=load_except)
    # The caller's list must not be mutated.
    self.assertTrue(len(load_except) == 2)
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    # load_only and load_except are mutually exclusive.
    with self.assertRaises(ValueError):
        traj.f_load_item(traj.test, load_except=['x'], load_only=['y'])
def test_partial_loading(self):
    """load_only / load_except restrict which entries are loaded."""
    traj = Trajectory(name='TestPartial', filename=make_temp_dir('testpartially.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    # load_only silently ignores names that do not exist ('x').
    traj.f_load_item(traj.test, load_only=['a', 'x'])
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    load_except = ['c', 'd']
    traj.f_load_item(traj.test, load_except=load_except)
    # The caller's list must not be mutated.
    self.assertTrue(len(load_except) == 2)
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    # load_only and load_except are mutually exclusive.
    with self.assertRaises(ValueError):
        traj.f_load_item(traj.test, load_except=['x'], load_only=['y'])
def test_partially_delete_stuff(self):
    """delete_only removes single entries; delete_item removes the node."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    self.assertTrue('a' in res)
    traj.f_delete_item(res, delete_only=['a'], remove_from_item=True)
    self.assertTrue('c' in res)
    self.assertTrue('a' not in res)
    # Re-adding locally does not resurrect the deleted entry on disk.
    res['a'] = 'offf'
    self.assertTrue('a' in res)
    traj.f_load(load_results=3)
    self.assertTrue('a' not in res)
    self.assertTrue('c' in res)
    # Deleting the whole item keeps the results group but drops the node.
    traj.f_delete_item(res, remove_from_trajectory=True)
    self.assertTrue('results' in traj)
    self.assertTrue(res not in traj)
def test_storage_and_loading(self):
    """Links — including circular ones — survive storing and loading."""
    filename = make_temp_dir('linktest.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    traj.par.f_add_link('gg', res)
    traj.f_add_link('hh', res)
    traj.f_add_link('jj', traj.par)
    traj.f_add_link('ii', res)
    # Build a pair of mutually circular links between the two groups.
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.test.f_add_link('circle2', traj.test.circle1.circle2)
    traj.f_add_parameter_group('test.ab.bc.cd')
    traj.cd.f_add_link(traj.test)
    traj.test.f_add_link(traj.cd)
    traj.f_store()
    other = Trajectory(filename=filename)
    other.f_load(name=traj.v_name, load_data=2)
    self.assertTrue(traj.kk == other.gg, '%s != %s' % (traj.kk, other.gg))
    self.assertTrue(traj.cd.test is traj.test)
    self.assertTrue(len(traj._linked_by), len(other._linked_by))
    self.compare_trajectories(traj, other)
    # Removing a link updates the internal link bookkeeping.
    self.assertTrue('jj' in other._nn_interface._links_count)
    other.f_remove_child('jj')
    self.assertTrue('jj' not in other._nn_interface._links_count)
    other.f_remove_child('hh')
    other.f_remove_child('ii')
    other.f_remove_child('parameters', recursive=True)
    # Auto-loading must resolve links (even circular ones) transparently.
    other.v_auto_load = True
    group = other.par.test2.circle2
    self.assertTrue(group is other.test)
    retest = other.test.circle1
    self.assertTrue(retest is other.test2)
    self.assertTrue(other.test.circle2 is other.test)
    self.assertTrue(other.hh == other.res.kk)
    other.v_auto_load = False
    other.f_load_child('jj')
    self.assertTrue(other.jj is other.par)
    other.f_load(load_data=2)
    self.assertTrue(other.ii == other.res.kk)
def test_delete_links(self):
    """Stored links can be deleted from the file individually."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_add_link('x.y', res)
    traj.f_add_link('x.g.h', res)
    traj.f_store()
    # Links come back after removal and reload ...
    traj.f_remove_child('x', recursive=True)
    traj.f_load()
    self.assertEqual(traj.x.y.a, traj.test.a)
    self.assertEqual(traj.x.g.h.a, traj.test.a)
    # ... unless they are deleted from the file as well.
    traj.f_delete_link('x.y', remove_from_trajectory=True)
    traj.f_delete_link((traj.x.g, 'h'), remove_from_trajectory=True)
    traj.f_load()
    with self.assertRaises(AttributeError):
        traj.x.g.h
def test_overwrite_stuff(self):
    """`overwrite` replaces stored entries; unknown names only warn."""
    traj = Trajectory(name='TestOverwrite', filename=make_temp_dir('testowrite.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    res['a'] = np.array([1, 2, 3])
    res['c'] = 123445
    traj.f_store_item(res, overwrite='a', complevel=4)
    # 'b' does not exist in the result, so this merely emits a warning.
    traj.f_store_item(res, overwrite=['a', 'b'])
    traj.f_load(load_results=3)
    res = traj.test
    # 'a' was overwritten, 'c' was not.
    self.assertTrue((res['a'] == np.array([1, 2, 3])).all())
    self.assertTrue(res['c'] == 'd')
    # store_data=3 overwrites everything.
    res['c'] = 123445
    traj.f_store_item(res, store_data=3)
    res.f_empty()
    traj.f_load(load_results=3)
    self.assertTrue(traj.test['c'] == 123445)
def test_store_overly_long_comment(self):
    """An extremely long comment survives a store/load round trip intact."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    traj.f_add_result('iii', 42, 43, comment=7777 * '6')
    traj.f_store()
    # Drop the in-RAM copy and reload purely from disk.
    traj.f_remove_child('results', recursive=True)
    traj.f_load_child('results', recursive=True)
    self.assertTrue(traj.iii.v_comment == 7777 * '6')
def test_store_overly_long_comment(self):
    """An extremely long comment survives a store/load round trip intact."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    traj.f_add_result('iii', 42, 43, comment=7777 * '6')
    traj.f_store()
    # Drop the in-RAM copy and reload purely from disk.
    traj.f_remove_child('results', recursive=True)
    traj.f_load_child('results', recursive=True)
    self.assertTrue(traj.iii.v_comment == 7777 * '6')
def test_storing_and_loading_groups(self):
    """Group data, annotations, and links store/load recursively."""
    filename = make_temp_dir('grpgrp.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    res = traj.f_add_result('aaa.bbb.ccc.iii', 42, 43, comment=7777 * '6')
    traj.ccc.v_annotations['gg'] = 4
    res = traj.f_add_result('aaa.ddd.eee.jjj', 42, 43, comment=777 * '6')
    traj.ccc.v_annotations['j'] = 'osajdsojds'
    traj.f_store(only_init=True)
    traj.f_store_item('aaa', recursive=True)
    newtraj = load_trajectory(traj.v_name, filename=filename, load_all=2)
    self.compare_trajectories(traj, newtraj)
    # A changed value is only equal again after re-storing and re-loading.
    traj.iii.f_set(55)
    self.assertFalse(results_equal(traj.iii, newtraj.iii))
    traj.aaa.f_store(recursive=True, store_data=3)
    newtraj.bbb.f_load(recursive=True, load_data=3)
    self.compare_trajectories(traj, newtraj)
    # load_data=3 overwrites in-RAM annotations with the stored ones.
    traj.ccc.v_annotations['gg'] = 5
    traj.f_load(load_data=3)
    self.assertTrue(traj.ccc.v_annotations['gg'] == 4)
    traj.ccc.v_annotations['gg'] = 5
    traj.f_store(store_data=3)
    newtraj.f_load(load_data=2)
    self.assertTrue(newtraj.ccc.v_annotations['gg'] == 4)
    newtraj.f_load(load_data=3)
    self.assertTrue(newtraj.ccc.v_annotations['gg'] == 5)
    # Links travel only when `with_links` is enabled.
    traj.ccc.f_add_link('link', res)
    traj.f_store_item(traj.ccc, store_data=3, with_links=False)
    newtraj.f_load(load_data=3)
    self.assertTrue('link' not in newtraj.ccc)
    traj.f_store_item(traj.ccc, store_data=3, with_links=True, recursive=True)
    newtraj.f_load_item(newtraj.ccc, with_links=False, recursive=True)
    self.assertTrue('link' not in newtraj.ccc)
    newtraj.f_load_item(newtraj.ccc, recursive=True)
    self.assertTrue('link' in newtraj.ccc)
def test_all_arrays(self):
    """Create, mutate, and reload every kind of shared pytables array.

    Covers SharedCArray, SharedEArray, SharedVLArray, and SharedArray:
    each is created, modified inside a storage context, and its mutated
    contents verified after a full reload.
    """
    filename = make_temp_dir("hdf5arrays.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    # FIX: `np.float` was removed in NumPy 1.24; the builtin `float` is the
    # exact same dtype (float64).
    npearray = np.ones((2, 10, 3), dtype=float)
    thevlarray = np.array([compat.tobytes("j"), 22.2, compat.tobytes("gutter")])
    traj.f_store(only_init=True)
    res = traj.f_add_result(SharedResult, "arrays")
    res["carray"] = SharedCArray()
    res["carray"].create_shared_data(shape=(10, 10), atom=pt.atom.FloatAtom())
    res["earray"] = SharedEArray()
    res["earray"].create_shared_data(obj=npearray)
    res["vlarray"] = SharedVLArray()
    res["vlarray"].create_shared_data(obj=thevlarray)
    res["array"] = SharedArray()
    res["array"].create_shared_data(data=npearray)
    traj.f_store()
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    to_append = [44, compat.tobytes("k")]  # renamed from typo `toappned`
    # Mutate each array kind inside a single storage context.
    with StorageContextManager(traj) as cm:
        a1 = traj.arrays.array
        a1[0, 0, 0] = 4.0
        a2 = traj.arrays.carray
        a2[0, 1] = 4
        a4 = traj.arrays.vlarray
        a4.append(to_append)
        a3 = traj.arrays.earray
        a3.append(np.zeros((1, 10, 3)))
        # cm.f_flush_storage()
    # Reload from scratch and verify every mutation landed on disk.
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    with StorageContextManager(traj) as cm:
        a1 = traj.arrays.array
        self.assertTrue(a1[0, 0, 0] == 4.0)
        a2 = traj.arrays.carray
        self.assertTrue(a2[0, 1] == 4)
        a3 = traj.arrays.earray
        self.assertTrue(a3.read().shape == (3, 10, 3))
        a4 = traj.arrays.vlarray
        # Exactly two rows: the original and the appended one.
        for idx, x in enumerate(a4):
            if idx == 0:
                self.assertTrue(np.all(x == np.array(thevlarray)))
            elif idx == 1:
                self.assertTrue(np.all(x == np.array(to_append)))
            else:
                raise RuntimeError()
def test_storing_and_manipulating(self):
    """Create shared array/table leaves, append rows inside a storage
    context, then reload and verify row counts, cell values and the
    in-place array write.
    """
    filename = make_temp_dir("hdf5manipulation.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    thedata = np.zeros((1000, 1000))
    res = traj.f_add_result(SharedResult, "shared")
    myarray = SharedArray("array", res, trajectory=traj, add_to_parent=True)
    mytable = SharedTable("t1", res, trajectory=traj, add_to_parent=True)
    mytable2 = SharedTable("t2", res, trajectory=traj, add_to_parent=True)
    mytable3 = SharedTable("t3", res, trajectory=traj, add_to_parent=True)
    traj.f_store(only_init=True)
    myarray.create_shared_data(data=thedata)
    mytable.create_shared_data(first_row={"hi": compat.tobytes("hi"), "huhu": np.ones(3)})
    mytable2.create_shared_data(description={"ha": pt.StringCol(2, pos=0), "haha": pt.FloatCol(pos=1)})
    mytable3.create_shared_data(description={"ha": pt.StringCol(2, pos=0), "haha": pt.FloatCol(pos=1)})
    traj.f_store()
    newrow = {"ha": "hu", "haha": 4.0}
    # Row access outside an open storage context must fail.
    with self.assertRaises(TypeError):
        row = traj.shared.t2.row
    with StorageContextManager(traj) as cm:
        row = traj.shared.t2.row
        for irun in range(11):
            for key, val in newrow.items():
                row[key] = val
            row.append()
        traj.shared.t3.flush()
    data = myarray.read()
    arr = myarray.get_data_node()
    self.assertTrue(np.all(data == thedata))
    with StorageContextManager(traj) as cm:
        myarray[2, 2] = 10
        data = myarray.read()
        self.assertTrue(data[2, 2] == 10)
    # Re-check after the context closed; the service must be closed too.
    self.assertTrue(data[2, 2] == 10)
    self.assertFalse(traj.v_storage_service.is_open)
    traj = load_trajectory(name=trajname, filename=filename)
    traj.f_load(load_data=2)
    # Re-attach the freshly loaded trajectory to the shared leaves.
    traj.shared.t2.traj = traj
    traj.shared.t1.traj = traj
    traj.shared.array.traj = traj
    self.assertTrue(traj.shared.t2.nrows == 11, "%s != 11" % str(traj.shared.t2.nrows))
    self.assertTrue(traj.shared.t2[0]["ha"] == compat.tobytes("hu"), traj.shared.t2[0]["ha"])
    self.assertTrue(traj.shared.t2[1]["ha"] == compat.tobytes("hu"), traj.shared.t2[1]["ha"])
    self.assertTrue("huhu" in traj.shared.t1.colnames)
    self.assertTrue(traj.shared.array[2, 2] == 10)
def test_migrations(self):
    """Migrate a stored trajectory to a new HDF5 file and check data is
    reachable from a fresh Trajectory pointed at the new file.
    """
    traj = Trajectory(name='Testmigrate', filename=make_temp_dir('migrate.hdf5'))
    traj.f_add_result('I.am.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    new_file = make_temp_dir('migrate2.hdf5')
    # Point the existing trajectory at the new file and store again.
    traj.f_migrate(filename=new_file)
    traj.f_store()
    # A fresh trajectory migrated onto the stored name should auto-load the data.
    new_traj = Trajectory()
    new_traj.f_migrate(new_name=traj.v_name, filename=new_file, in_store=True)
    new_traj.v_auto_load = True
    self.assertTrue(new_traj.results.I.am.a.mean.resu == 42)
def test_df(self):
    """Shared pandas frames: create, append ten copies of a one-row frame,
    read back, and run a `select` query.
    """
    filename = make_temp_dir('hdf5errors.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    traj.f_store()
    dadict = {
        'hi': [1, 2, 3, 4, 5],
        'shu': ['bi', 'du', 'da', 'ha', 'hui']
    }
    dadict2 = {'answer': [42]}
    traj.f_add_result(
        SharedResult, 'dfs.df',
        SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict))
    traj.f_add_result(
        SharedResult, 'dfs.df1',
        SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict2))
    traj.f_add_result(SharedResult, 'dfs.df3', SharedPandasFrame())
    # df3 starts empty; appending df1 ten times yields ten rows.
    for irun in range(10):
        traj.df3.append(traj.df1.read())
    dframe = traj.df3.read()
    self.assertTrue(len(dframe) == 10)
    what = traj.df.select(where='index == 2')
    self.assertTrue(len(what) == 1)
def test_errors(self):
    """Error conditions of shared leaves: creating data before the
    trajectory is stored, row iteration outside a context, nested
    storage contexts, and opening an already-open store.
    """
    filename = make_temp_dir('hdf5errors.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    # Fix: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` is the documented replacement (same dtype, float64).
    npearray = np.ones((2, 10, 3), dtype=float)
    thevlarray = np.array(
        [compat.tobytes('j'), 22.2, compat.tobytes('gutter')])
    # Creating shared data before the trajectory is stored must fail.
    with self.assertRaises(TypeError):
        traj.f_add_result(
            SharedResult, 'arrays.vlarray',
            SharedVLArray()).create_shared_data(obj=thevlarray)
    traj.f_store()
    traj.arrays.vlarray.create_shared_data(obj=thevlarray)
    traj.f_add_result(SharedResult, 'arrays.array',
                      SharedArray()).create_shared_data(data=npearray)
    traj.arrays.f_add_result(SharedResult, 'super.carray', SharedCArray(),
                             comment='carray').create_shared_data(
        shape=(10, 10), atom=pt.atom.FloatAtom())
    traj.arrays.f_add_result(SharedResult, 'earray',
                             SharedEArray()).create_shared_data(
        'earray', obj=npearray)
    traj.f_store()
    # Iterating rows outside an open storage context must fail.
    with self.assertRaises(TypeError):
        traj.arrays.array.iterrows()
    with StorageContextManager(traj):
        # Nesting a second context on the same trajectory is forbidden.
        with self.assertRaises(RuntimeError):
            with StorageContextManager(traj):
                pass
        self.assertTrue(traj.v_storage_service.is_open)
        # Opening the store again while it is already open is forbidden.
        with self.assertRaises(RuntimeError):
            StorageContextManager(traj).open_store()
    self.assertFalse(traj.v_storage_service.is_open)
def test_wildcard_search(self):
    """Wildcard ('$' / crun / run-index) resolution, both in RAM and via
    auto-loading after the results subtree has been removed.
    """
    traj = Trajectory(name='Testwildcard', filename=make_temp_dir('wilcard.hdf5'),
                      add_time=True)
    traj.f_add_parameter('expl', 2)
    traj.f_explore({'expl': [1, 2, 3, 4]})
    traj.f_add_result('wc2test.$.hhh', 333)
    traj.f_add_leaf('results.wctest.run_00000000.jjj', 42)
    traj.f_add_result('results.wctest.run_00000001.jjj', 43)
    # f_wildcard('$', -1) resolves to the run_ALL group name.
    traj.f_add_result('results.wctest.%s.jjj' % traj.f_wildcard('$', -1), 43)
    traj.v_crun = 1
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    traj.f_store()
    # Drop results and resolve the same wildcards via auto-loading from disk.
    get_root_logger().info('Removing child1')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('Doing auto-load')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    # Same again with explicit run-index lookups (-1 means run_ALL).
    get_root_logger().info('Removing child2')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[-1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Removing child3')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Done with wildcard test')
def test_find_in_all_runs_with_links(self):
    """`f_get_from_runs` over per-run results, with name/index keys and
    with links included or excluded.
    """
    traj = Trajectory()
    traj.f_add_parameter('FloatParam')
    traj.par.FloatParam = 4.0
    self.explore_dict = {'FloatParam': [1.0, 1.1, 1.2, 1.3]}
    traj.f_explore(self.explore_dict)
    self.assertTrue(len(traj) == 4)
    traj.f_add_result('results.runs.run_00000000.sub.resulttest', 42)
    traj.f_add_result('results.runs.run_00000001.sub.resulttest', 43)
    traj.f_add_result('results.runs.run_00000002.sub.resulttest', 44)
    traj.f_add_result('results.runs.run_00000002.sub.resulttest2', 42)
    traj.f_add_result('results.runs.run_00000003.sub.resulttest2', 43)
    traj.f_add_derived_parameter(
        'derived_parameters.runs.run_00000002.testing', 44)
    # 'resulttest' exists in runs 0-2 only.
    res_dict = traj.f_get_from_runs('resulttest', fast_access=True)
    self.assertTrue(len(res_dict) == 3)
    self.assertTrue(res_dict['run_00000001'] == 43)
    self.assertTrue('run_00000003' not in res_dict)
    res_dict = traj.f_get_from_runs(name='sub.resulttest2', use_indices=True)
    self.assertTrue(len(res_dict) == 2)
    self.assertTrue(res_dict[3] is traj.f_get('run_00000003.resulttest2'))
    self.assertTrue(1 not in res_dict)
    # A link named 'resulttest2' in run 0 should count as a hit...
    traj.res.runs.r_0.f_add_link('resulttest2', traj.r_1.f_get('resulttest'))
    res_dict = traj.f_get_from_runs(name='resulttest2', use_indices=True)
    self.assertTrue(len(res_dict) == 3)
    self.assertTrue(res_dict[0] is traj.f_get('run_00000001.resulttest'))
    self.assertTrue(1 not in res_dict)
    # ...unless links are excluded.
    res_dict = traj.f_get_from_runs(name='resulttest2', use_indices=True,
                                    with_links=False)
    self.assertTrue(len(res_dict) == 2)
    self.assertTrue(0 not in res_dict)
    self.assertTrue(1 not in res_dict)
def test_wildcard_search(self):
    """Wildcard ('$' / crun / run-index) resolution, in RAM and via
    auto-loading after removing the results subtree.

    NOTE(review): this file contains another, textually near-identical
    ``test_wildcard_search``; if both live in the same class the later
    definition silently shadows the earlier — verify and deduplicate.
    """
    traj = Trajectory(name='Testwildcard', filename=make_temp_dir('wilcard.hdf5'),
                      add_time=True)
    traj.f_add_parameter('expl', 2)
    traj.f_explore({'expl': [1, 2, 3, 4]})
    traj.f_add_result('wc2test.$.hhh', 333)
    traj.f_add_leaf('results.wctest.run_00000000.jjj', 42)
    traj.f_add_result('results.wctest.run_00000001.jjj', 43)
    traj.f_add_result('results.wctest.%s.jjj' % traj.f_wildcard('$', -1), 43)
    traj.v_crun = 1
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    traj.f_store()
    get_root_logger().info('Removing child1')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('Doing auto-load')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    get_root_logger().info('Removing child2')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[-1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Removing child3')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Done with wildcard test')
def test_link_deletion(self):
    """Deleting a stored link removes it from disk: a reload must not
    expose the deleted link, while circular group links survive.
    """
    filename = make_temp_dir('linktest2.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    traj.par.f_add_link('gg', res)
    # Circular links between the two groups.
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.f_store()
    traj.f_delete_link('par.gg')
    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)
    with self.assertRaises(AttributeError):
        traj2.gg
def test_link_deletion(self):
    """Deleting a stored link removes it from disk: a reload must not
    expose the deleted link, while circular group links survive.

    NOTE(review): this file contains another, textually near-identical
    ``test_link_deletion``; if both live in the same class the later
    definition silently shadows the earlier — verify and deduplicate.
    """
    filename = make_temp_dir('linktest2.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    traj.par.f_add_link('gg', res)
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.f_store()
    traj.f_delete_link('par.gg')
    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)
    with self.assertRaises(AttributeError):
        traj2.gg
def test_find_in_all_runs_with_links(self):
    """`f_get_from_runs` over per-run results, with name/index keys and
    with links included or excluded.

    NOTE(review): this file contains another, textually near-identical
    ``test_find_in_all_runs_with_links``; if both live in the same class
    the later definition silently shadows the earlier — verify and
    deduplicate.
    """
    traj = Trajectory()
    traj.f_add_parameter('FloatParam')
    traj.par.FloatParam = 4.0
    self.explore_dict = {'FloatParam': [1.0, 1.1, 1.2, 1.3]}
    traj.f_explore(self.explore_dict)
    self.assertTrue(len(traj) == 4)
    traj.f_add_result('results.runs.run_00000000.sub.resulttest', 42)
    traj.f_add_result('results.runs.run_00000001.sub.resulttest', 43)
    traj.f_add_result('results.runs.run_00000002.sub.resulttest', 44)
    traj.f_add_result('results.runs.run_00000002.sub.resulttest2', 42)
    traj.f_add_result('results.runs.run_00000003.sub.resulttest2', 43)
    traj.f_add_derived_parameter('derived_parameters.runs.run_00000002.testing', 44)
    res_dict = traj.f_get_from_runs('resulttest', fast_access=True)
    self.assertTrue(len(res_dict) == 3)
    self.assertTrue(res_dict['run_00000001'] == 43)
    self.assertTrue('run_00000003' not in res_dict)
    res_dict = traj.f_get_from_runs(name='sub.resulttest2', use_indices=True)
    self.assertTrue(len(res_dict) == 2)
    self.assertTrue(res_dict[3] is traj.f_get('run_00000003.resulttest2'))
    self.assertTrue(1 not in res_dict)
    traj.res.runs.r_0.f_add_link('resulttest2', traj.r_1.f_get('resulttest'))
    res_dict = traj.f_get_from_runs(name='resulttest2', use_indices=True)
    self.assertTrue(len(res_dict) == 3)
    self.assertTrue(res_dict[0] is traj.f_get('run_00000001.resulttest'))
    self.assertTrue(1 not in res_dict)
    res_dict = traj.f_get_from_runs(name='resulttest2', use_indices=True,
                                    with_links=False)
    self.assertTrue(len(res_dict) == 2)
    self.assertTrue(0 not in res_dict)
    self.assertTrue(1 not in res_dict)
def test_df(self):
    """Shared pandas frames: create, append ten copies of a one-row frame,
    read back, and run a `select` query.

    NOTE(review): this file contains other, textually near-identical
    ``test_df`` definitions; if they share a class the later definition
    silently shadows the earlier — verify and deduplicate.
    """
    filename = make_temp_dir('hdf5errors.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    traj.f_store()
    dadict = {'hi': [1, 2, 3, 4, 5], 'shu': ['bi', 'du', 'da', 'ha', 'hui']}
    dadict2 = {'answer': [42]}
    traj.f_add_result(SharedResult, 'dfs.df',
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict))
    traj.f_add_result(SharedResult, 'dfs.df1',
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict2))
    traj.f_add_result(SharedResult, 'dfs.df3', SharedPandasFrame())
    for irun in range(10):
        traj.df3.append(traj.df1.read())
    dframe = traj.df3.read()
    self.assertTrue(len(dframe) == 10)
    what = traj.df.select(where='index == 2')
    self.assertTrue(len(what) == 1)
def test_df(self):
    """Shared pandas frames: create, append ten copies of a one-row frame,
    read back, and run a `select` query.

    NOTE(review): this file contains other, textually near-identical
    ``test_df`` definitions; if they share a class the later definition
    silently shadows the earlier — verify and deduplicate.
    """
    filename = make_temp_dir("hdf5errors.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    traj.f_store()
    dadict = {"hi": [1, 2, 3, 4, 5], "shu": ["bi", "du", "da", "ha", "hui"]}
    dadict2 = {"answer": [42]}
    traj.f_add_result(SharedResult, "dfs.df",
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict))
    traj.f_add_result(SharedResult, "dfs.df1",
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict2))
    traj.f_add_result(SharedResult, "dfs.df3", SharedPandasFrame())
    for irun in range(10):
        traj.df3.append(traj.df1.read())
    dframe = traj.df3.read()
    self.assertTrue(len(dframe) == 10)
    what = traj.df.select(where="index == 2")
    self.assertTrue(len(what) == 1)
def test_conversions(self):
    """Round-trip conversion between ordinary results and shared results
    (`make_shared_result` / `make_ordinary_result`) for arrays, tables,
    frames, lists and tuples. Order-sensitive: each conversion mutates the
    stored trajectory before the next reload.
    """
    filename = make_temp_dir("hdf5manipulation.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    traj.v_standard_result = SharedResult
    traj.f_store(only_init=True)
    traj.f_add_result("shared_data")
    thedata = np.zeros((1000, 1000))
    myarray = SharedArray("array", traj.shared_data, trajectory=traj)
    traj.shared_data["array"] = myarray
    mytable = SharedTable("t1", traj.shared_data, trajectory=traj)
    traj.shared_data["t1"] = mytable
    # mytable2 = SharedTableResult('h.t2', trajectory=traj)
    # mytable3 = SharedTableResult('jjj.t3', trajectory=traj)
    dadict = {"hi": [1, 2, 3, 4, 5], "shu": ["bi", "du", "da", "ha", "hui"]}
    dadict2 = {"answer": [42]}
    res = traj.f_add_result("shared.dfs")
    res["df"] = SharedPandasFrame()
    res["df"].create_shared_data(data=pd.DataFrame(dadict), trajectory=traj)
    frame = SharedPandasFrame("df1", traj.f_get("shared.dfs"), trajectory=traj)
    frame.create_shared_data(data=pd.DataFrame(dadict2))
    res["df1"] = frame
    # Ordinary (non-shared) results of several container types for conversion.
    traj.f_add_result("mylist", [1, 2, 3])
    traj.f_add_result("my.mytuple", k=(1, 2, 3), wa=42)
    traj.f_add_result("my.myarray", np.zeros((50, 50)))
    traj.f_add_result("my.myframe", data=pd.DataFrame(dadict2))
    traj.f_add_result("my.mytable", ObjectTable(data=dadict2))
    myarray.create_shared_data(data=thedata)
    mytable.create_shared_data(first_row={"hi": compat.tobytes("hi"), "huhu": np.ones(3)})
    traj.f_store()
    data = myarray.read()
    arr = myarray.get_data_node()
    self.assertTrue(np.all(data == thedata))
    with StorageContextManager(traj) as cm:
        myarray[2, 2] = 10
        data = myarray.read()
        self.assertTrue(data[2, 2] == 10)
    self.assertTrue(data[2, 2] == 10)
    self.assertFalse(traj.v_storage_service.is_open)
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    # Shared array -> ordinary numpy array.
    make_ordinary_result(traj.shared_data, "array", trajectory=traj)
    array = traj.shared_data.array
    self.assertTrue(isinstance(array, np.ndarray))
    thedata[2, 2] = 10
    self.assertTrue(np.all(array == thedata))
    # Shared table -> ObjectTable.
    make_ordinary_result(traj.shared_data, "t1", trajectory=traj)
    t1 = traj.shared_data.t1
    self.assertTrue(isinstance(t1, ObjectTable))
    # self.assertTrue(np.all(t1["huhu"][0] == np.ones(3)))
    # Shared frame -> pandas DataFrame.
    dfs = traj.shared.dfs
    make_ordinary_result(traj.shared.dfs, "df", trajectory=traj)
    theframe = dfs.f_get("df")
    self.assertTrue(isinstance(dfs, Result))
    self.assertTrue(isinstance(theframe, pd.DataFrame))
    self.assertTrue(theframe["hi"][0] == 1)
    # Ordinary list -> shared (positional item 0), mutate, back to ordinary.
    listres = traj.f_get("mylist")
    listres = make_shared_result(listres, 0, trajectory=traj)
    with StorageContextManager(traj) as cm:
        self.assertTrue(listres[0][2] == 3)
        listres[0][0] = 4
    self.assertTrue(listres[0][0] == 4)
    listres = make_ordinary_result(listres, 0, trajectory=traj)
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    mylist = traj.mylist
    self.assertTrue(isinstance(listres, Result))
    self.assertTrue(mylist[0] == 4)
    self.assertTrue(isinstance(mylist, list))
    # Tuple: wrong item name must fail, then convert 'k' there and back.
    mytuple = traj.mytuple
    with self.assertRaises(AttributeError):
        mytuple = make_shared_result(mytuple, "mylist", traj, new_class=SharedArray)
    mytuple = make_shared_result(mytuple, "k", traj, new_class=SharedArray)
    self.assertTrue(mytuple.k[1] == 2)
    mytuple = make_ordinary_result(mytuple, "k", trajectory=traj)
    self.assertTrue(isinstance(mytuple.k, tuple))
    self.assertTrue(mytuple.k[2] == 3)
    # Frame round-trip.
    myframe = traj.myframe
    myframe = make_shared_result(myframe, "data", traj)
    theframe = myframe.data.read()
    self.assertTrue(theframe["answer"][0] == 42)
    myframe = make_ordinary_result(myframe, "data", trajectory=traj)
    traj.f_load_item(myframe)
    self.assertTrue(myframe.data["answer"][0] == 42)
    # ObjectTable round-trip.
    mytable = traj.f_get("mytable")
    mytable = make_shared_result(mytable, 0, traj)
    self.assertTrue(isinstance(mytable[0], SharedTable))
    rows = mytable.mytable.read()
    self.assertTrue(rows[0][0] == 42)
    mytable = make_ordinary_result(mytable, 0, trajectory=traj)
    self.assertTrue(isinstance(mytable, Result))
    self.assertTrue(mytable[0]["answer"][0] == 42)
class SharedArrayTest(TrajectoryComparator):
    """Unit tests for SharedArray: read, item access, enum, row iteration,
    item assignment, plain iteration and len — each verified both on the
    freshly created trajectory and after a reload from disk.
    """

    tags = 'unittest', 'trajectory', 'shared', 'hdf5', 'array', 'mehmet'

    def setUp(self):
        # Fresh stored trajectory with a single shared 'array' leaf
        # under results.shared_data.
        self.filename = make_temp_dir('shared_table_test.hdf5')
        self.traj = Trajectory(name=make_trajectory_name(self),
                               filename=self.filename)
        self.traj.v_standard_result = SharedResult
        self.traj.f_store(only_init=True)
        self.traj.f_add_result('shared_data')
        self.shared_array = SharedArray(name='array',
                                        parent=self.traj.shared_data,
                                        trajectory=self.traj,
                                        add_to_parent=True)

    def test_array_read(self):
        # Written data must be identical after a full reload.
        the_reading_array = np.ones((100, 100)) * 4
        first_reading_array = self.traj.results.shared_data.array
        self.assertTrue(first_reading_array is self.shared_array)
        first_reading_array.create_shared_data(obj=the_reading_array)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_reading_array = traj2.shared_data.array.read()
        self.assertTrue(np.all(the_reading_array == second_reading_array),
                        '%s != %s' % (str(the_reading_array),
                                      str(second_reading_array)))

    def test_array_getitem(self):
        # Element-wise __getitem__ matches the source array, before and
        # after reload.
        the_getitem_array = np.array(range(100))
        first_getitem_array = self.traj.results.shared_data.array
        first_getitem_array.create_shared_data(obj=the_getitem_array)
        for k in range(len(the_getitem_array)):
            self.assertEqual(the_getitem_array[k], first_getitem_array[k])
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        for j in range(len(the_getitem_array)):
            self.assertEqual(the_getitem_array[j],
                             traj2.results.shared_data.array[j])

    def test_array_getenum(self):
        # get_enum is not supported on a plain shared array -> TypeError.
        the_getenum_array = np.array(range(100))
        first_getenum_array = self.traj.results.shared_data.array
        first_getenum_array.create_shared_data(obj=the_getenum_array)
        with self.assertRaises(TypeError):
            first_getenum_array.get_enum()
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_enum_array = traj2.results.shared_data.array
        with self.assertRaises(TypeError):
            second_enum_array.get_enum()

    def test_array_iterrows(self):
        # iterrows() yields rows in order; requires an open storage context.
        the_iterrows_array = np.random.randint(0, 100, (100, 100))
        first_iterrows_array = self.traj.results.shared_data.array
        first_iterrows_array.create_shared_data(obj=the_iterrows_array)
        with StorageContextManager(self.traj):
            for idx, row in enumerate(first_iterrows_array.iterrows()):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_iterrows_array = traj2.results.shared_data.array
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_iterrows_array.iterrows()):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))

    def test_array_setitem(self):
        # __setitem__ writes through to disk; a reloaded copy sees the value
        # and can itself be written.
        the_setitem_array = np.zeros((50, 50))
        first_setitem_array = self.traj.results.shared_data.array
        first_setitem_array.create_shared_data(obj=the_setitem_array)
        first_setitem_array[2, 2] = 10
        self.assertEqual(first_setitem_array[2, 2], 10)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_setitem_array = traj2.results.shared_data.array
        self.assertEqual(second_setitem_array[2, 2], 10)
        second_setitem_array[3, 3] = 17
        self.assertEqual(second_setitem_array[3, 3], 17)

    def test_array_iter(self):
        # Plain iteration over the shared array mirrors numpy row iteration.
        the_iterrows_array = np.random.randint(0, 100, (100, 100))
        first_iterrows_array = self.traj.results.shared_data.array
        first_iterrows_array.create_shared_data(obj=the_iterrows_array)
        with StorageContextManager(self.traj):
            for idx, row in enumerate(first_iterrows_array):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
            self.assertTrue(np.all(the_iterrows_array ==
                                   first_iterrows_array.read()))
        for idx, row in enumerate(the_iterrows_array):
            self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_iterrows_array = traj2.results.shared_data.array
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_iterrows_array):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
            self.assertTrue(np.all(the_iterrows_array ==
                                   second_iterrows_array.read()))
        for idx, row in enumerate(second_iterrows_array):
            self.assertTrue(np.all(row == the_iterrows_array[idx, :]))

    def test_array_len(self):
        # len() reports the first-axis length, also after reload.
        the_len_array = np.ones((100, 100))
        first_len_array = self.traj.results.shared_data.array
        self.assertTrue(first_len_array is self.shared_array)
        first_len_array.create_shared_data(obj=the_len_array)
        self.assertEqual(len(first_len_array), 100)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_len_array = traj2.results.shared_data.array
        self.assertEqual(len(second_len_array), 100)
# Now let's see what fast access is: print('The name of the actor playing Luke is %s.' % traj.luke_skywalker) # And now what happens if you forbid it traj.v_fast_access = False print('The object found for luke_skywalker is `%s`.' % str(traj.luke_skywalker)) #Let's store the trajectory: traj.f_store() # That was easy, let's assume we already completed a simulation and now we add a veeeery large # result that we want to store to disk immediately and than empty it traj.f_add_result('starwars.gross_income_of_film', amount=10.1**11, currency='$$$', comment='George Lucas is rich, dude!') # This is a large number, we better store it and than free the memory: traj.f_store_item('gross_income_of_film') traj.gross_income_of_film.f_empty() # Moreover, if you don't like prefixes `f_` and `v_` you can also use `func` and `vars`: traj.func.add_result('starwars.robots', c3p0='android', r2d2='beeep!', comment='Help me Obiwan!') print(traj.results.starwars.robots.vars.comment) # Now lets reload the trajectory del traj
def test_delete_whole_subtrees(self):
    """Recursive removal/deletion of whole subtrees, checking the HDF5
    overview tables directly to confirm what stays on disk.
    """
    filename = make_temp_dir('testdeltree.hdf5')
    traj = Trajectory(name='TestDelete', filename=filename,
                      large_overview_tables=True, add_time=True)
    res = traj.f_add_result('mytest.yourtest.test', a='b', c='d')
    dpar = traj.f_add_derived_parameter('mmm.gr.dpdp', 666)
    res = traj.f_add_result('hhh.ll', a='b', c='d')
    res = traj.f_add_derived_parameter('hhh.gg', 555)
    traj.f_store()
    # Inspect the raw HDF5 overview tables: 2 derived params, 2 results.
    with pt.open_file(filename) as fh:
        daroot = fh.root._f_get_child(traj.v_name)
        dpar_table = daroot.overview.derived_parameters_overview
        self.assertTrue(len(dpar_table) == 2)
        res_table = daroot.overview.results_overview
        self.assertTrue((len(res_table)) == 2)
    # Removing/deleting a non-empty group without recursive=True must fail.
    with self.assertRaises(TypeError):
        traj.f_remove_item(traj.yourtest)
    with self.assertRaises(TypeError):
        traj.f_delete_item(traj.yourtest)
    # f_remove_item only drops the subtree from RAM; disk still has it.
    traj.f_remove_item(traj.yourtest, recursive=True)
    self.assertTrue('mytest' in traj)
    self.assertTrue('yourtest' not in traj)
    traj.f_load(load_data=2)
    self.assertTrue('yourtest.test' in traj)
    # f_delete_item removes from disk (and here also from the trajectory).
    traj.f_delete_item(traj.yourtest, recursive=True, remove_from_trajectory=True)
    traj.f_delete_item(traj.mmm, recursive=True, remove_from_trajectory=True)
    traj.f_load(load_data=2)
    self.assertTrue('yourtest.test' not in traj)
    self.assertTrue('yourtest' not in traj)
    # NOTE(review): overview rows appear unchanged after deletion — presumably
    # deletion does not prune overview tables; confirm against pypet docs.
    with pt.open_file(filename) as fh:
        daroot = fh.root._f_get_child(traj.v_name)
        dpar_table = daroot.overview.derived_parameters_overview
        self.assertTrue(len(dpar_table) == 2)
        res_table = daroot.overview.results_overview
        self.assertTrue((len(res_table)) == 2)
    traj.f_add_parameter('ggg', 43)
    traj.f_add_parameter('hhh.mmm', 45)
    traj.f_add_parameter('jjj', 55)
    traj.f_add_parameter('hhh.nnn', 55555)
    traj.f_explore({'ggg': [1, 2, 3]})
    traj.f_store()
    with pt.open_file(filename) as fh:
        daroot = fh.root._f_get_child(traj.v_name)
        par_table = daroot.overview.parameters_overview
        self.assertTrue(len(par_table) == 4)
    # Delete a parameter subtree, add a new parameter, store again.
    traj.f_delete_item('par.hhh', recursive=True, remove_from_trajectory=True)
    traj.f_add_parameter('saddsdfdsfd', 111)
    traj.f_store()
    with pt.open_file(filename) as fh:
        daroot = fh.root._f_get_child(traj.v_name)
        par_table = daroot.overview.parameters_overview
        self.assertTrue(len(par_table) == 5)
class SharedArrayTest(TrajectoryComparator):
    """Unit tests for SharedArray: read, item access, enum, row iteration,
    item assignment, plain iteration and len — each verified both on the
    freshly created trajectory and after a reload from disk.

    NOTE(review): this file contains another, textually near-identical
    ``SharedArrayTest``; the later class definition shadows the earlier at
    module scope — verify and deduplicate.
    """

    tags = 'unittest', 'trajectory', 'shared', 'hdf5', 'array', 'mehmet'

    def setUp(self):
        # Fresh stored trajectory with a single shared 'array' leaf
        # under results.shared_data.
        self.filename = make_temp_dir('shared_table_test.hdf5')
        self.traj = Trajectory(name=make_trajectory_name(self),
                               filename=self.filename)
        self.traj.v_standard_result = SharedResult
        self.traj.f_store(only_init=True)
        self.traj.f_add_result('shared_data')
        self.shared_array = SharedArray(name='array',
                                        parent=self.traj.shared_data,
                                        trajectory=self.traj,
                                        add_to_parent=True)

    def test_array_read(self):
        # Written data must be identical after a full reload.
        the_reading_array = np.ones((100, 100)) * 4
        first_reading_array = self.traj.results.shared_data.array
        self.assertTrue(first_reading_array is self.shared_array)
        first_reading_array.create_shared_data(obj=the_reading_array)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_reading_array = traj2.shared_data.array.read()
        self.assertTrue(
            np.all(the_reading_array == second_reading_array),
            '%s != %s' % (str(the_reading_array), str(second_reading_array)))

    def test_array_getitem(self):
        # Element-wise __getitem__ matches the source array, before and
        # after reload.
        the_getitem_array = np.array(range(100))
        first_getitem_array = self.traj.results.shared_data.array
        first_getitem_array.create_shared_data(obj=the_getitem_array)
        for k in range(len(the_getitem_array)):
            self.assertEqual(the_getitem_array[k], first_getitem_array[k])
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        for j in range(len(the_getitem_array)):
            self.assertEqual(the_getitem_array[j],
                             traj2.results.shared_data.array[j])

    def test_array_getenum(self):
        # get_enum is not supported on a plain shared array -> TypeError.
        the_getenum_array = np.array(range(100))
        first_getenum_array = self.traj.results.shared_data.array
        first_getenum_array.create_shared_data(obj=the_getenum_array)
        with self.assertRaises(TypeError):
            first_getenum_array.get_enum()
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_enum_array = traj2.results.shared_data.array
        with self.assertRaises(TypeError):
            second_enum_array.get_enum()

    def test_array_iterrows(self):
        # iterrows() yields rows in order; requires an open storage context.
        the_iterrows_array = np.random.randint(0, 100, (100, 100))
        first_iterrows_array = self.traj.results.shared_data.array
        first_iterrows_array.create_shared_data(obj=the_iterrows_array)
        with StorageContextManager(self.traj):
            for idx, row in enumerate(first_iterrows_array.iterrows()):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_iterrows_array = traj2.results.shared_data.array
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_iterrows_array.iterrows()):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))

    def test_array_setitem(self):
        # __setitem__ writes through to disk; a reloaded copy sees the value
        # and can itself be written.
        the_setitem_array = np.zeros((50, 50))
        first_setitem_array = self.traj.results.shared_data.array
        first_setitem_array.create_shared_data(obj=the_setitem_array)
        first_setitem_array[2, 2] = 10
        self.assertEqual(first_setitem_array[2, 2], 10)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_setitem_array = traj2.results.shared_data.array
        self.assertEqual(second_setitem_array[2, 2], 10)
        second_setitem_array[3, 3] = 17
        self.assertEqual(second_setitem_array[3, 3], 17)

    def test_array_iter(self):
        # Plain iteration over the shared array mirrors numpy row iteration.
        the_iterrows_array = np.random.randint(0, 100, (100, 100))
        first_iterrows_array = self.traj.results.shared_data.array
        first_iterrows_array.create_shared_data(obj=the_iterrows_array)
        with StorageContextManager(self.traj):
            for idx, row in enumerate(first_iterrows_array):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
            self.assertTrue(
                np.all(the_iterrows_array == first_iterrows_array.read()))
        for idx, row in enumerate(the_iterrows_array):
            self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_iterrows_array = traj2.results.shared_data.array
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_iterrows_array):
                self.assertTrue(np.all(row == the_iterrows_array[idx, :]))
            self.assertTrue(
                np.all(the_iterrows_array == second_iterrows_array.read()))
        for idx, row in enumerate(second_iterrows_array):
            self.assertTrue(np.all(row == the_iterrows_array[idx, :]))

    def test_array_len(self):
        # len() reports the first-axis length, also after reload.
        the_len_array = np.ones((100, 100))
        first_len_array = self.traj.results.shared_data.array
        self.assertTrue(first_len_array is self.shared_array)
        first_len_array.create_shared_data(obj=the_len_array)
        self.assertEqual(len(first_len_array), 100)
        self.traj.f_store()
        traj2 = load_trajectory(name=self.traj.v_name, filename=self.filename,
                                load_all=2, dynamic_imports=SharedResult)
        second_len_array = traj2.results.shared_data.array
        self.assertEqual(len(second_len_array), 100)
def test_storage_and_loading(self):
    """Store a trajectory full of links (including circular group links),
    reload it, and verify link resolution, link-count bookkeeping, removal
    and auto-loading through links.
    """
    filename = make_temp_dir('linktest.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    traj.par.f_add_link('gg', res)
    traj.f_add_link('hh', res)
    traj.f_add_link('jj', traj.par)
    traj.f_add_link('ii', res)
    # Circular links between the two groups, plus a link created through
    # the cycle itself.
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.test.f_add_link('circle2', traj.test.circle1.circle2)
    traj.f_add_parameter_group('test.ab.bc.cd')
    traj.cd.f_add_link(traj.test)
    traj.test.f_add_link(traj.cd)
    traj.f_store()
    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)
    self.assertTrue(traj.kk == traj2.gg, '%s != %s' % (traj.kk, traj2.gg))
    self.assertTrue(traj.cd.test is traj.test)
    # Fix: assertTrue(a, b) treats `b` as the failure *message* and only
    # checks truthiness of `a`; the intended check is equality of the two
    # link-count lengths.
    self.assertEqual(len(traj._linked_by), len(traj2._linked_by))
    self.compare_trajectories(traj, traj2)
    # Removing a link must update the internal link counter.
    self.assertTrue('jj' in traj2._nn_interface._links_count)
    traj2.f_remove_child('jj')
    self.assertTrue('jj' not in traj2._nn_interface._links_count)
    traj2.f_remove_child('hh')
    traj2.f_remove_child('ii')
    traj2.f_remove_child('parameters', recursive=True)
    # With auto-loading, links are resolved transparently from disk.
    traj2.v_auto_load = True
    group = traj2.par.test2.circle2
    self.assertTrue(group is traj2.test)
    retest = traj2.test.circle1
    self.assertTrue(retest is traj2.test2)
    self.assertTrue(traj2.test.circle2 is traj2.test)
    self.assertTrue(traj2.hh == traj2.res.kk)
    # Without auto-loading, links must be loaded explicitly.
    traj2.v_auto_load = False
    traj2.f_load_child('jj')
    self.assertTrue(traj2.jj is traj2.par)
    traj2.f_load(load_data=2)
    self.assertTrue(traj2.ii == traj2.res.kk)
class SharedTableTest(TrajectoryComparator):
    """Tests for :class:`SharedTable`, an HDF5-backed shared table result.

    NOTE(review): the original file defined this class *twice* with
    behaviorally identical bodies; the first definition was dead code
    shadowed by the second, so the duplicate has been collapsed into this
    single definition.  The seven identical row-filling loops are factored
    into :meth:`_fill_mehmet_rows` and the repeated reload boilerplate
    into :meth:`_reload`.
    """

    tags = 'unittest', 'trajectory', 'shared', 'hdf5', 'table', 'mehmet'

    def setUp(self):
        # Fresh trajectory backed by a temporary HDF5 file; the shared
        # table lives under `results.shared_data.table`.
        self.filename = make_temp_dir('shared_table_test.hdf5')
        self.traj = Trajectory(name=make_trajectory_name(self),
                               filename=self.filename)
        self.traj.v_standard_result = SharedResult
        self.traj.f_store(only_init=True)
        self.traj.f_add_result('shared_data')
        self.shared_table = SharedTable(name='table',
                                        parent=self.traj.shared_data,
                                        trajectory=self.traj,
                                        add_to_parent=True)

    def _fill_mehmet_rows(self, table, weight_step):
        """Append 10 rows (id=i, name='mehmet i', surname='Timur',
        weight=65.5 + i * weight_step) to `table` and flush it.

        Must be called while the trajectory's store is open (inside a
        `StorageContextManager` block).
        """
        row = table.row
        for i in range(10):
            row['id'] = i
            row['name'] = 'mehmet %d' % i
            row['surname'] = 'Timur'
            row['weight'] = 65.5 + i * weight_step
            row.append()
        table.flush()

    def _reload(self):
        """Load a fresh trajectory instance from disk.

        :return: tuple of (reloaded trajectory, its shared table)
        """
        traj = load_trajectory(name=self.traj.v_name, filename=self.filename,
                               load_all=2, dynamic_imports=SharedResult)
        return traj, traj.results.shared_data.table

    def test_table_read(self):
        """Reading must agree across independently loaded handles."""
        the_reading_table = self.traj.results.shared_data.table
        self.assertTrue(the_reading_table is self.shared_table)
        the_reading_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_reading_table, 1.5)
            for idx, row in enumerate(the_reading_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_reading_table = self._reload()
        self.assertTrue(
            np.all(the_reading_table.read() == second_reading_table.read()))
        # Data appended through one handle must be visible through the other
        second_reading_table.append([(21, 'aaa', 'bbb', 100)])
        self.assertTrue(
            np.all(the_reading_table.read() == second_reading_table.read()))

        traj3, third_reading_table = self._reload()
        self.assertTrue(
            np.all(the_reading_table.read() == third_reading_table.read()))

    def test_table_append(self):
        """Appending rows must persist across store/reload cycles."""
        the_append_table = self.traj.results.shared_data.table
        self.assertTrue(the_append_table is self.shared_table)
        the_append_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            row = the_append_table.row
            for i in range(15):
                row['id'] = i * 2
                row['name'] = 'name %d' % i
                row['surname'] = '%d surname' % i
                row['weight'] = (i * 0.5 + 50.0)
                row.append()
            the_append_table.flush()
            for idx, row in enumerate(the_append_table.iterrows()):
                self.assertEqual(row['id'], idx * 2)
                self.assertEqual(row['name'], compat.tobytes('name %d' % idx))
                self.assertEqual(row['surname'],
                                 compat.tobytes('%d surname' % idx))
                self.assertEqual(row['weight'], idx * 0.5 + 50.0)
        self.traj.f_store()

        traj2, second_append_table = self._reload()
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_append_table.iterrows()):
                self.assertEqual(row['id'], idx * 2)
                self.assertEqual(row['name'], compat.tobytes('name %d' % idx))
                self.assertEqual(row['surname'],
                                 compat.tobytes('%d surname' % idx))
                self.assertEqual(row['weight'], idx * 0.5 + 50.0)
        second_append_table.append([(30, 'mehmet', 'timur', 65.5)])
        self.assertEqual(second_append_table.read(field='id')[-1], 30)
        self.assertEqual(second_append_table.read(field='name')[-1],
                         compat.tobytes('mehmet'))
        self.assertEqual(second_append_table.read(field='surname')[-1],
                         compat.tobytes('timur'))
        self.assertEqual(second_append_table.read(field='weight')[-1], 65.5)
        traj2.f_store()

        traj3, third_append_table = self._reload()
        self.assertEqual(third_append_table.read(field='id')[-1], 30)
        self.assertEqual(third_append_table.read(field='name')[-1],
                         compat.tobytes('mehmet'))
        self.assertEqual(third_append_table.read(field='surname')[-1],
                         compat.tobytes('timur'))
        self.assertEqual(third_append_table.read(field='weight')[-1], 65.5)
        third_append_table.append([(33, 'Harrison', 'Ford', 95.5)])
        self.assertEqual(third_append_table.read(field='id')[-1], 33)
        self.assertEqual(third_append_table.read(field='name')[-1],
                         compat.tobytes('Harrison'))
        self.assertEqual(third_append_table.read(field='surname')[-1],
                         compat.tobytes('Ford'))
        self.assertEqual(third_append_table.read(field='weight')[-1], 95.5)

    def test_table_iterrows(self):
        """iterrows() must yield rows in insertion order after reload."""
        the_iterrows_table = self.traj.results.shared_data.table
        self.assertTrue(the_iterrows_table is self.shared_table)
        the_iterrows_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_iterrows_table, 1.5)
            for idx, row in enumerate(the_iterrows_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_iterrows_table = self._reload()
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_iterrows_table.iterrows()):
                self.assertEqual(row['id'], idx)

    def test_table_col(self):
        """col() must agree with read(field=...) for every column."""
        the_col_table = self.traj.results.shared_data.table
        self.assertTrue(the_col_table is self.shared_table)
        the_col_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_col_table, 1.5)
            for idx, row in enumerate(the_col_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_col_table = self._reload()
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_col_table.iterrows()):
                self.assertEqual(row['id'], idx)
            for field in ('id', 'name', 'surname', 'weight'):
                self.assertTrue(np.all(second_col_table.read(field=field) ==
                                       second_col_table.col(field)))

    def test_table_getitem(self):
        """table[idx] must agree with read()[idx]."""
        the_getitem_table = self.traj.results.shared_data.table
        self.assertTrue(the_getitem_table is self.shared_table)
        the_getitem_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_getitem_table, 1.5)
            for idx, row in enumerate(the_getitem_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_getitem_table = self._reload()
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_getitem_table.iterrows()):
                self.assertTrue(np.all(second_getitem_table.read()[idx] ==
                                       second_getitem_table[idx]))
            second_getitem_table.append(
                [(30, 'mehmet nevvaf', 'timur', 65.5)])
            # iterrows(-1) starts at the last row, i.e. the appended one
            for idx, row in enumerate(second_getitem_table.iterrows(-1)):
                self.assertEqual(row['id'], 30)
                self.assertEqual(row['name'], compat.tobytes('mehmet nevvaf'))
                self.assertEqual(row['surname'], compat.tobytes('timur'))
                self.assertEqual(row['weight'], 65.5)
        traj2.f_store()

        traj3, third_getitem_table = self._reload()
        with StorageContextManager(traj3):
            for idx, row in enumerate(third_getitem_table.iterrows()):
                self.assertTrue(np.all(third_getitem_table.read()[idx] ==
                                       third_getitem_table[idx]))

    def test_table_setitem(self):
        """table[idx] = rows must overwrite in place and persist."""
        the_setitem_table = self.traj.results.shared_data.table
        self.assertTrue(the_setitem_table is self.shared_table)
        the_setitem_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_setitem_table, 1.5)
            for idx, row in enumerate(the_setitem_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_setitem_table = self._reload()
        second_setitem_table[0] = [(100, 'Mehmet Nevvaf', 'TIMUR', 75.5)]
        self.assertEqual(second_setitem_table.read(field='id')[0], 100)
        self.assertEqual(second_setitem_table.read(field='name')[0],
                         compat.tobytes('Mehmet Nevvaf'))
        self.assertEqual(second_setitem_table.read(field='surname')[0],
                         compat.tobytes('TIMUR'))
        self.assertEqual(second_setitem_table.read(field='weight')[0], 75.5)
        traj2.f_store()

        traj3, third_setitem_table = self._reload()
        self.assertEqual(third_setitem_table.read(field='id')[0], 100)
        self.assertEqual(third_setitem_table.read(field='name')[0],
                         compat.tobytes('Mehmet Nevvaf'))
        self.assertEqual(third_setitem_table.read(field='surname')[0],
                         compat.tobytes('TIMUR'))
        self.assertEqual(third_setitem_table.read(field='weight')[0], 75.5)

    def test_table_where(self):
        """A where() query over all four columns must match exactly one row."""
        the_where_table = self.traj.results.shared_data.table
        self.assertTrue(the_where_table is self.shared_table)
        the_where_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_where_table, 1.0)
            for idx, row in enumerate(the_where_table.iterrows()):
                self.assertEqual(row['id'], idx)
        self.traj.f_store()

        traj2, second_where_table = self._reload()
        with StorageContextManager(traj2):
            result = second_where_table.where(
                '(id == 2)&(name == b"mehmet 2")&(surname =='
                'b"Timur")&(weight == 67.5)')
            there = False
            for row in result:
                there = True
            self.assertTrue(there)

    def test_table_flush(self):
        """Rows must be visible after flush(), both before and after reload."""
        the_flush_table = self.traj.results.shared_data.table
        self.assertTrue(the_flush_table is self.shared_table)
        the_flush_table.create_shared_data(description=MyTable)
        with StorageContextManager(self.traj):
            self._fill_mehmet_rows(the_flush_table, 1.0)
            for idx, row in enumerate(the_flush_table.iterrows()):
                self.assertEqual(row['id'], idx)
                self.assertEqual(row['name'],
                                 compat.tobytes('mehmet %d' % idx))
                self.assertEqual(row['surname'], compat.tobytes('Timur'))
                self.assertEqual(row['weight'], 65.5 + idx)
        self.traj.f_store()

        traj2, second_flush_table = self._reload()
        with StorageContextManager(traj2):
            for idx, row in enumerate(second_flush_table.iterrows()):
                self.assertEqual(row['id'], idx)
                self.assertEqual(row['name'],
                                 compat.tobytes('mehmet %d' % idx))
                self.assertEqual(row['surname'], compat.tobytes('Timur'))
                self.assertEqual(row['weight'], 65.5 + idx)
            # Append one more row and flush again; it must show up in order
            row = second_flush_table.row
            for i in range(10, 11):
                row['id'] = i
                row['name'] = 'mehmet %d' % i
                row['surname'] = 'Timur'
                row['weight'] = 65.5 + i
                row.append()
            second_flush_table.flush()
            for idx, row in enumerate(second_flush_table.iterrows()):
                self.assertEqual(row['id'], idx)
                self.assertEqual(row['name'],
                                 compat.tobytes('mehmet %d' % idx))
                self.assertEqual(row['surname'], compat.tobytes('Timur'))
                self.assertEqual(row['weight'], 65.5 + idx)
def test_conversions(self):
    """Round-trip conversion between shared results and ordinary in-memory
    results (``make_ordinary_result`` / ``make_shared_result``) for arrays,
    tables, pandas frames, lists and tuples.

    NOTE(review): this source was whitespace-mangled; the block scoping of
    the ``with StorageContextManager`` sections below is a reconstruction —
    confirm against the original formatting.
    """
    filename = make_temp_dir('hdf5manipulation.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    traj.v_standard_result = SharedResult
    traj.f_store(only_init=True)
    traj.f_add_result('shared_data')

    # Shared array and table attached under `shared_data`
    thedata = np.zeros((1000, 1000))
    myarray = SharedArray('array', traj.shared_data, trajectory=traj)
    traj.shared_data['array'] = myarray
    mytable = SharedTable('t1', traj.shared_data, trajectory=traj)
    traj.shared_data['t1'] = mytable

    # Two shared pandas frames under `shared.dfs`
    dadict = {'hi': [1, 2, 3, 4, 5],
              'shu': ['bi', 'du', 'da', 'ha', 'hui']}
    dadict2 = {'answer': [42]}
    res = traj.f_add_result('shared.dfs')
    res['df'] = SharedPandasFrame()
    res['df'].create_shared_data(data=pd.DataFrame(dadict), trajectory=traj)
    frame = SharedPandasFrame('df1', traj.f_get('shared.dfs'),
                              trajectory=traj, add_to_parent=True)
    frame.create_shared_data(data=pd.DataFrame(dadict2), )
    res['df1'] = frame

    # Ordinary (non-shared) results to be converted the other way later
    traj.f_add_result('mylist', [1, 2, 3])
    traj.f_add_result('my.mytuple', k=(1, 2, 3), wa=42)
    traj.f_add_result('my.myarray', np.zeros((50, 50)))
    traj.f_add_result('my.myframe', data=pd.DataFrame(dadict2))
    traj.f_add_result('my.mytable', ObjectTable(data=dadict2))

    myarray.create_shared_data(data=thedata)
    mytable.create_shared_data(first_row={'hi': compat.tobytes('hi'),
                                          'huhu': np.ones(3)})
    traj.f_store()

    data = myarray.read()
    myarray.get_data_node()
    self.assertTrue(np.all(data == thedata))

    # In-place write through the shared array while the store is open
    with StorageContextManager(traj):
        myarray[2, 2] = 10
        data = myarray.read()
        self.assertTrue(data[2, 2] == 10)
    # The read copy remains valid after the store closes
    self.assertTrue(data[2, 2] == 10)
    self.assertFalse(traj.v_storage_service.is_open)

    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)

    # shared array -> plain numpy array
    make_ordinary_result(traj.shared_data, 'array', trajectory=traj)
    array = traj.shared_data.array
    self.assertTrue(isinstance(array, np.ndarray))
    thedata[2, 2] = 10
    self.assertTrue(np.all(array == thedata))

    # shared table -> ObjectTable
    make_ordinary_result(traj.shared_data, 't1', trajectory=traj, )
    t1 = traj.shared_data.t1
    self.assertTrue(isinstance(t1, ObjectTable))
    self.assertTrue(np.all(t1['huhu'][0] == np.ones(3)))

    # shared pandas frame -> plain DataFrame
    dfs = traj.shared.dfs
    make_ordinary_result(traj.shared.dfs, 'df', trajectory=traj)
    theframe = dfs.f_get('df')
    self.assertTrue(isinstance(dfs, Result))
    self.assertTrue(isinstance(theframe, pd.DataFrame))
    self.assertTrue(theframe['hi'][0] == 1)

    # plain list -> shared -> mutate -> back to ordinary
    listres = traj.f_get('mylist')
    listres = make_shared_result(listres, 0, trajectory=traj)
    with StorageContextManager(traj):
        self.assertTrue(listres[0][2] == 3)
        listres[0][0] = 4
    self.assertTrue(listres[0][0] == 4)
    listres = make_ordinary_result(listres, 0, trajectory=traj)
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    mylist = traj.mylist
    self.assertTrue(isinstance(listres, Result))
    self.assertTrue(mylist[0] == 4)
    self.assertTrue(isinstance(mylist, list))

    # Converting a non-existing item name must raise
    mytuple = traj.mytuple
    with self.assertRaises(AttributeError):
        mytuple = make_shared_result(mytuple, 'mylist', traj,
                                     new_class=SharedArray)
    mytuple = make_shared_result(mytuple, 'k', traj, new_class=SharedArray)
    self.assertTrue(mytuple.k[1] == 2)
    mytuple = make_ordinary_result(mytuple, 'k', trajectory=traj)
    self.assertTrue(isinstance(mytuple.k, tuple))
    self.assertTrue(mytuple.k[2] == 3)

    # DataFrame result round trip
    myframe = traj.myframe
    myframe = make_shared_result(myframe, 'data', traj)
    theframe = myframe.data.read()
    self.assertTrue(theframe['answer'][0] == 42)
    myframe = make_ordinary_result(myframe, 'data', trajectory=traj)
    traj.f_load_item(myframe)
    self.assertTrue(myframe.data['answer'][0] == 42)

    # ObjectTable result round trip
    mytable = traj.f_get('mytable')
    mytable = make_shared_result(mytable, 0, traj)
    self.assertTrue(isinstance(mytable[0], SharedTable))
    rows = mytable.mytable.read()
    self.assertTrue(rows[0][0] == 42)
    mytable = make_ordinary_result(mytable, 0, trajectory=traj)
    self.assertTrue(isinstance(mytable, Result))
    self.assertTrue(mytable[0]['answer'][0] == 42)
def test_delete_whole_subtrees(self):
    """Remove and delete whole subtrees and check the HDF5 overview tables.

    `f_remove_item` only drops nodes from RAM (data stays on disk), while
    `f_delete_item` also erases them from the HDF5 file.  Uses
    ``assertEqual`` instead of the original ``assertTrue(x == y)`` so
    failures report both values.
    """
    filename = make_temp_dir('testdeltree.hdf5')
    traj = Trajectory(name='TestDelete', filename=filename,
                      large_overview_tables=True)
    res = traj.f_add_result('mytest.yourtest.test', a='b', c='d')
    dpar = traj.f_add_derived_parameter('mmm.gr.dpdp', 666)
    res = traj.f_add_result('hhh.ll', a='b', c='d')
    res = traj.f_add_derived_parameter('hhh.gg', 555)
    traj.f_store()

    # Overview tables on disk list the two results and two derived params
    with ptcompat.open_file(filename) as fh:
        daroot = ptcompat.get_child(fh.root, traj.v_name)
        dpar_table = daroot.overview.derived_parameters_overview
        self.assertEqual(len(dpar_table), 2)
        res_table = daroot.overview.results_overview
        self.assertEqual(len(res_table), 2)

    # Removing/deleting a group without recursive=True must fail
    with self.assertRaises(TypeError):
        traj.f_remove_item(traj.yourtest)
    with self.assertRaises(TypeError):
        traj.f_delete_item(traj.yourtest)

    # f_remove_item only drops from RAM; reloading brings the data back
    traj.f_remove_item(traj.yourtest, recursive=True)
    self.assertTrue('mytest' in traj)
    self.assertTrue('yourtest' not in traj)
    traj.f_load(load_data=2)
    self.assertTrue('yourtest.test' in traj)

    # f_delete_item erases from disk as well
    traj.f_delete_item(traj.yourtest, recursive=True,
                       remove_from_trajectory=True)
    traj.f_delete_item(traj.mmm, recursive=True,
                       remove_from_trajectory=True)
    traj.f_load(load_data=2)
    self.assertTrue('yourtest.test' not in traj)
    self.assertTrue('yourtest' not in traj)

    # Overview rows are kept even after the nodes themselves were deleted
    with ptcompat.open_file(filename) as fh:
        daroot = ptcompat.get_child(fh.root, traj.v_name)
        dpar_table = daroot.overview.derived_parameters_overview
        self.assertEqual(len(dpar_table), 2)
        res_table = daroot.overview.results_overview
        self.assertEqual(len(res_table), 2)

    traj.f_add_parameter('ggg', 43)
    traj.f_add_parameter('hhh.mmm', 45)
    traj.f_add_parameter('jjj', 55)
    traj.f_add_parameter('hhh.nnn', 55555)
    traj.f_explore({'ggg': [1, 2, 3]})
    traj.f_store()

    with ptcompat.open_file(filename) as fh:
        daroot = ptcompat.get_child(fh.root, traj.v_name)
        par_table = daroot.overview.parameters_overview
        self.assertEqual(len(par_table), 4)

    traj.f_delete_item('par.hhh', recursive=True,
                       remove_from_trajectory=True)
    traj.f_add_parameter('saddsdfdsfd', 111)
    traj.f_store()

    with ptcompat.open_file(filename) as fh:
        daroot = ptcompat.get_child(fh.root, traj.v_name)
        par_table = daroot.overview.parameters_overview
        self.assertEqual(len(par_table), 5)

    # with self.assertRaises(TypeError):
    #     # We cannot delete something containing an explored parameter
    #     traj.f_delete_item('par', recursive=True)

    # Deleting an explored parameter itself must fail
    with self.assertRaises(TypeError):
        traj.f_delete_item('ggg')
def test_all_arrays(self):
    """Round-trip all four shared-array flavors (Array, CArray, EArray, VLArray).

    Creates one result holding each array type, stores it, reloads the
    trajectory from disk, mutates every array inside a single
    ``StorageContextManager`` session, reloads again, and finally verifies
    that every mutation was persisted to the HDF5 file.
    """
    filename = make_temp_dir('hdf5arrays.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    # BUGFIX: ``np.float`` was a deprecated alias for the builtin ``float``
    # (i.e. float64) and was removed in NumPy 1.24 — use ``np.float64`` directly.
    npearray = np.ones((2, 10, 3), dtype=np.float64)
    thevlarray = np.array(
        [compat.tobytes('j'), 22.2, compat.tobytes('gutter')])
    # Only initialize the file; shared data is created afterwards.
    traj.f_store(only_init=True)
    res = traj.f_add_result(SharedResult, 'arrays')
    res['carray'] = SharedCArray()
    res['carray'].create_shared_data(shape=(10, 10), atom=pt.atom.FloatAtom())
    res['earray'] = SharedEArray()
    res['earray'].create_shared_data(obj=npearray)
    res['vlarray'] = SharedVLArray()
    res['vlarray'].create_shared_data(obj=thevlarray)
    res['array'] = SharedArray()
    res['array'].create_shared_data(data=npearray)
    traj.f_store()

    # Reload from disk and mutate each array type inside one open-store session.
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    to_append = [44, compat.tobytes('k')]
    with StorageContextManager(traj):
        a1 = traj.arrays.array
        a1[0, 0, 0] = 4.0
        a2 = traj.arrays.carray
        a2[0, 1] = 4
        a4 = traj.arrays.vlarray
        a4.append(to_append)
        a3 = traj.arrays.earray
        a3.append(np.zeros((1, 10, 3)))

    # Reload once more and verify every mutation survived the round trip.
    traj = load_trajectory(name=trajname, filename=filename, load_all=2,
                           dynamic_imports=SharedResult)
    with StorageContextManager(traj):
        a1 = traj.arrays.array
        self.assertTrue(a1[0, 0, 0] == 4.0)
        a2 = traj.arrays.carray
        self.assertTrue(a2[0, 1] == 4)
        a3 = traj.arrays.earray
        # Original 2 rows plus the appended zero row.
        self.assertTrue(a3.read().shape == (3, 10, 3))
        a4 = traj.arrays.vlarray
        # Exactly two variable-length rows: the original and the appended one.
        for idx, x in enumerate(a4):
            if idx == 0:
                self.assertTrue(np.all(x == np.array(thevlarray)))
            elif idx == 1:
                self.assertTrue(np.all(x == np.array(to_append)))
            else:
                raise RuntimeError()
# But if we are more specific we have again a unique finding characters = traj.starwars.characters # Now let's see what fast access is: print('The name of the actor playing Luke is %s.' % traj.luke_skywalker) # And now what happens if you forbid it traj.v_fast_access=False print('The object found for luke_skywalker is `%s`.' % str(traj.luke_skywalker)) #Let's store the trajectory: traj.f_store() # That was easy, let's assume we already completed a simulation and now we add a veeeery large # result that we want to store to disk immediately and than empty it traj.f_add_result('starwars.gross_income_of_film', amount=10.1 ** 11, currency='$$$', comment='George Lucas is rich, dude!') # This is a large number, we better store it and than free the memory: traj.f_store_item('gross_income_of_film') traj.gross_income_of_film.f_empty() # Now lets reload the trajectory del traj traj = Trajectory(filename=filename) # We want to load the last trajectory in the file, therefore index = -1 # We want to load the parameters, therefore load_parameters=2 # We only want to load the skeleton of the results, so load_results=1 traj.f_load(index=-1, load_parameters=2, load_results=1) # Let's check if our result is really empty if traj.gross_income_of_film.f_is_empty():
def test_storing_and_manipulating(self):
    """Store shared arrays/tables, manipulate them, and verify persistence.

    Builds one result carrying a shared array plus three shared tables,
    writes rows/values both inside and outside ``StorageContextManager``
    sessions, then reloads the trajectory from disk and checks that all
    manipulations were persisted.
    """
    filename = make_temp_dir('hdf5manipulation.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    payload = np.zeros((1000, 1000))

    # One result node hosting an array and three tables.
    res = traj.f_add_result(SharedResult, 'shared')
    shared_arr = SharedArray('array', res, trajectory=traj, add_to_parent=True)
    table_one = SharedTable('t1', res, trajectory=traj, add_to_parent=True)
    table_two = SharedTable('t2', res, trajectory=traj, add_to_parent=True)
    table_three = SharedTable('t3', res, trajectory=traj, add_to_parent=True)

    # Initialize the file first; shared data is created against the open store.
    traj.f_store(only_init=True)
    shared_arr.create_shared_data(data=payload)
    table_one.create_shared_data(first_row={'hi': compat.tobytes('hi'),
                                            'huhu': np.ones(3)})
    table_two.create_shared_data(description={'ha': pt.StringCol(2, pos=0),
                                              'haha': pt.FloatCol(pos=1)})
    table_three.create_shared_data(description={'ha': pt.StringCol(2, pos=0),
                                                'haha': pt.FloatCol(pos=1)})
    traj.f_store()

    record = {'ha': 'hu', 'haha': 4.0}

    # Accessing a table row outside an open store session must fail.
    with self.assertRaises(TypeError):
        traj.shared.t2.row

    # Append 11 identical rows to t2 within a managed session.
    with StorageContextManager(traj) as cm:
        row = traj.shared.t2.row
        for _ in range(11):
            for col, value in record.items():
                row[col] = value
            row.append()
        traj.shared.t3.flush()

    data = shared_arr.read()
    shared_arr.get_data_node()
    self.assertTrue(np.all(data == payload))

    # Mutate a single array cell inside a session and read it back.
    with StorageContextManager(traj):
        shared_arr[2, 2] = 10
        data = shared_arr.read()
        self.assertTrue(data[2, 2] == 10)

    # The in-memory copy keeps the value, and the store is closed again.
    self.assertTrue(data[2, 2] == 10)
    self.assertFalse(traj.v_storage_service.is_open)

    # Reload from disk and re-bind the shared items to the fresh trajectory.
    traj = load_trajectory(name=trajname, filename=filename)
    traj.f_load(load_data=2)
    traj.shared.t2.traj = traj
    traj.shared.t1.traj = traj
    traj.shared.array.traj = traj

    # All manipulations must have been persisted.
    self.assertTrue(traj.shared.t2.nrows == 11,
                    '%s != 11' % str(traj.shared.t2.nrows))
    self.assertTrue(traj.shared.t2[0]['ha'] == compat.tobytes('hu'),
                    traj.shared.t2[0]['ha'])
    self.assertTrue(traj.shared.t2[1]['ha'] == compat.tobytes('hu'),
                    traj.shared.t2[1]['ha'])
    self.assertTrue('huhu' in traj.shared.t1.colnames)
    self.assertTrue(traj.shared.array[2, 2] == 10)