def test_version_mismatch(self):
    traj = Trajectory(name='TestVERSION',
                      filename=make_temp_dir('testversionmismatch.hdf5'),
                      add_time=True)

    traj.f_add_parameter('group1.test', 42)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    traj._version = '0.1a.1'
    traj.f_store()

    traj2 = Trajectory(name=traj.v_name, add_time=False,
                       filename=make_temp_dir('testversionmismatch.hdf5'))

    with self.assertRaises(pex.VersionMismatchError):
        traj2.f_load(load_parameters=2, load_results=2)

    traj2.f_load(load_parameters=2, load_results=2, force=True)

    self.compare_trajectories(traj, traj2)

    get_root_logger().info('Mismatch testing done!')
def test_logging_stdout(self):
    filename = 'teststdoutlog.hdf5'
    filename = make_temp_dir(filename)
    folder = make_temp_dir('logs')
    env = Environment(trajectory=make_trajectory_name(self),
                      filename=filename,
                      log_config=get_log_config(),
                      # log_levels=logging.CRITICAL, # needed for the test
                      log_stdout=('STDOUT', 50),
                      # log_folder=folder
                      )

    env.f_run(log_error)
    traj = env.v_traj
    path = get_log_path(traj)

    mainstr = 'sTdOuTLoGGinG'
    print(mainstr)
    env.f_disable_logging()

    mainfilename = os.path.join(path, 'LOG.txt')
    with open(mainfilename, mode='r') as mainf:
        full_text = mainf.read()

    self.assertTrue(mainstr in full_text)
    self.assertTrue('4444444' not in full_text)
    self.assertTrue('DEBUG' not in full_text)
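# ``log_error`` is defined elsewhere in the test suite. A minimal, hypothetical
# stand-in consistent with the assertions above (an ERROR message ends up in the
# log file, a DEBUG message containing '4444444' does not) might look like the
# sketch below; the exact messages are assumptions, not the original helper.
def log_error(traj):
    logger = logging.getLogger('pypet.test')
    logger.error('An intentional ERROR message emitted during the run')
    logger.debug('4444444')  # should be filtered out by the configured log level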
def test_store_and_load_large_dictionary(self):
    traj = Trajectory(name='Testlargedict', filename=make_temp_dir('large_dict.hdf5'))

    large_dict = {}
    for irun in range(1025):
        large_dict['item_%d' % irun] = irun

    large_dict2 = {}
    for irun in range(33):
        large_dict2['item_%d' % irun] = irun

    traj.f_add_result('large_dict', large_dict, comment='Huge_dict!')
    traj.f_add_result('large_dict2', large_dict2, comment='Not so large dict!')

    traj.f_store()

    traj_name = traj.v_name

    traj2 = Trajectory(filename=make_temp_dir('large_dict.hdf5'))
    traj2.f_load(name=traj_name, load_data=2)

    self.compare_trajectories(traj, traj2)
def make_environment(self, idx, filename):
    # self.filename = '../../experiments/tests/HDF5/test.hdf5'
    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))
    self.cnt_folder = make_temp_dir(os.path.join('experiments', 'tests', 'cnt'))
    trajname = 'Test%d' % idx + '_' + make_trajectory_name(self)

    env = Environment(trajectory=trajname,
                      filename=filename,
                      file_title=trajname,
                      log_stdout=False,
                      log_config=get_log_config(),
                      continuable=True,
                      continue_folder=self.cnt_folder,
                      delete_continue=False,
                      large_overview_tables=True)

    self.envs.append(env)
    self.trajs.append(env.v_trajectory)
def test_continueing_mp_custom(self):
    self.filenames = [make_temp_dir('test_continueing_mp_custom.hdf5'),
                      make_temp_dir('test_continueing_mp_custom2.hdf5')]

    self.envs = []
    self.trajs = []

    for irun, filename in enumerate(self.filenames):
        if isinstance(filename, int):
            filename = self.filenames[filename]

        self.make_environment(irun, filename, continuable=irun == 1)

    self.param_dict = {'x': 1.0, 'y': 2.0}

    for irun in range(len(self.filenames)):
        self.trajs[irun].f_add_parameter(CustomParameter, 'x', 1.0)
        self.trajs[irun].f_add_parameter(CustomParameter, 'y', 1.0)

    self.explore_mp(self.trajs[0])
    self.explore_mp(self.trajs[1])

    arg = 33
    for irun in range(len(self.filenames)):
        self.envs[irun].f_run(Multiply(), arg)

    traj_name = self.trajs[1].v_name
    continue_folder = os.path.join(self.cnt_folder, self.trajs[1].v_name)

    self.envs.pop()
    self.assertEqual(len(self.envs), 1)

    self.make_environment(1, self.filenames[1], continuable=True,
                          add_time=False, trajectory=traj_name)

    self.trajs[1] = self.envs[1].v_traj
    self.trajs[1].f_load(load_data=pypetconstants.LOAD_NOTHING)

    self._remove_nresults(self.trajs[1], 3, continue_folder)

    results = self.envs[1].resume(trajectory_name=traj_name)
    results = sorted(results, key=lambda x: x[0])

    for irun in range(len(self.filenames)):
        self.trajs[irun].f_load(
            load_parameters=pypetconstants.OVERWRITE_DATA,
            load_derived_parameters=pypetconstants.OVERWRITE_DATA,
            load_results=pypetconstants.OVERWRITE_DATA,
            load_other_data=pypetconstants.OVERWRITE_DATA)

    self.compare_trajectories(self.trajs[1], self.trajs[0])

    for run_name in self.trajs[0].f_iter_runs():
        z = (self.trajs[0].v_idx, self.trajs[0].crun.z)
        self.assertTrue(z in results, '%s not in %s' % (z, results))

    self.assertTrue(len(self.trajs[-1]) == len(results))
def make_environment_mp(self, idx, filename):
    # self.filename = '../../experiments/tests/HDF5/test.hdf5'
    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))
    self.cnt_folder = make_temp_dir(os.path.join('experiments', 'tests', 'cnt'))
    trajname = 'Test%d' % idx + '_' + make_trajectory_name(self)

    env = Environment(trajectory=trajname,
                      dynamic_imports=[CustomParameter],
                      filename=filename,
                      file_title=trajname,
                      log_stdout=False,
                      purge_duplicate_comments=False,
                      log_config=get_log_config(),
                      continuable=True,
                      continue_folder=self.cnt_folder,
                      delete_continue=False,
                      multiproc=True,
                      use_pool=True,
                      ncores=4)

    self.envs.append(env)
    self.trajs.append(env.v_trajectory)
def test_store_and_load_large_dictionary(self):
    traj = Trajectory(name='Testlargedict',
                      filename=make_temp_dir('large_dict.hdf5'),
                      add_time=True)

    large_dict = {}
    for irun in range(1025):
        large_dict['item_%d' % irun] = irun

    large_dict2 = {}
    for irun in range(33):
        large_dict2['item_%d' % irun] = irun

    traj.f_add_result('large_dict', large_dict, comment='Huge_dict!')
    traj.f_add_result('large_dict2', large_dict2, comment='Not so large dict!')

    traj.f_store()

    traj_name = traj.v_name

    traj2 = Trajectory(filename=make_temp_dir('large_dict.hdf5'), add_time=True)
    traj2.f_load(name=traj_name, load_data=2)

    self.compare_trajectories(traj, traj2)
def test_store_items_and_groups(self):
    traj = Trajectory(name='testtraj',
                      filename=make_temp_dir('teststoreitems.hdf5'),
                      add_time=True)

    traj.f_store()

    traj.f_add_parameter('group1.test', 42,
                         comment='TooLong' * pypetconstants.HDF5_STRCOL_MAX_COMMENT_LENGTH)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)

    traj.f_store_items(['test', 'testres', 'group1'])

    traj2 = Trajectory(name=traj.v_name, add_time=False,
                       filename=make_temp_dir('teststoreitems.hdf5'))
    traj2.f_load(load_parameters=2, load_results=2)

    traj.f_add_result('Im.stored.along.a.path', 43)
    traj.Im.stored.along.v_annotations['wtf'] = 4444
    traj.res.f_store_child('Im.stored.along.a.path')
    traj2.res.f_load_child('Im.stored.along.a.path', load_data=2)

    self.compare_trajectories(traj, traj2)
def test_migrations(self):
    traj = Trajectory(name='Testmigrate',
                      filename=make_temp_dir('migrate.hdf5'),
                      add_time=True)

    traj.f_add_result('I.am.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)

    traj.f_store()

    new_file = make_temp_dir('migrate2.hdf5')
    traj.f_migrate(filename=new_file)

    traj.f_store()

    new_traj = Trajectory()
    new_traj.f_migrate(new_name=traj.v_name, filename=new_file, in_store=True)
    new_traj.v_auto_load = True

    self.assertTrue(new_traj.results.I.am.a.mean.resu == 42)
def make_environment(self, idx, filename, continuable=True, delete_continue=False):
    # self.filename = '../../experiments/tests/HDF5/test.hdf5'
    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))
    self.cnt_folder = make_temp_dir(os.path.join('experiments', 'tests', 'cnt'))
    trajname = 'Test%d' % idx + '_' + make_trajectory_name(self)

    env = Environment(trajectory=trajname,
                      filename=filename,
                      file_title=trajname,
                      log_stdout=False,
                      log_config=get_log_config(),
                      continuable=continuable,
                      continue_folder=self.cnt_folder,
                      delete_continue=delete_continue,
                      large_overview_tables=True)

    self.envs.append(env)
    self.trajs.append(env.v_trajectory)
def test_store_items_and_groups(self):
    traj = Trajectory(name='testtraj', filename=make_temp_dir('teststoreitems.hdf5'))

    traj.f_store()

    traj.f_add_parameter('group1.test', 42,
                         comment='TooLong' * pypetconstants.HDF5_STRCOL_MAX_COMMENT_LENGTH)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)

    traj.f_store_items(['test', 'testres', 'group1'])

    traj2 = Trajectory(name=traj.v_name, add_time=False,
                       filename=make_temp_dir('teststoreitems.hdf5'))
    traj2.f_load(load_parameters=2, load_results=2)

    traj.f_add_result('Im.stored.along.a.path', 43)
    traj.Im.stored.along.v_annotations['wtf'] = 4444
    traj.res.f_store_child('Im.stored.along.a.path')
    traj2.res.f_load_child('Im.stored.along.a.path', load_data=2)

    self.compare_trajectories(traj, traj2)
def test_continueing_mp_custom(self):
    self.filenames = [make_temp_dir('test_continueing_mp_custom.hdf5'),
                      make_temp_dir('test_continueing_mp_custom2.hdf5')]

    self.envs = []
    self.trajs = []

    for irun, filename in enumerate(self.filenames):
        if isinstance(filename, int):
            filename = self.filenames[filename]

        self.make_environment(irun, filename, continuable=irun == 1)

    self.param_dict = {'x': 1.0, 'y': 2.0}

    for irun in range(len(self.filenames)):
        self.trajs[irun].f_add_parameter(CustomParameter, 'x', 1.0)
        self.trajs[irun].f_add_parameter(CustomParameter, 'y', 1.0)

    self.explore_mp(self.trajs[0])
    self.explore_mp(self.trajs[1])

    arg = 33
    for irun in range(len(self.filenames)):
        self.envs[irun].f_run(Multiply(), arg)

    traj_name = self.trajs[1].v_name
    continue_folder = os.path.join(self.cnt_folder, self.trajs[1].v_name)

    self.envs.pop()
    self.assertEqual(len(self.envs), 1)

    self.make_environment(1, self.filenames[1], continuable=True,
                          add_time=False, trajectory=traj_name)

    self.trajs[1] = self.envs[1].v_traj
    self.trajs[1].f_load(load_data=pypetconstants.LOAD_NOTHING)

    self._remove_nresults(self.trajs[1], 3, continue_folder)

    results = self.envs[1].f_continue(trajectory_name=traj_name)
    results = sorted(results, key=lambda x: x[0])

    for irun in range(len(self.filenames)):
        self.trajs[irun].f_load(
            load_parameters=pypetconstants.OVERWRITE_DATA,
            load_derived_parameters=pypetconstants.OVERWRITE_DATA,
            load_results=pypetconstants.OVERWRITE_DATA,
            load_other_data=pypetconstants.OVERWRITE_DATA)

    self.compare_trajectories(self.trajs[1], self.trajs[0])

    for run_name in self.trajs[0].f_iter_runs():
        z = (self.trajs[0].v_idx, self.trajs[0].crun.z)
        self.assertTrue(z in results, '%s not in %s' % (z, results))

    self.assertTrue(len(self.trajs[-1]) == len(results))
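# ``CustomParameter`` and ``Multiply`` live elsewhere in the test suite; the
# sketches below are plausible minimal stand-ins consistent with how they are
# used in the two tests above (their exact bodies are assumptions, not the
# original definitions). ``CustomParameter`` only needs to be a ``Parameter``
# subclass so that ``dynamic_imports`` is exercised on resume, and ``Multiply``
# is a callable run target that stores ``z = x * y * arg`` for every run.
class CustomParameter(Parameter):
    """Trivial Parameter subclass used to exercise ``dynamic_imports``."""
    pass


class Multiply(object):
    """Callable passed to ``env.f_run(Multiply(), arg)``."""
    def __call__(self, traj, arg):
        z = traj.x * traj.y * arg
        traj.f_add_result('z', z)
        return z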
def setUp(self):
    self.multiproc = True
    self.mode = 'LOCK'

    self.trajname = make_trajectory_name(self)
    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               '%s.hdf5' % self.trajname))
    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))

    random.seed()

    cap_dicts = (dict(cpu_cap=0.000001),  # Ensure that these are triggered
                 dict(memory_cap=(0.000001, 150.0)),
                 dict(swap_cap=0.000001))
    cap_dict = cap_dicts[CapTest.cap_count]

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_folder=self.logfolder,
                      logger_names=('pypet', 'test'),
                      log_levels='ERROR',
                      log_stdout=False,
                      results_per_run=5,
                      derived_parameters_per_run=5,
                      multiproc=True,
                      ncores=4,
                      use_pool=False,
                      niceness=check_nice(11),
                      **cap_dict)

    logging.getLogger('test').error('Using Cap: %s and file: %s' %
                                    (str(cap_dict), str(self.filename)))

    # Loop through all possible cap configurations
    # and test one at a time
    CapTest.cap_count += 1
    CapTest.cap_count = CapTest.cap_count % len(cap_dicts)

    traj = env.v_trajectory

    # Create some parameters
    self.param_dict = {}
    create_param_dict(self.param_dict)
    # Add the parameters to the trajectory
    add_params(traj, self.param_dict)

    # Remember the trajectory and the environment
    self.traj = traj
    self.env = env
def setUp(self):
    self.set_mode()

    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))

    random.seed()
    self.trajname = make_trajectory_name(self)
    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               'test%s.hdf5' % self.trajname))

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_stdout=self.log_stdout,
                      log_config=get_log_config(),
                      results_per_run=5,
                      wildcard_functions=self.wildcard_functions,
                      derived_parameters_per_run=5,
                      multiproc=self.multiproc,
                      ncores=self.ncores,
                      wrap_mode=self.mode,
                      use_pool=self.use_pool,
                      gc_interval=self.gc_interval,
                      freeze_input=self.freeze_input,
                      fletcher32=self.fletcher32,
                      complevel=self.complevel,
                      complib=self.complib,
                      shuffle=self.shuffle,
                      pandas_append=self.pandas_append,
                      pandas_format=self.pandas_format,
                      encoding=self.encoding,
                      niceness=self.niceness,
                      use_scoop=self.use_scoop,
                      port=self.port,
                      add_time=self.add_time,
                      timeout=self.timeout,
                      graceful_exit=self.graceful_exit)

    traj = env.v_trajectory
    traj.v_standard_parameter = Parameter

    # Create some parameters
    self.param_dict = {}
    create_param_dict(self.param_dict)
    # Add the parameters to the trajectory
    add_params(traj, self.param_dict)

    # Remember the trajectory and the environment
    self.traj = traj
    self.env = env
def test_merge_basic_with_separate_files_only_adding_more_trials(self):
    self.filenames = [make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge2.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge3.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge4.hdf5'))]

    self.merge_basic_only_adding_more_trials(True)
def test_basic_within_separate_file_and_skipping_duplicates_which_leads_to_one_remaining(self):
    self.filenames = [make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge2.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge3.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge4.hdf5'))]

    self.basic_and_skipping_duplicates_which_leads_to_one_remaining()
def test_merge_basic_with_separate_files_only_adding_more_trials_slow_merge(self):
    self.filenames = [make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'slow_merge2.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'slow_merge3.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'slow_merge4.hdf5'))]

    self.merge_basic_only_adding_more_trials(True, slow_merge=True)
def setUp(self):
    self.multiproc = True
    self.mode = 'LOCK'

    self.trajname = make_trajectory_name(self)
    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               '%s.hdf5' % self.trajname))
    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))

    random.seed()

    cap_dicts = (dict(cpu_cap=0.000001),  # Ensure that these are triggered
                 dict(memory_cap=(0.000001, 150.0)),
                 dict(swap_cap=0.000001))
    cap_dict = cap_dicts[CapTest.cap_count]

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_folder=self.logfolder,
                      logger_names=('pypet', 'test'),
                      log_levels='ERROR',
                      log_stdout=False,
                      results_per_run=5,
                      derived_parameters_per_run=5,
                      multiproc=True,
                      ncores=4,
                      use_pool=False,
                      niceness=check_nice(11),
                      **cap_dict)

    logging.getLogger('test').error('Using Cap: %s and file: %s' %
                                    (str(cap_dict), str(self.filename)))

    # Loop through all possible cap configurations
    # and test one at a time
    CapTest.cap_count += 1
    CapTest.cap_count = CapTest.cap_count % len(cap_dicts)

    traj = env.v_trajectory

    # Create some parameters
    self.param_dict = {}
    create_param_dict(self.param_dict)
    # Add the parameters to the trajectory
    add_params(traj, self.param_dict)

    # Remember the trajectory and the environment
    self.traj = traj
    self.env = env
def test_basic_within_separate_file_and_skipping_duplicates_which_leads_to_one_remaining(self):
    self.filenames = [make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge2_one_remaining.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge3_one_remaining.hdf5')),
                      make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge4_one_remaining.hdf5'))]

    self.basic_and_skipping_duplicates_which_leads_to_one_remaining()
def setUp(self):
    self.set_mode()

    logging.basicConfig(level=logging.ERROR)

    self.logfolder = make_temp_dir(os.path.join('experiments', 'tests', 'Log'))

    random.seed()
    self.trajname = make_trajectory_name(self)
    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               'test%s.hdf5' % self.trajname))

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_stdout=self.log_stdout,
                      log_config=get_log_config(),
                      results_per_run=5,
                      derived_parameters_per_run=5,
                      multiproc=self.multiproc,
                      ncores=self.ncores,
                      wrap_mode=self.mode,
                      use_pool=self.use_pool,
                      fletcher32=self.fletcher32,
                      complevel=self.complevel,
                      complib=self.complib,
                      shuffle=self.shuffle,
                      pandas_append=self.pandas_append,
                      pandas_format=self.pandas_format,
                      encoding=self.encoding)

    traj = env.v_trajectory
    traj.v_standard_parameter = Parameter

    # Create some parameters including links
    create_link_params(traj)
    # Explore the parameters
    explore_params(traj)

    # Remember the trajectory and the environment
    self.traj = traj
    self.env = env
def test_hdf5_store_load_monitorresult(self):
    traj_name = make_trajectory_name(self)
    file_name = make_temp_dir(os.path.join('brian2', 'tests', 'hdf5',
                                           'test_%s.hdf5' % traj_name))
    env = Environment(trajectory=traj_name,
                      filename=file_name,
                      log_config=get_log_config(),
                      dynamic_imports=[Brian2MonitorResult],
                      add_time=False,
                      storage_service=HDF5StorageService)

    traj = env.v_trajectory
    traj.v_standard_result = Brian2MonitorResult

    traj.f_add_result('brian2.single.millivolts_single_a', 10 * mvolt,
                      comment='single value a')
    traj.f_add_result('brian2.single.millivolts_single_c', 11 * mvolt,
                      comment='single value b')
    traj.f_add_result('brian2.array.millivolts_array_a', [11, 12] * mvolt,
                      comment='array')
    traj.f_add_result('mV1', 42.0 * mV)
    # results can hold much more than a single data item:
    traj.f_add_result('ampere1', 1 * mA, 44, test=300 * mV, test2=[1, 2, 3],
                      test3=np.array([1, 2, 3]) * mA,
                      comment='Result keeping track of many things')
    traj.f_add_result('integer', 16)
    traj.f_add_result('kHz05', 0.5 * kHz)
    traj.f_add_result('nested_array', np.array([[6., 7., 8.], [9., 10., 11.]]) * ms)
    traj.f_add_result('b2a', np.array([1., 2.]) * mV)

    traj.f_store()

    traj2 = load_trajectory(filename=file_name, name=traj_name,
                            dynamic_imports=[Brian2MonitorResult],
                            load_data=2)

    # traj._logger.error('traj :' + str(traj))
    # traj._logger.error('traj2:' + str(traj2))
    self.compare_trajectories(traj, traj2)
def test_errors(self):
    tmp = make_temp_dir('cont')

    if dill is not None:
        env1 = Environment(continuable=True, continue_folder=tmp,
                           log_config=None, filename=self.filename)
        with self.assertRaises(ValueError):
            env1.f_run_map(multiply_args, [1], [2], [3])

    with self.assertRaises(ValueError):
        Environment(multiproc=True, use_pool=False, freeze_input=True,
                    filename=self.filename, log_config=None)

    env3 = Environment(log_config=None, filename=self.filename)
    with self.assertRaises(ValueError):
        env3.f_run_map(multiply_args)

    with self.assertRaises(ValueError):
        Environment(use_scoop=True, immediate_postproc=True)

    with self.assertRaises(ValueError):
        Environment(use_pool=True, immediate_postproc=True)

    with self.assertRaises(ValueError):
        Environment(continuable=True, wrap_mode='QUEUE', continue_folder=tmp)

    with self.assertRaises(ValueError):
        Environment(use_scoop=True, wrap_mode='QUEUE')

    with self.assertRaises(ValueError):
        Environment(automatic_storing=False, continuable=True, continue_folder=tmp)

    with self.assertRaises(ValueError):
        Environment(port='www.nosi.de', wrap_mode='LOCK')
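# ``multiply_args`` is one of the shared run functions of the test suite. A
# hypothetical minimal version matching its use with
# ``f_run_map(multiply_args, [1], [2], [3])`` (one positional argument per
# iterable) could look like this; the body is an assumption for illustration:
def multiply_args(traj, arg1, arg2, arg3):
    z = traj.x * traj.y * arg1 * arg2 * arg3
    traj.f_add_result('z', z)
    return z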
def test_storage_and_loading(self):
    filename = make_temp_dir('linktest.hdf5')
    traj = Trajectory(filename=filename)

    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)

    traj.par.f_add_link('gg', res)
    traj.f_add_link('hh', res)
    traj.f_add_link('jj', traj.par)
    traj.f_add_link('ii', res)

    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.test.f_add_link('circle2', traj.test.circle1.circle2)

    traj.f_add_parameter_group('test.ab.bc.cd')
    traj.cd.f_add_link(traj.test)
    traj.test.f_add_link(traj.cd)

    traj.f_store()

    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)

    self.assertTrue(traj.kk == traj2.gg, '%s != %s' % (traj.kk, traj2.gg))
    self.assertTrue(traj.cd.test is traj.test)

    self.assertEqual(len(traj._linked_by), len(traj2._linked_by))
    self.compare_trajectories(traj, traj2)

    self.assertTrue('jj' in traj2._nn_interface._links_count)
    traj2.f_remove_child('jj')
    self.assertTrue('jj' not in traj2._nn_interface._links_count)
    traj2.f_remove_child('hh')
    traj2.f_remove_child('ii')

    traj2.f_remove_child('parameters', recursive=True)

    traj2.v_auto_load = True

    group = traj2.par.test2.circle2
    self.assertTrue(group is traj2.test)

    retest = traj2.test.circle1
    self.assertTrue(retest is traj2.test2)

    self.assertTrue(traj2.test.circle2 is traj2.test)
    self.assertTrue(traj2.hh == traj2.res.kk)

    traj2.v_auto_load = False
    traj2.f_load_child('jj')
    self.assertTrue(traj2.jj is traj2.par)

    traj2.f_load(load_data=2)
    self.assertTrue(traj2.ii == traj2.res.kk)
def setUp(self):
    env = Environment(trajectory='Test_' + repr(time.time()).replace('.', '_'),
                      filename=make_temp_dir(os.path.join('experiments', 'tests',
                                                          'briantests', 'HDF5',
                                                          'briantest.hdf5')),
                      file_title='test',
                      log_config=get_log_config(),
                      dynamic_imports=['pypet.brian.parameter.BrianParameter',
                                       BrianMonitorResult],
                      multiproc=False)

    traj = env.v_trajectory

    # env._set_standard_storage()
    # env._hdf5_queue_writer._hdf5storageservice = LazyStorageService()
    # traj.set_storage_service(LazyStorageService())

    add_params(traj)
    # traj.mode = 'Parallel'

    traj.f_explore(cartesian_product({traj.f_get('N').v_full_name: [50, 60],
                                      traj.f_get('tauw').v_full_name: [30 * ms, 40 * ms]}))

    self.traj = traj
    self.env = env
def test_get_default(self):
    traj = Trajectory(name='Testgetdefault',
                      filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True

    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')

    val = traj.f_get_default('jjjjjjjjjj', 555)
    self.assertTrue(val == 555)

    traj.f_store()
    traj.f_remove_child('results', recursive=True)

    val = traj.f_get_default('res.I.am.crun.a.mean.answ', 444, auto_load=True)
    self.assertTrue(val == 444)

    val = traj.f_get_default('res.I.am.crun.a.mean.resu', auto_load=True,
                             fast_access=True)
    self.assertTrue(val == 42)

    with self.assertRaises(Exception):
        traj.kdsfdsf
def test_continueing_remove_completed(self):
    self.filenames = [make_temp_dir('test_continueing_remove_completed.hdf5')]

    self.envs = []
    self.trajs = []

    for irun, filename in enumerate(self.filenames):
        if isinstance(filename, int):
            filename = self.filenames[filename]

        self.make_environment(irun, filename, continuable=True, delete_continue=True)

    self.param_dict = {}
    create_param_dict(self.param_dict)

    for irun in range(len(self.filenames)):
        add_params(self.trajs[irun], self.param_dict)

    self.explore(self.trajs[0])

    for irun in range(len(self.filenames)):
        self.make_run(self.envs[irun])

    traj_name = self.trajs[0].v_name
    continue_folder = os.path.join(self.cnt_folder, self.trajs[0].v_name)
    self.assertFalse(os.path.isdir(continue_folder))
def test_partially_delete_stuff(self):
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    self.assertTrue('a' in res)
    traj.f_delete_item(res, delete_only=['a'], remove_from_item=True)

    self.assertTrue('c' in res)
    self.assertTrue('a' not in res)

    res['a'] = 'offf'
    self.assertTrue('a' in res)

    traj.f_load(load_results=3)

    self.assertTrue('a' not in res)
    self.assertTrue('c' in res)

    traj.f_delete_item(res, remove_from_trajectory=True)

    self.assertTrue('results' in traj)
    self.assertTrue(res not in traj)
def test_loading_as_new(self):
    filename = make_temp_dir('asnew.h5')
    traj = Trajectory(name='TestPartial', filename=filename, add_time=True)

    traj.f_add_parameter('x', 3)
    traj.f_add_parameter('y', 2)

    traj.f_explore({'x': [12, 3, 44], 'y': [1, 23, 4]})

    traj.f_store()

    traj = load_trajectory(name=traj.v_name, filename=filename)
    with self.assertRaises(TypeError):
        traj.f_shrink()

    traj = load_trajectory(name=traj.v_name, filename=filename, as_new=True,
                           new_name='TestTraj', add_time=False)

    self.assertTrue(traj.v_name == 'TestTraj')

    self.assertEqual(len(traj), 3)
    self.assertEqual(len(traj._explored_parameters), 2)

    traj.f_shrink()
    self.assertTrue(len(traj) == 1)
def test_auto_load(self):
    traj = Trajectory(name='Testautoload', filename=make_temp_dir('autoload.hdf5'))
    traj.v_auto_load = True

    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)

    traj.f_store()

    ffa = traj.f_get('ffa')
    ffa.f_unlock()
    ffa.f_empty()
    self.assertTrue(ffa.f_is_empty())

    traj.f_remove_child('results', recursive=True)

    # check auto load
    val = traj.res.I.am.crun.a.mean.resu
    self.assertTrue(val == 42)

    val = traj.ffa
    self.assertTrue(val == 42)

    with self.assertRaises(pex.DataNotInStorageError):
        traj.kdsfdsf
def test_removal_of_error_parameter(self):
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)

    traj.f_add_result('iii', 42)
    traj.f_add_result(FakeResult, 'j.j.josie', 43)

    file = traj.v_storage_service.filename
    traj.f_store(only_init=True)
    with self.assertRaises(RuntimeError):
        traj.f_store()

    with ptcompat.open_file(file, mode='r') as fh:
        jj = ptcompat.get_node(fh, where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie' not in jj)

    traj.j.j.f_remove_child('josie')
    traj.j.j.f_add_result(FakeResult2, 'josie2', 444)

    traj.f_store()
    with self.assertRaises(pex.NoSuchServiceError):
        traj.f_store_child('results', recursive=True)

    with ptcompat.open_file(file, mode='r') as fh:
        jj = ptcompat.get_node(fh, where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie2' in jj)
        josie2 = ptcompat.get_child(jj, 'josie2')
        self.assertTrue('hey' in josie2)
        self.assertTrue('fail' not in josie2)
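# ``FakeResult`` and ``FakeResult2`` are defined in the surrounding test module.
# The assertions above only require that ``FakeResult`` blows up whenever it is
# stored, and that ``FakeResult2`` manages to store an item called 'hey' before
# failing on an unstorable item called 'fail'. A hypothetical sketch, assuming
# pypet's convention of overriding ``Result._store``, is given below; the real
# definitions may differ.
from pypet import Result


class FakeResult(Result):
    def _store(self):
        raise RuntimeError("I won't store")


class FakeResult2(Result):
    def _store(self):
        res_dict = super(FakeResult2, self)._store()
        res_dict['hey'] = np.ones((10, 10))  # storable, ends up in the file
        res_dict['fail'] = FakeResult        # a class is not storable -> error
        return res_dict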
def test_partial_loading(self):
    traj = Trajectory(name='TestPartial',
                      filename=make_temp_dir('testpartially.hdf5'),
                      add_time=True)

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    traj.f_remove_child('results', recursive=True)

    traj.f_load_skeleton()
    traj.f_load_item(traj.test, load_only=['a', 'x'])

    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)

    traj.f_remove_child('results', recursive=True)

    traj.f_load_skeleton()
    load_except = ['c', 'd']
    traj.f_load_item(traj.test, load_except=load_except)

    self.assertTrue(len(load_except) == 2)
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)

    with self.assertRaises(ValueError):
        traj.f_load_item(traj.test, load_except=['x'], load_only=['y'])
def test_storage_service_errors(self):
    traj = Trajectory(filename=make_temp_dir('testnoservice.hdf5'))

    traj_name = traj.v_name

    # you cannot store stuff before the trajectory was stored once:
    with self.assertRaises(ValueError):
        traj.v_storage_service.store('FAKESERVICE', self, trajectory_name=traj.v_name)

    traj.f_store()

    with self.assertRaises(ValueError):
        traj.v_storage_service.store('FAKESERVICE', self, trajectory_name='test')

    with self.assertRaises(pex.NoSuchServiceError):
        traj.v_storage_service.store('FAKESERVICE', self, trajectory_name=traj.v_name)

    with self.assertRaises(ValueError):
        traj.f_load(name='test', index=1)

    with self.assertRaises(RuntimeError):
        traj.v_storage_service.store('LIST', [('LEAF', None, None, None, None)],
                                     trajectory_name=traj.v_name)

    with self.assertRaises(ValueError):
        traj.f_load(index=9999)

    with self.assertRaises(ValueError):
        traj.f_load(name='Non-Existising-Traj')
def test_basic_within_same_file_and_skipping_duplicates_which_will_be_all(self):
    self.filenames = [make_temp_dir(os.path.join('experiments', 'tests', 'HDF5', 'merge1.hdf5')), 0]

    with self.assertRaises(ValueError):
        self.basic_and_skipping_duplicates_which_will_be_all()
def test_merge_all_in_folder(self):
    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               'subfolder', 'test.hdf5'))
    path, _ = os.path.split(self.filename)

    ntrajs = 4
    total_len = 0
    for irun in range(ntrajs):
        new_filename = os.path.join(path, 'test%d.hdf5' % irun)
        self.envs.append(self._make_env(irun, filename=new_filename))
        self.trajs.append(self.envs[-1].v_traj)
        self.trajs[-1].f_add_parameter('x', 0)
        self.trajs[-1].f_add_parameter('y', 0)
        self.explore(self.trajs[-1])
        total_len += len(self.trajs[-1])

    for irun in range(ntrajs):
        self.envs[irun].f_run(multiply)

    merge_traj = merge_all_in_folder(path, delete_other_files=True)
    merge_traj.f_load(load_data=2)

    self.assertEqual(len(merge_traj), total_len)
    self.check_if_z_is_correct(merge_traj)
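# ``multiply`` and ``check_if_z_is_correct`` are shared helpers of the test
# module. A minimal ``multiply`` consistent with the check (``z`` should equal
# ``x * y`` for every run) is sketched here as an assumption, not the original
# helper:
def multiply(traj):
    z = traj.x * traj.y
    traj.f_add_result('z', z, comment='Product of x and y')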
def test_df(self):
    filename = make_temp_dir('hdf5errors.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    traj.f_store()

    dadict = {'hi': [1, 2, 3, 4, 5], 'shu': ['bi', 'du', 'da', 'ha', 'hui']}
    dadict2 = {'answer': [42]}

    traj.f_add_result(SharedResult, 'dfs.df',
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict))
    traj.f_add_result(SharedResult, 'dfs.df1',
                      SharedPandasFrame()).create_shared_data(data=pd.DataFrame(dadict2))
    traj.f_add_result(SharedResult, 'dfs.df3', SharedPandasFrame())

    for irun in range(10):
        traj.df3.append(traj.df1.read())

    dframe = traj.df3.read()
    self.assertTrue(len(dframe) == 10)

    what = traj.df.select(where='index == 2')
    self.assertTrue(len(what) == 1)
def test_get_default(self):
    traj = Trajectory(name='Testgetdefault', filename=make_temp_dir('autoload.hdf5'))
    traj.v_auto_load = True

    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')

    val = traj.f_get_default('jjjjjjjjjj', 555)
    self.assertTrue(val == 555)

    traj.f_store()
    traj.f_remove_child('results', recursive=True)

    val = traj.f_get_default('res.I.am.crun.a.mean.answ', 444, auto_load=True)
    self.assertTrue(val == 444)

    val = traj.f_get_default('res.I.am.crun.a.mean.resu', auto_load=True,
                             fast_access=True)
    self.assertTrue(val == 42)

    with self.assertRaises(Exception):
        traj.kdsfdsf
def test_maximum_overview_size(self):
    filename = make_temp_dir('maxisze.hdf5')

    env = Environment(trajectory='Testmigrate', filename=filename,
                      log_config=get_log_config(), add_time=True)

    traj = env.v_trajectory
    for irun in range(pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH):
        traj.f_add_parameter('f%d.x' % irun, 5)

    traj.f_store()

    store = pt.open_file(filename, mode='r+')
    table = store.root._f_get_child(traj.v_name).overview.parameters_overview
    self.assertEqual(table.nrows, pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH)
    store.close()

    for irun in range(pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH,
                      2 * pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH):
        traj.f_add_parameter('f%d.x' % irun, 5)

    traj.f_store()

    store = pt.open_file(filename, mode='r+')
    table = store.root._f_get_child(traj.v_name).overview.parameters_overview
    self.assertEqual(table.nrows, pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH)
    store.close()

    env.f_disable_logging()
def test_partially_delete_stuff(self):
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'))

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    self.assertTrue('a' in res)
    traj.f_delete_item(res, delete_only=['a'], remove_from_item=True)

    self.assertTrue('c' in res)
    self.assertTrue('a' not in res)

    res['a'] = 'offf'
    self.assertTrue('a' in res)

    traj.f_load(load_results=3)

    self.assertTrue('a' not in res)
    self.assertTrue('c' in res)

    traj.f_delete_item(res, remove_from_trajectory=True)

    self.assertTrue('results' in traj)
    self.assertTrue(res not in traj)
def test_overwrite_stuff(self):
    traj = Trajectory(name='TestOverwrite', filename=make_temp_dir('testowrite.hdf5'))

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    res['a'] = np.array([1, 2, 3])
    res['c'] = 123445

    traj.f_store_item(res, overwrite='a', complevel=4)

    # Should emit a warning
    traj.f_store_item(res, overwrite=['a', 'b'])

    traj.f_load(load_results=3)

    res = traj.test

    self.assertTrue((res['a'] == np.array([1, 2, 3])).all())
    self.assertTrue(res['c'] == 'd')

    res['c'] = 123445
    traj.f_store_item(res, store_data=3)
    res.f_empty()

    traj.f_load(load_results=3)

    self.assertTrue(traj.test['c'] == 123445)
def test_delete_links(self):
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_add_link('x.y', res)
    traj.f_add_link('x.g.h', res)

    traj.f_store()

    traj.f_remove_child('x', recursive=True)
    traj.f_load()

    self.assertEqual(traj.x.y.a, traj.test.a)
    self.assertEqual(traj.x.g.h.a, traj.test.a)

    traj.f_delete_link('x.y', remove_from_trajectory=True)
    traj.f_delete_link((traj.x.g, 'h'), remove_from_trajectory=True)

    traj.f_load()

    with self.assertRaises(AttributeError):
        traj.x.g.h
def test_loading_as_new(self):
    filename = make_temp_dir('asnew.h5')
    traj = Trajectory(name='TestPartial', filename=filename)

    traj.f_add_parameter('x', 3)
    traj.f_add_parameter('y', 2)

    traj.f_explore({'x': [12, 3, 44], 'y': [1, 23, 4]})

    traj.f_store()

    traj = load_trajectory(name=traj.v_name, filename=filename)
    with self.assertRaises(TypeError):
        traj.f_shrink()

    traj = load_trajectory(name=traj.v_name, filename=filename, as_new=True,
                           new_name='TestTraj', add_time=False)

    self.assertTrue(traj.v_name == 'TestTraj')
    self.assertTrue(len(traj) == 3)

    traj.f_shrink()
    self.assertTrue(len(traj) == 1)
def test_make_default_file_when_giving_directory_without_slash(self):
    filename = make_temp_dir('test.hdf5')
    head, tail = os.path.split(filename)

    env = Environment(filename=head)
    the_file_name = env.v_traj.v_name + '.hdf5'

    head, tail = os.path.split(env.v_traj.v_storage_service.filename)
    self.assertEqual(tail, the_file_name)
def test_partial_loading(self):
    traj = Trajectory(name='TestPartial', filename=make_temp_dir('testpartially.hdf5'))

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    traj.f_remove_child('results', recursive=True)

    traj.f_load_skeleton()
    traj.f_load_item(traj.test, load_only=['a', 'x'])

    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)

    traj.f_remove_child('results', recursive=True)

    traj.f_load_skeleton()
    load_except = ['c', 'd']
    traj.f_load_item(traj.test, load_except=load_except)

    self.assertTrue(len(load_except) == 2)
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)

    with self.assertRaises(ValueError):
        traj.f_load_item(traj.test, load_except=['x'], load_only=['y'])
def test_time_display_of_loading(self):
    filename = make_temp_dir('sloooow.hdf5')
    env = Environment(trajectory='traj', add_time=True, filename=filename,
                      log_stdout=False, log_config=get_log_config(),
                      dynamic_imports=SlowResult, display_time=0.1)
    traj = env.v_traj
    res = traj.f_add_result(SlowResult, 'iii', 42, 43, comment='llk')
    traj.f_store()

    service_logger = traj.v_storage_service._logger
    root = logging.getLogger('pypet')
    old_level = root.level

    service_logger.setLevel(logging.INFO)
    root.setLevel(logging.INFO)

    traj.f_load(load_data=3)

    service_logger.setLevel(old_level)
    root.setLevel(old_level)

    path = get_log_path(traj)
    mainfilename = os.path.join(path, 'LOG.txt')
    with open(mainfilename, mode='r') as mainf:
        full_text = mainf.read()
        self.assertTrue('nodes/s)' in full_text)

    env.f_disable_logging()
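# ``SlowResult`` is defined elsewhere in the test suite. A hypothetical minimal
# stand-in (assuming pypet's convention that Result subclasses can override
# ``_load``) would simply delay loading long enough for the ``display_time``
# progress output ('nodes/s)') to be emitted:
import time
from pypet import Result


class SlowResult(Result):
    def _load(self, load_dict):
        time.sleep(0.2)  # longer than display_time=0.1 used above
        super(SlowResult, self)._load(load_dict)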
def test_overwrite_stuff(self):
    traj = Trajectory(name='TestOverwrite',
                      filename=make_temp_dir('testowrite.hdf5'),
                      add_time=True)

    res = traj.f_add_result('mytest.test', a='b', c='d')

    traj.f_store()

    res['a'] = np.array([1, 2, 3])
    res['c'] = 123445

    traj.f_store_item(res, overwrite='a', complevel=4)

    # Should emit a warning
    traj.f_store_item(res, overwrite=['a', 'b'])

    traj.f_load(load_results=3)

    res = traj.test

    self.assertTrue((res['a'] == np.array([1, 2, 3])).all())
    self.assertTrue(res['c'] == 'd')

    res['c'] = 123445
    traj.f_store_item(res, store_data=3)
    res.f_empty()

    traj.f_load(load_results=3)

    self.assertTrue(traj.test['c'] == 123445)
def setUp(self):
    self.set_mode()

    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               'sort_tests.hdf5'))

    self.trajname = make_trajectory_name(self)

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_stdout=self.log_stdout,
                      log_config=get_log_config() if self.log_config else None,
                      multiproc=self.multiproc,
                      wrap_mode=self.mode,
                      ncores=self.ncores,
                      use_pool=self.use_pool,
                      use_scoop=self.use_scoop,
                      port=self.port,
                      freeze_input=self.freeze_input,
                      graceful_exit=self.graceful_exit)

    traj = env.v_trajectory
    traj.v_standard_parameter = Parameter

    traj.f_add_parameter('x', 99)
    traj.f_add_parameter('y', 99)

    self.env = env
    self.traj = traj
def test_hdf5_store_load_parameter(self):
    traj_name = make_trajectory_name(self)
    file_name = make_temp_dir(os.path.join('brian2', 'tests', 'hdf5',
                                           'test_%s.hdf5' % traj_name))
    env = Environment(trajectory=traj_name,
                      filename=file_name,
                      log_config=get_log_config(),
                      dynamic_imports=[Brian2Parameter],
                      add_time=False,
                      storage_service=HDF5StorageService)

    traj = env.v_trajectory
    traj.v_standard_parameter = Brian2Parameter

    traj.f_add_parameter('brian2.single.millivolts', 10 * mvolt, comment='single value')

    # traj.f_add_parameter('brian2.array.millivolts', [11, 12] * mvolt, comment='array')
    # traj.f_add_parameter('mV1', 42.0 * mV)
    # traj.f_add_parameter('ampere1', 1 * mA)
    # traj.f_add_parameter('integer', 16)
    # traj.f_add_parameter('kHz05', 0.5 * kHz)
    # traj.f_add_parameter('nested_array', np.array([[6., 7., 8.], [9., 10., 11.]]) * ms)
    # traj.f_add_parameter('b2a', np.array([1., 2.]) * mV)

    # We also need to check if explorations work with hdf5 store!
    # explore_dict = {'ampere1': [1*mA, 2*mA, 3*mA],
    #                 'integer': [42, 43, 44],
    #                 'b2a': [np.array([1., 2.]) * mV, np.array([1., 4.]) * mV,
    #                         np.array([1., 2.]) * mV]}
    # traj.f_explore(explore_dict)

    traj.f_store()

    traj2 = load_trajectory(filename=file_name, name=traj_name,
                            dynamic_imports=[Brian2Parameter],
                            load_data=2)

    self.compare_trajectories(traj, traj2)
def test_auto_load(self):
    traj = Trajectory(name='Testautoload',
                      filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True

    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)

    traj.f_store()

    ffa = traj.f_get('ffa')
    ffa.f_unlock()
    ffa.f_empty()
    self.assertTrue(ffa.f_is_empty())

    traj.f_remove_child('results', recursive=True)

    # check auto load
    val = traj.res.I.am.crun.a.mean.resu
    self.assertTrue(val == 42)

    val = traj.ffa
    self.assertTrue(val == 42)

    with self.assertRaises(pex.DataNotInStorageError):
        traj.kdsfdsf
def test_errors(self):
    filename = make_temp_dir("hdf5errors.hdf5")
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name

    npearray = np.ones((2, 10, 3), dtype=float)
    thevlarray = np.array([compat.tobytes("j"), 22.2, compat.tobytes("gutter")])

    with self.assertRaises(TypeError):
        traj.f_add_result(SharedResult, "arrays.vlarray",
                          SharedVLArray()).create_shared_data(obj=thevlarray)
    traj.f_store()
    traj.arrays.vlarray.create_shared_data(obj=thevlarray)
    traj.f_add_result(SharedResult, "arrays.array",
                      SharedArray()).create_shared_data(data=npearray)
    traj.arrays.f_add_result(SharedResult, "super.carray", SharedCArray(),
                             comment="carray").create_shared_data(shape=(10, 10),
                                                                  atom=pt.atom.FloatAtom())
    traj.arrays.f_add_result(SharedResult, "earray",
                             SharedEArray()).create_shared_data("earray", obj=npearray)

    traj.f_store()

    with self.assertRaises(TypeError):
        traj.arrays.array.iter_rows()

    with StorageContextManager(traj) as cm:
        with self.assertRaises(RuntimeError):
            with StorageContextManager(traj) as cm2:
                pass
        self.assertTrue(traj.v_storage_service.is_open)
        with self.assertRaises(RuntimeError):
            StorageContextManager(traj).f_open_store()

    self.assertFalse(traj.v_storage_service.is_open)
def setUp(self):
    self.set_mode()

    self.filename = make_temp_dir(os.path.join('experiments', 'tests', 'HDF5',
                                               'sort_tests.hdf5'))

    self.trajname = make_trajectory_name(self)

    env = Environment(trajectory=self.trajname,
                      filename=self.filename,
                      file_title=self.trajname,
                      log_stdout=self.log_stdout,
                      log_config=get_log_config() if self.log_config else None,
                      multiproc=self.multiproc,
                      wrap_mode=self.mode,
                      ncores=self.ncores,
                      use_pool=self.use_pool,
                      use_scoop=self.use_scoop,
                      port=self.port,
                      freeze_input=self.freeze_input)

    traj = env.v_trajectory
    traj.v_standard_parameter = Parameter

    traj.f_add_parameter('x', 0)
    traj.f_add_parameter('y', 0)

    self.env = env
    self.traj = traj
def test_maximum_overview_size(self):
    filename = make_temp_dir('maxisze.hdf5')

    env = Environment(trajectory='Testmigrate', filename=filename,
                      log_config=get_log_config())

    traj = env.v_trajectory
    for irun in range(pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH):
        traj.f_add_parameter('f%d.x' % irun, 5)

    traj.f_store()

    store = ptcompat.open_file(filename, mode='r+')
    table = ptcompat.get_child(store.root, traj.v_name).overview.parameters_overview
    self.assertEqual(table.nrows, pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH)
    store.close()

    for irun in range(pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH,
                      2 * pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH):
        traj.f_add_parameter('f%d.x' % irun, 5)

    traj.f_store()

    store = ptcompat.open_file(filename, mode='r+')
    table = ptcompat.get_child(store.root, traj.v_name).overview.parameters_overview
    self.assertEqual(table.nrows, pypetconstants.HDF5_MAX_OVERVIEW_TABLE_LENGTH)
    store.close()

    env.f_disable_logging()
def setUp(self):
    env = Environment(trajectory='Test_' + repr(time.time()).replace('.', '_'),
                      filename=make_temp_dir(os.path.join('experiments', 'tests',
                                                          'briantests', 'HDF5',
                                                          'briantest.hdf5')),
                      file_title='test',
                      log_config=get_log_config(),
                      dynamic_imports=['pypet.brian.parameter.BrianParameter',
                                       BrianMonitorResult],
                      multiproc=False)

    traj = env.v_trajectory

    # env._set_standard_storage()
    # env._hdf5_queue_writer._hdf5storageservice = LazyStorageService()
    # traj.set_storage_service(LazyStorageService())

    add_params(traj)
    # traj.mode = 'Parallel'

    traj.f_explore(cartesian_product({traj.f_get('N').v_full_name: [50, 60],
                                      traj.f_get('tauw').v_full_name: [30 * ms, 40 * ms]}))

    self.traj = traj
    self.env = env