def test_store_items_and_groups(self):
    """Store selected items/groups individually and compare after reload."""
    traj = Trajectory(name='testtraj',
                      filename=make_temp_dir('teststoreitems.hdf5'),
                      add_time=True)
    traj.f_store()
    # Comment deliberately exceeds the HDF5 column limit to exercise truncation handling.
    traj.f_add_parameter('group1.test', 42,
                         comment='TooLong' * pypetconstants.HDF5_STRCOL_MAX_COMMENT_LENGTH)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    # Store only the selected items, not the full trajectory.
    traj.f_store_items(['test', 'testres', 'group1'])
    traj2 = Trajectory(name=traj.v_name, add_time=False,
                       filename=make_temp_dir('teststoreitems.hdf5'))
    traj2.f_load(load_parameters=2, load_results=2)
    # Store and re-load a single child along a nested path, with an annotation attached.
    traj.f_add_result('Im.stored.along.a.path', 43)
    traj.Im.stored.along.v_annotations['wtf'] = 4444
    traj.res.f_store_child('Im.stored.along.a.path')
    traj2.res.f_load_child('Im.stored.along.a.path', load_data=2)
    self.compare_trajectories(traj, traj2)
def test_store_and_load_large_dictionary(self):
    """Round-trip a large and a small dict result through storage."""
    traj = Trajectory(name='Testlargedict',
                      filename=make_temp_dir('large_dict.hdf5'),
                      add_time=True)
    # 1025 entries — presumably just past an internal chunking threshold; TODO confirm.
    big = {'item_%d' % idx: idx for idx in range(1025)}
    small = {'item_%d' % idx: idx for idx in range(33)}
    traj.f_add_result('large_dict', big, comment='Huge_dict!')
    traj.f_add_result('large_dict2', small, comment='Not so large dict!')
    traj.f_store()
    traj_name = traj.v_name
    reloaded = Trajectory(filename=make_temp_dir('large_dict.hdf5'), add_time=True)
    reloaded.f_load(name=traj_name, load_data=2)
    self.compare_trajectories(traj, reloaded)
def test_version_mismatch(self):
    """Loading a trajectory stored under a different pypet version must raise
    VersionMismatchError unless ``force=True`` is given."""
    traj = Trajectory(name='TestVERSION',
                      filename=make_temp_dir('testversionmismatch.hdf5'),
                      add_time=True)
    traj.f_add_parameter('group1.test', 42)
    traj.f_add_result('testres', 42)
    traj.group1.f_set_annotations(Test=44)
    # Fake an old version string before storing.
    traj._version = '0.1a.1'
    traj.f_store()
    traj2 = Trajectory(name=traj.v_name, add_time=False,
                       filename=make_temp_dir('testversionmismatch.hdf5'))
    with self.assertRaises(pex.VersionMismatchError):
        traj2.f_load(load_parameters=2, load_results=2)
    # Forcing the load must succeed despite the mismatch.
    traj2.f_load(load_parameters=2, load_results=2, force=True)
    self.compare_trajectories(traj, traj2)
    get_root_logger().info('Mismatch testing done!')
def test_migrations(self):
    """Migrate a stored trajectory to a new file and re-attach a fresh
    trajectory to the migrated data."""
    traj = Trajectory(name='Testmigrate',
                      filename=make_temp_dir('migrate.hdf5'),
                      add_time=True)
    traj.f_add_result('I.am.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    # Move the trajectory to a second file and store it there as well.
    new_file = make_temp_dir('migrate2.hdf5')
    traj.f_migrate(filename=new_file)
    traj.f_store()
    # A fresh trajectory migrated onto existing data should auto-load it.
    new_traj = Trajectory()
    new_traj.f_migrate(new_name=traj.v_name, filename=new_file, in_store=True)
    new_traj.v_auto_load = True
    self.assertTrue(new_traj.results.I.am.a.mean.resu == 42)
def test_storage_and_loading(self):
    """Store a trajectory full of links (including circular ones), reload it,
    and verify link resolution, removal, and auto-loading.

    Fix: the original ``self.assertTrue(len(traj._linked_by), len(traj2._linked_by))``
    passed the second length as the assertion *message*, so the equality was
    never actually checked (and the assert trivially passed whenever the first
    length was non-zero). Use ``assertEqual`` so both counts are compared.
    """
    filename = make_temp_dir('linktest.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    # Links from several places to the same result, plus circular group links.
    traj.par.f_add_link('gg', res)
    traj.f_add_link('hh', res)
    traj.f_add_link('jj', traj.par)
    traj.f_add_link('ii', res)
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.test.f_add_link('circle2', traj.test.circle1.circle2)
    traj.f_add_parameter_group('test.ab.bc.cd')
    traj.cd.f_add_link(traj.test)
    traj.test.f_add_link(traj.cd)
    traj.f_store()

    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)
    self.assertTrue(traj.kk == traj2.gg, '%s != %s' % (traj.kk, traj2.gg))
    self.assertTrue(traj.cd.test is traj.test)
    # Bug fix: compare the link-count bookkeeping of original and reload.
    self.assertEqual(len(traj._linked_by), len(traj2._linked_by))
    self.compare_trajectories(traj, traj2)

    # Removing children must also update the internal link counters.
    self.assertTrue('jj' in traj2._nn_interface._links_count)
    traj2.f_remove_child('jj')
    self.assertTrue('jj' not in traj2._nn_interface._links_count)
    traj2.f_remove_child('hh')
    traj2.f_remove_child('ii')
    traj2.f_remove_child('parameters', recursive=True)

    # Auto-loading must transparently resolve (circular) links from disk.
    traj2.v_auto_load = True
    group = traj2.par.test2.circle2
    self.assertTrue(group is traj2.test)
    retest = traj2.test.circle1
    self.assertTrue(retest is traj2.test2)
    self.assertTrue(traj2.test.circle2 is traj2.test)
    self.assertTrue(traj2.hh == traj2.res.kk)

    traj2.v_auto_load = False
    traj2.f_load_child('jj')
    self.assertTrue(traj2.jj is traj2.par)
    traj2.f_load(load_data=2)
    self.assertTrue(traj2.ii == traj2.res.kk)
def test_loading_explored_parameters(self):
    """After reload, ``f_get`` must return the very object registered as an
    explored parameter."""
    filename = make_temp_dir('load_explored.hdf5')
    traj = Trajectory(filename=filename, overwrite_file=True, add_time=False)
    traj.par.x = Parameter('x', 42, comment='answer')
    traj.f_explore({'x': [1, 2, 3, 4]})
    traj.f_store()
    name = traj.v_name
    # Re-open from disk and check identity with the explored-parameters registry.
    traj = Trajectory(filename=filename, add_time=False)
    traj.f_load()
    x = traj.f_get('x')
    self.assertIs(x, traj._explored_parameters['parameters.x'])
def test_storage_service_errors(self):
    """Exercise the error paths of the raw storage service and of f_load."""
    traj = Trajectory(filename=make_temp_dir('testnoservice.hdf5'),
                      add_time=True)
    traj_name = traj.v_name
    # You cannot store stuff before the trajectory was stored once:
    with self.assertRaises(ValueError):
        traj.v_storage_service.store('FAKESERVICE', self,
                                     trajectory_name=traj.v_name)
    traj.f_store()
    # Storing under a trajectory name that does not exist in the file:
    with self.assertRaises(ValueError):
        traj.v_storage_service.store('FAKESERVICE', self,
                                     trajectory_name='test')
    # Unknown message type for the service:
    with self.assertRaises(pex.NoSuchServiceError):
        traj.v_storage_service.store('FAKESERVICE', self,
                                     trajectory_name=traj.v_name)
    # Name and index must not be given together:
    with self.assertRaises(ValueError):
        traj.f_load(name='test', index=1)
    # Malformed LIST message:
    with self.assertRaises(RuntimeError):
        traj.v_storage_service.store('LIST',
                                     [('LEAF', None, None, None, None)],
                                     trajectory_name=traj.v_name)
    # Out-of-range index and unknown name:
    with self.assertRaises(ValueError):
        traj.f_load(index=9999)
    with self.assertRaises(ValueError):
        traj.f_load(name='Non-Existising-Traj')
def test_removal_of_error_parameter(self):
    """A result that fails during storage must not leave partial data in the
    HDF5 file, and must be replaceable by a healthy one."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    traj.f_add_result('iii', 42)
    # FakeResult presumably raises during storage — TODO confirm fixture intent.
    traj.f_add_result(FakeResult, 'j.j.josie', 43)
    file = traj.v_storage_service.filename
    traj.f_store(only_init=True)
    with self.assertRaises(RuntimeError):
        traj.f_store()
    # The failed result must not appear in the file.
    with pt.open_file(file, mode='r') as fh:
        jj = fh.get_node(where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie' not in jj)
    # Replace the broken result with one that only partially fails.
    traj.j.j.f_remove_child('josie')
    traj.j.j.f_add_result(FakeResult2, 'josie2', 444)
    traj.f_store()
    with self.assertRaises(pex.NoSuchServiceError):
        traj.f_store_child('results', recursive=True)
    with pt.open_file(file, mode='r') as fh:
        jj = fh.get_node(where='/%s/results/j/j' % traj.v_name)
        self.assertTrue('josie2' in jj)
        josie2 = jj._f_get_child('josie2')
        # Only the healthy part of the result was stored.
        self.assertTrue('hey' in josie2)
        self.assertTrue('fail' not in josie2)
def test_partially_delete_stuff(self):
    """Delete single entries of a stored result, then the whole item."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    self.assertTrue('a' in res)
    # Delete only entry 'a', both on disk and from the in-memory item.
    traj.f_delete_item(res, delete_only=['a'], remove_from_item=True)
    self.assertTrue('c' in res)
    self.assertTrue('a' not in res)
    # Re-adding 'a' locally must not survive a full reload from disk.
    res['a'] = 'offf'
    self.assertTrue('a' in res)
    traj.f_load(load_results=3)
    self.assertTrue('a' not in res)
    self.assertTrue('c' in res)
    # Finally delete the whole result and detach it from the trajectory.
    traj.f_delete_item(res, remove_from_trajectory=True)
    self.assertTrue('results' in traj)
    self.assertTrue(res not in traj)
def main():
    """Load the clustered-network results and plot the average Fano factor
    as a function of R_ee."""
    filename = os.path.join('hdf5', 'Clustered_Network.hdf5')
    # If we pass a filename to the trajectory a new HDF5StorageService will
    # be automatically created
    traj = Trajectory(filename=filename,
                      dynamically_imported_classes=[BrianMonitorResult,
                                                    BrianParameter])
    # Let's create a fake environment to enable logging:
    env = Environment(traj, do_single_runs=False)
    # Load the trajectory, but only load the skeleton of the results
    traj.f_load(index=-1, load_parameters=2, load_derived_parameters=2,
                load_results=1)
    # Find the result instances related to the fano factor
    fano_dict = traj.f_get_from_runs('mean_fano_factor', fast_access=False)
    # Load the data of the fano factor results
    ffs = fano_dict.values()
    traj.f_load_items(ffs)
    # Extract all values and R_ee values for each run
    ffs_values = [x.f_get() for x in ffs]
    Rees = traj.f_get('R_ee').f_get_range()
    # Plot average fano factor as a function of R_ee
    plt.plot(Rees, ffs_values)
    plt.xlabel('R_ee')
    plt.ylabel('Avg. Fano Factor')
    plt.show()
    # Finally disable logging and close all log-files
    env.disable_logging()
def test_get_default(self):
    """``f_get_default`` returns the fallback for missing nodes and the real
    value (via auto-load) for stored ones."""
    traj = Trajectory(name='Testgetdefault',
                      filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    # Unknown name -> default.
    val = traj.f_get_default('jjjjjjjjjj', 555)
    self.assertTrue(val == 555)
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    # Name that does not exist on disk either -> default, even with auto-load.
    val = traj.f_get_default('res.I.am.crun.a.mean.answ', 444, auto_load=True)
    self.assertTrue(val == 444)
    # Existing node is auto-loaded and fast-accessed.
    val = traj.f_get_default('res.I.am.crun.a.mean.resu',
                             auto_load=True, fast_access=True)
    self.assertTrue(val == 42)
    # Plain attribute access to a bogus name still raises.
    with self.assertRaises(Exception):
        traj.kdsfdsf
def print_traj_parameters_explored(traj_dir):
    """Print the number of runs and the explored parameter names of the
    trajectory stored in ``traj_dir``."""
    # Load the trajectory from the hdf5 file.
    # Only load parameters; results will be loaded at runtime (auto loading).
    #traj_dir = os.path.join('trajectories', '2019_03_21_22h48m29s_HCP_test')
    #if not os.path.isdir(traj_dir):
    #    traj_dir = os.path.join('..', traj_dir)
    traj_fullpath = os.path.join(traj_dir, 'traj.hdf5')
    traj = Trajectory()
    traj.f_load(filename=traj_fullpath, index=0,
                load_parameters=2, load_results=0,
                load_derived_parameters=0, force=True)
    # Turn on auto loading
    traj.v_auto_load = True
    # Count number of runs
    runs_n = len(traj.f_get_run_names())
    print('number of runs = {0}'.format(runs_n))
    # Keep only the leaf name of each explored parameter's full path.
    parameters_explored = [par.split('.')[-1]
                           for par in traj.f_get_explored_parameters()]
    print(parameters_explored)
def main():
    """Let a pool of processes safely manipulate one trajectory via a lock-
    wrapped storage service, then reload and show the last writer's name."""
    # We don't use an environment so we enable logging manually
    logging.basicConfig(level=logging.INFO)
    filename = os.path.join('hdf5', 'example_16.hdf5')
    traj = Trajectory(filename=filename, overwrite_file=True)
    # The result that will be manipulated
    traj.f_add_result('last_process_name', 'N/A',
                      comment='Name of the last process that manipulated the trajectory')
    with MultiprocContext(trajectory=traj, wrap_mode='LOCK') as mc:
        # The multiprocessing context manager wraps the storage service of the
        # trajectory and passes the wrapped service to the trajectory.
        # It also restores the original storage service in the end.
        # The 'LOCK' wrapping lets the pickled locks be sent to the pool
        # for all function executions.
        # Start a pool of processes manipulating the trajectory:
        iterable = (traj for x in range(50))
        pool = mp.Pool(processes=4)
        # Pass the trajectory and the function to the pool; 50 executions total.
        pool.map_async(manipulate_multiproc_safe, iterable)
        pool.close()
        # Wait for all processes to join
        pool.join()
    # Reload the data from disk and overwrite the existing result in RAM
    traj.results.f_load(load_data=3)
    # Print the name of the last process the trajectory was manipulated by
    print('The last process to manipulate the trajectory was: `%s`'
          % traj.last_process_name)
def test_auto_load(self):
    """Auto-loading must fetch removed results and refill emptied leaves on
    attribute access, and raise DataNotInStorageError for unknown names."""
    traj = Trajectory(name='Testautoload',
                      filename=make_temp_dir('autoload.hdf5'),
                      add_time=True)
    traj.v_auto_load = True
    traj.f_add_result('I.am.$.a.mean.resu', 42, comment='Test')
    traj.f_add_derived_parameter('ffa', 42)
    traj.f_store()
    # Empty the derived parameter in RAM; it stays on disk.
    ffa = traj.f_get('ffa')
    ffa.f_unlock()
    ffa.f_empty()
    self.assertTrue(ffa.f_is_empty())
    traj.f_remove_child('results', recursive=True)
    # Check auto load: both values come back from storage on access.
    val = traj.res.I.am.crun.a.mean.resu
    self.assertTrue(val == 42)
    val = traj.ffa
    self.assertTrue(val == 42)
    with self.assertRaises(pex.DataNotInStorageError):
        traj.kdsfdsf
def test_delete_links(self):
    """Delete stored links both by path string and by (group, name) pair."""
    traj = Trajectory(name='TestDelete',
                      filename=make_temp_dir('testpartiallydel.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_add_link('x.y', res)
    traj.f_add_link('x.g.h', res)
    traj.f_store()
    # Remove in RAM, reload from disk — links must be restored.
    traj.f_remove_child('x', recursive=True)
    traj.f_load()
    self.assertEqual(traj.x.y.a, traj.test.a)
    self.assertEqual(traj.x.g.h.a, traj.test.a)
    # Delete via path string and via (parent group, link name) tuple.
    traj.f_delete_link('x.y', remove_from_trajectory=True)
    traj.f_delete_link((traj.x.g, 'h'), remove_from_trajectory=True)
    traj.f_load()
    with self.assertRaises(AttributeError):
        traj.x.g.h
def test_partial_loading(self):
    """Load only selected entries of a result via load_only / load_except."""
    traj = Trajectory(name='TestPartial',
                      filename=make_temp_dir('testpartially.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    # 'x' does not exist; load_only should silently skip it.
    traj.f_load_item(traj.test, load_only=['a', 'x'])
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    traj.f_remove_child('results', recursive=True)
    traj.f_load_skeleton()
    load_except = ['c', 'd']
    traj.f_load_item(traj.test, load_except=load_except)
    # The caller's list must not be mutated by the call.
    self.assertTrue(len(load_except) == 2)
    self.assertTrue('a' in traj.test)
    self.assertTrue('c' not in traj.test)
    # load_only and load_except are mutually exclusive.
    with self.assertRaises(ValueError):
        traj.f_load_item(traj.test, load_except=['x'], load_only=['y'])
def make_plots(resultsPath):
    """Load the tone-in-noise trajectory and write one PDF per effect plot
    into ~/pypet-output. Returns 0 on completion."""
    traj = Trajectory('tone-in-noise', add_time=False)
    traj.f_load(load_parameters=2, load_derived_parameters=0, load_results=1,
                load_other_data=0, filename=resultsPath)
    traj.v_auto_load = True
    # (output file name, plotting routine) pairs — rendered one PDF each.
    out_dir = path.join(path.expanduser('~'), "pypet-output")
    plot_jobs = [
        ('synaptopathy.pdf', synaptopathy_effect),
        ('periphery.pdf', periphery_effect),
        ('weighting.pdf', weighting_effect),
        ('brainstem.pdf', brainstem_effect),
    ]
    for pdf_name, plot_fn in plot_jobs:
        with PdfPages(path.join(out_dir, pdf_name)) as pdf:
            plot_fn(traj, pdf)
    return 0
def test_loading_as_new(self):
    """Reload a finished trajectory as a new (shrinkable) one."""
    filename = make_temp_dir('asnew.h5')
    traj = Trajectory(name='TestPartial', filename=filename, add_time=True)
    traj.f_add_parameter('x', 3)
    traj.f_add_parameter('y', 2)
    traj.f_explore({'x': [12, 3, 44], 'y': [1, 23, 4]})
    traj.f_store()
    # A plainly reloaded trajectory cannot be shrunk.
    traj = load_trajectory(name=traj.v_name, filename=filename)
    with self.assertRaises(TypeError):
        traj.f_shrink()
    # Loading as new yields a renamed, shrinkable copy.
    traj = load_trajectory(name=traj.v_name, filename=filename,
                           as_new=True, new_name='TestTraj', add_time=False)
    self.assertTrue(traj.v_name == 'TestTraj')
    self.assertEqual(len(traj), 3)
    self.assertEqual(len(traj._explored_parameters), 2)
    traj.f_shrink()
    self.assertTrue(len(traj) == 1)
def test_overwrite_stuff(self):
    """Overwrite selected entries of a stored result, then overwrite all."""
    traj = Trajectory(name='TestOverwrite',
                      filename=make_temp_dir('testowrite.hdf5'),
                      add_time=True)
    res = traj.f_add_result('mytest.test', a='b', c='d')
    traj.f_store()
    res['a'] = np.array([1, 2, 3])
    res['c'] = 123445
    # Overwrite only 'a' on disk.
    traj.f_store_item(res, overwrite='a', complevel=4)
    # 'b' does not exist — should emit a warning, not fail.
    traj.f_store_item(res, overwrite=['a', 'b'])
    traj.f_load(load_results=3)
    res = traj.test
    # 'a' was overwritten, 'c' still holds its originally stored value.
    self.assertTrue((res['a'] == np.array([1, 2, 3])).all())
    self.assertTrue(res['c'] == 'd')
    # store_data=3 overwrites everything.
    res['c'] = 123445
    traj.f_store_item(res, store_data=3)
    res.f_empty()
    traj.f_load(load_results=3)
    self.assertTrue(traj.test['c'] == 123445)
def load_trajectory(self, trajectory_index=None, trajectory_name=None,
                    as_new=False):
    """Load a trajectory (by name or index) from ``self.filename`` with all
    data and return it, so callers can check values survived storage."""
    newtraj = Trajectory(filename=self.filename)
    newtraj.f_load(name=trajectory_name,
                   index=trajectory_index,
                   as_new=as_new,
                   load_parameters=2,
                   load_derived_parameters=2,
                   load_results=2,
                   load_other_data=2)
    return newtraj
def test_df(self):
    """Round-trip shared pandas frames: append, read back, and select."""
    filename = make_temp_dir('hdf5errors.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    traj.f_store()
    frame_data = {'hi': [1, 2, 3, 4, 5],
                  'shu': ['bi', 'du', 'da', 'ha', 'hui']}
    one_row = {'answer': [42]}
    traj.f_add_result(SharedResult, 'dfs.df',
                      SharedPandasFrame()).create_shared_data(
                          data=pd.DataFrame(frame_data))
    traj.f_add_result(SharedResult, 'dfs.df1',
                      SharedPandasFrame()).create_shared_data(
                          data=pd.DataFrame(one_row))
    traj.f_add_result(SharedResult, 'dfs.df3', SharedPandasFrame())
    # Appending the single-row frame ten times yields ten rows.
    for _ in range(10):
        traj.df3.append(traj.df1.read())
    dframe = traj.df3.read()
    self.assertTrue(len(dframe) == 10)
    # Querying the stored frame by index.
    what = traj.df.select(where='index == 2')
    self.assertTrue(len(what) == 1)
def test_store_overly_long_comment(self):
    """A very long comment must survive store and reload unchanged."""
    filename = make_temp_dir('remove_errored.hdf5')
    traj = Trajectory(name='traj', add_time=True, filename=filename)
    long_comment = 7777 * '6'
    res = traj.f_add_result('iii', 42, 43, comment=long_comment)
    traj.f_store()
    # Drop the in-memory results and reload them from disk.
    traj.f_remove_child('results', recursive=True)
    traj.f_load_child('results', recursive=True)
    self.assertTrue(traj.iii.v_comment == long_comment)
def test_iteration_failure(self):
    """A link that makes a short name ambiguous must raise NotUniqueNodeError."""
    traj = Trajectory()
    traj.f_add_parameter_group('test.test3')
    traj.f_add_parameter_group('test2')
    # 'test3' is now reachable both under 'test' and via the link in 'test2'.
    traj.test2.f_add_link(traj.test3)
    with self.assertRaises(pex.NotUniqueNodeError):
        traj.test3
def test_loading_and_storing_empty_containers(self):
    """Empty tuples, arrays, and sparse matrices must round-trip through
    storage with their types intact."""
    filename = make_temp_dir('empty_containers.hdf5')
    traj = Trajectory(filename=filename, add_time=True)
    # traj.f_add_parameter('empty.dict', {})
    # traj.f_add_parameter('empty.list', [])
    traj.f_add_parameter(ArrayParameter, 'empty.tuple', ())
    traj.f_add_parameter(ArrayParameter, 'empty.array',
                         np.array([], dtype=float))
    # All-zero sparse matrices of every supported format.
    spsparse_csc = spsp.csc_matrix((2, 10))
    spsparse_csr = spsp.csr_matrix((6660, 660))
    spsparse_bsr = spsp.bsr_matrix((3330, 2220))
    spsparse_dia = spsp.dia_matrix((1230, 1230))
    traj.f_add_parameter(SparseParameter, 'empty.csc', spsparse_csc)
    traj.f_add_parameter(SparseParameter, 'empty.csr', spsparse_csr)
    traj.f_add_parameter(SparseParameter, 'empty.bsr', spsparse_bsr)
    traj.f_add_parameter(SparseParameter, 'empty.dia', spsparse_dia)
    traj.f_add_result(SparseResult, 'empty.all',
                      dict={}, list=[],
                      series=pd.Series(),
                      frame=pd.DataFrame(),
                      panel=pd.Panel(),
                      **traj.par.f_to_dict(short_names=True, fast_access=True))
    traj.f_store()
    newtraj = load_trajectory(index=-1, filename=filename)
    newtraj.f_load(load_data=2)
    epg = newtraj.par.empty
    self.assertTrue(type(epg.tuple) is tuple)
    self.assertTrue(len(epg.tuple) == 0)
    self.assertTrue(type(epg.array) is np.ndarray)
    self.assertTrue(epg.array.size == 0)
    self.assertTrue(spsp.isspmatrix_csr(epg.csr))
    self.assertTrue(epg.csr.size == 0)
    self.assertTrue(spsp.isspmatrix_csc(epg.csc))
    self.assertTrue(epg.csc.size == 0)
    self.assertTrue(spsp.isspmatrix_bsr(epg.bsr))
    self.assertTrue(epg.bsr.size == 0)
    self.assertTrue(spsp.isspmatrix_dia(epg.dia))
    self.assertTrue(epg.dia.size == 0)
    self.compare_trajectories(traj, newtraj)
def test_link_deletion(self):
    """A link deleted from storage must be absent after a fresh load."""
    filename = make_temp_dir('linktest2.hdf5')
    traj = Trajectory(filename=filename)
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    res = traj.f_add_result('kk', 42)
    traj.par.f_add_link('gg', res)
    # Circular group links should be unaffected by deleting 'gg'.
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    traj.f_store()
    traj.f_delete_link('par.gg')
    traj2 = Trajectory(filename=filename)
    traj2.f_load(name=traj.v_name, load_data=2)
    with self.assertRaises(AttributeError):
        traj2.gg
def test_links_according_to_run(self):
    """Setting ``v_idx`` on a trajectory containing group links must not fail."""
    traj = Trajectory()
    traj.f_add_parameter('test.hi', 44)
    traj.f_explore({'hi': [1, 2, 3]})
    traj.f_add_parameter_group('test.test.test2')
    traj.f_add_parameter_group('test2')
    traj.test2.f_add_link('test', traj.test)
    # Select run 1; link resolution presumably follows the run index — TODO confirm.
    traj.v_idx = 1
def test_link_of_link(self):
    """Adding a link whose target is reached through another link must resolve
    to the underlying group."""
    traj = Trajectory()
    traj.f_add_parameter_group('test')
    traj.f_add_parameter_group('test2')
    traj.test.f_add_link('circle1', traj.test2)
    traj.test2.f_add_link('circle2', traj.test)
    # circle1.circle2 points back to 'test' itself.
    traj.test.f_add_link('circle2', traj.test.circle1.circle2)
    self.assertTrue(traj.test.circle2 is traj.test)
def test_wildcard_search(self):
    """Wildcard ('$'/crun/index) lookups must work in RAM and via auto-load."""
    traj = Trajectory(name='Testwildcard',
                      filename=make_temp_dir('wilcard.hdf5'),
                      add_time=True)
    traj.f_add_parameter('expl', 2)
    traj.f_explore({'expl': [1, 2, 3, 4]})
    traj.f_add_result('wc2test.$.hhh', 333)
    traj.f_add_leaf('results.wctest.run_00000000.jjj', 42)
    traj.f_add_result('results.wctest.run_00000001.jjj', 43)
    # Wildcard index -1 maps to the run-ALL name.
    traj.f_add_result('results.wctest.%s.jjj' % traj.f_wildcard('$', -1), 43)
    traj.v_crun = 1
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    traj.f_store()
    # Repeat the lookups with the data removed from RAM so that each access
    # has to go through auto-loading.
    get_root_logger().info('Removing child1')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('Doing auto-load')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest['$'].jjj == 43)
    self.assertTrue(traj.results.wc2test.crun.hhh == 333)
    get_root_logger().info('Removing child2')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[-1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Removing child3')
    traj.f_remove_child('results', recursive=True)
    get_root_logger().info('auto-loading')
    traj.v_auto_load = True
    self.assertTrue(traj.results.wctest[1].jjj == 43)
    self.assertTrue(traj.results.wc2test[-1].hhh == 333)
    get_root_logger().info('Done with wildcard test')
def test_compacting(self):
    """Delete data from an HDF5 file, run ptrepack compaction, and verify the
    file actually shrinks while a backup keeps the original size."""
    filename = make_temp_dir('hdf5compacting.hdf5')
    traj = Trajectory(name=make_trajectory_name(self), filename=filename)
    trajname = traj.v_name
    traj.v_storage_service.complevel = 7
    first_row = {'ha': compat.tobytes('hi'), 'haha': np.zeros((3, 3))}
    traj.f_store(only_init=True)
    traj.f_add_result('My.Tree.Will.Be.Deleted', 42)
    traj.f_add_result('Mine.Too.HomeBoy', 42, comment='Don`t cry for me!')
    res = traj.f_add_result(SharedResult, 'myres')
    res['myres'] = SharedTable()
    res['myres'].create_shared_data(first_row=first_row)
    # Fill the shared table with many identical rows to inflate the file.
    with StorageContextManager(traj):
        traj.myres
        for irun in range(10000):
            row = traj.myres.row
            for key in first_row:
                row[key] = first_row[key]
            row.append()
    traj.f_store()
    del traj
    traj = load_trajectory(name=trajname, filename=filename, load_all=2)
    # Remove most rows and two whole subtrees; space is not reclaimed yet.
    with StorageContextManager(traj) as cm:
        tb = traj.myres.get_data_node()
        ptcompat.remove_rows(tb, 1000, 10000)
        cm.flush_store()
        self.assertTrue(traj.myres.nrows == 1001)
    traj.f_delete_item(traj.My, recursive=True)
    traj.f_delete_item(traj.Mine, recursive=True)
    size = os.path.getsize(filename)
    get_root_logger().info('Filesize is %s' % str(size))
    name_wo_ext, ext = os.path.splitext(filename)
    backup_file_name = name_wo_ext + '_backup' + ext
    code = compact_hdf5_file(filename, keep_backup=True)
    if code != 0:
        raise RuntimeError('ptrepack fail')
    # The backup is the uncompacted original; the new file must be smaller.
    backup_size = os.path.getsize(backup_file_name)
    self.assertTrue(backup_size == size)
    new_size = os.path.getsize(filename)
    get_root_logger().info('New filesize is %s' % str(new_size))
    self.assertTrue(new_size < size, "%s > %s" % (str(new_size), str(size)))
def test_file_renaming(self):
    """Log-file renaming expands $traj/$set/$run — 'ALL' placeholders when no
    run is selected, concrete indices once ``v_idx`` is set."""
    traj_name = 'test'
    traj = Trajectory('test', add_time=False)
    traj.f_add_parameter('x', 42)
    traj.f_explore({'x': [1, 2, 3]})
    rename_string = '$traj_$set_$run'
    solution_1 = 'test_run_set_ALL_run_ALL'
    solution_2 = 'test_run_set_00000_run_00000000'
    # No run selected yet -> 'ALL' wildcards.
    renaming_1 = rename_log_file(rename_string, traj)
    self.assertEqual(renaming_1, solution_1)
    # Selecting run 0 -> concrete set/run numbers.
    traj.v_idx = 0
    renaming_2 = rename_log_file(rename_string, traj)
    self.assertEqual(renaming_2, solution_2)