def test_63_mpi_run_hyprid_from_disk(self):
    """Run the hybrid-from-disk population script under mpirun and compare
    its pickled spike signal with an in-process reference run.

    Bug fix: the original returned (fileOut, mr0, fr0) *before* the
    comparison code, leaving every assertion unreachable dead code
    (apparently left over from inlining do_mpi). The early return is
    removed so the test actually asserts.
    NOTE(review): 'hyprid' in the method name is a typo for 'hybrid';
    kept unchanged to preserve the test id.
    """
    data_path = (self.home
                 + '/results/unittest/my_population/mpi_run_hybrid_from_disk/')
    script_name = (os.getcwd()
                   + '/test_scripts_MPI/my_population_mpi_run_hybrid_from_disk.py')
    n_procs = 4
    # os.environ['OMP_NUM_THREADS'] = '2'
    fileName = data_path + 'data_in.pkl'
    fileOut = data_path + 'data_out.pkl'
    # Hand the simulation parameters to the MPI script via a pickle file.
    pickle_save([self.sim_time, self.args, self.kwargs], fileName)
    p = subprocess.Popen(['mpirun', '-np', str(n_procs), 'python',
                          script_name, fileName, fileOut, data_path],
                         # stdout=subprocess.PIPE,
                         # stderr=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    out, err = p.communicate()
    # print out
    # print err
    # Reference: run the same simulation in-process.
    l = self.sim_group().get_spike_signal()
    mr0 = l.mean_rate()
    fr0 = l.firing_rate(1)
    # Result written by the MPI script.
    g = pickle_load(fileOut)
    ss = g.get_spike_signal()
    mr1 = ss.mean_rate()
    fr1 = ss.firing_rate(1)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def test_63_mpi_run_hyprid_from_disk(self):
    """Run the hybrid-from-disk population script under mpirun and compare
    its pickled spike signal with an in-process reference run.

    Bug fix: the original returned (fileOut, mr0, fr0) *before* the
    comparison code, leaving every assertion unreachable dead code
    (apparently left over from inlining do_mpi). The early return is
    removed so the test actually asserts.
    NOTE(review): 'hyprid' in the method name is a typo for 'hybrid';
    kept unchanged to preserve the test id.
    """
    data_path = (self.home
                 + '/results/unittest/my_population/mpi_run_hybrid_from_disk/')
    script_name = os.getcwd(
    ) + '/test_scripts_MPI/my_population_mpi_run_hybrid_from_disk.py'
    np = 4
    # os.environ['OMP_NUM_THREADS'] = '2'
    fileName = data_path + 'data_in.pkl'
    fileOut = data_path + 'data_out.pkl'
    # Hand the simulation parameters to the MPI script via a pickle file.
    pickle_save([self.sim_time, self.args, self.kwargs], fileName)
    p = subprocess.Popen(
        ['mpirun', '-np', str(np), 'python',
         script_name, fileName, fileOut, data_path],
        # stdout=subprocess.PIPE,
        # stderr=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    out, err = p.communicate()
    # print out
    # print err
    # Reference: run the same simulation in-process.
    l = self.sim_group().get_spike_signal()
    mr0 = l.mean_rate()
    fr0 = l.firing_rate(1)
    # Result written by the MPI script.
    g = pickle_load(fileOut)
    ss = g.get_spike_signal()
    mr1 = ss.mean_rate()
    fr1 = ss.firing_rate(1)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def save_to_database(path_results): l=os.listdir(path_results) db=None for fn in l: path=path_results+fn if not os.path.isdir(path): continue if not fn.split('/')[-1][:6]=='script': continue ll=os.listdir(path) for fn2 in ll: if fn2.split('.')[-1]!='db_dump': continue data=data_to_disk.pickle_load(path+'/'+fn2, '.db_dump') db_name, db_table, keys_db, values_db, to_binary=data to_binary=data[-1] values_db=[psycopg2.Binary(a) if tb else a for a,tb in zip(values_db,to_binary)] s='Writing to database {} table {} for {}' print s.format(db_name, db_table, fn2) db=psql.insert(db_name, db_table, keys_db, values_db, db) print 'Removing '+path+'/'+fn2 db.close()
def test_61_mpi_run(self):
    """Run the my_population MPI script via do_mpi and verify that the
    pickled mean rate and firing rate match the reference values."""
    data_path = self.home + '/results/unittest/my_population/mpi_run/'
    script_name = (os.getcwd()
                   + '/test_scripts_MPI/my_population_mpi_run.py')
    n_procs = 4
    fileOut, mr0, fr0 = self.do_mpi(data_path, script_name, n_procs)
    mr1, fr1 = pickle_load(fileOut)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def test_61_mpi_run(self):
    """Run the my_population MPI script via do_mpi and verify that the
    pickled mean rate and firing rate match the reference values."""
    data_path = self.home + '/results/unittest/my_population/mpi_run/'
    script_name = (os.getcwd()
                   + '/test_scripts_MPI/my_population_mpi_run.py')
    n_procs = 4
    fileOut, mr0, fr0 = self.do_mpi(data_path, script_name, n_procs)
    mr1, fr1 = pickle_load(fileOut)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def test_do_mpi_python(self):
    """Submit a job through Job_admin_mpi_python, wait for the
    subprocess and check its pickled result and log files."""
    host = 'mpi_python'
    admin = self.create_job_admin(Job_admin_mpi_python, host)
    admin.save_obj(self.create_obj(host))
    proc = do(*admin.get_subp_args(), **{'debug': False})
    proc.wait()
    result = data_to_disk.pickle_load(
        self.p_out_data(host) + '/data_out.pkl')
    self.assertListEqual(result, [1])
    for path in (admin.p_subp_out, admin.p_subp_err):
        self.assertTrue(os.path.isfile(path))
def _get_weights(self): x = self.weight if 'constant' == x['type']: return numpy.ones(self.n) * x['params'] if 'uniform' == x['type']: # low=min(x['params']['min'],1.0) # nest scheduler low = x['params']['min'] return numpy.random.uniform(low=low, high=x['params']['max'], size=self.n) if 'learned' == x['type']: weights = data_to_disk.pickle_load(x['path']) conns = numpy.zeros(self.n) for i, sl in enumerate(self.sets): conns[sl] = weights[i] return conns * x['params']
def _get_weights(self): x=self.weight if 'constant' == x['type']: return numpy.ones(self.n)*x['params'] if 'uniform' == x['type']: # low=min(x['params']['min'],1.0) # nest scheduler low=x['params']['min'] return numpy.random.uniform(low=low, high=x['params']['max'], size=self.n) if 'learned' == x['type']: weights=data_to_disk.pickle_load(x['path']) conns=numpy.zeros(self.n) for i, sl in enumerate(self.sets): conns[sl]=weights[i] return conns*x['params']
def test_62_mpi_run_from_disk(self):
    """MPI run that writes its spike signal to disk; compare rates with
    the in-process reference produced by do_mpi."""
    data_path = (self.home
                 + '/results/unittest/my_population/mpi_run_from_disk/')
    script_name = (os.getcwd()
                   + '/test_scripts_MPI/my_population_mpi_run_from_disk.py')
    n_procs = 4
    # Start from a clean directory so stale result files cannot mask
    # a failing run.
    for filename in os.listdir(data_path):
        candidate = data_path + '/' + filename
        if os.path.isfile(candidate):
            os.remove(candidate)
    fileOut, mr0, fr0 = self.do_mpi(data_path, script_name, n_procs)
    signal = pickle_load(fileOut)
    mr1 = signal.mean_rate()
    fr1 = signal.firing_rate(1)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def test_62_mpi_run_from_disk(self):
    """MPI run that writes its spike signal to disk; compare rates with
    the in-process reference produced by do_mpi."""
    data_path = (self.home
                 + '/results/unittest/my_population/mpi_run_from_disk/')
    script_name = (os.getcwd()
                   + '/test_scripts_MPI/my_population_mpi_run_from_disk.py')
    n_procs = 4
    # Start from a clean directory so stale result files cannot mask
    # a failing run.
    for filename in os.listdir(data_path):
        candidate = data_path + '/' + filename
        if os.path.isfile(candidate):
            os.remove(candidate)
    fileOut, mr0, fr0 = self.do_mpi(data_path, script_name, n_procs)
    signal = pickle_load(fileOut)
    mr1 = signal.mean_rate()
    fr1 = signal.firing_rate(1)
    self.assertEqual(mr0, mr1)
    self.assertListEqual(list(fr0), list(fr1))
def set(self, surfs, display_print=True): t=time.time() # Convergent connections when driver = target and pool = source. # The for each node driver not surfs from the pool is considered # Divergent connections when driver = source and pool = target. # Then for each node driver not surfs from the pool is considered driver=surfs[self.target] pool=surfs[self.source] if not (self.save['active'] and not self.save['overwrite'] and os.path.isfile(self.save['path'] +'.pkl')): self._set(driver, pool) self._save() else: d=data_to_disk.pickle_load(self.save['path'] ) self.pre=sparse.coo_matrix(d[0]) self.post=sparse.coo_matrix(d[1]) self.sets=d[2] # self.pre=numpy.array([[0]])#sparse.coo_matrix(d[0]) # self.post=numpy.array([[0]])#sparse.coo_matrix(d[1]) # self.sets=[0]#d[2] # t=time.time()-t if display_print and comm.rank()==0: s='Conn: {0:18} Connections: {1:8} Fan pool:{2:6} ({3:6}) Time:{4:5} sec Rule:{5}' a=[self.name, self.n, round(float(self.n)/driver.get_n(),0), self.fan_in, round(t,2), self.rule] print s.format(*a)
def test_do_batch(self):
    """Submit a batch job through Job_admin_batch, wait for it, then
    verify the pickled result and all expected log files."""
    host = 'batch'
    admin = self.create_job_admin(Job_admin_batch, host)
    admin.save_obj(self.create_obj(host))
    admin.gen_job_script()
    proc = do(*admin.get_subp_args(), **{'debug': False})
    proc.wait()
    time.sleep(1)  # give the batch run a moment to flush its output
    result = data_to_disk.pickle_load(
        self.p_out_data(host) + '/data_out.pkl')
    self.assertListEqual(result, [1])
    for path in (admin.p_subp_out, admin.p_subp_err, admin.p_tee_out):
        self.assertTrue(os.path.isfile(path))
def test_set_save_load_mpi(self): import subprocess rules = [ '1-1', 'all-all', 'set-set', 'set-not_set', 'all_set-all_set', 'divergent' ] data_path = self.path_base + 'set_save_load_mpi/' script_name = os.getcwd() + ('/test_scripts_MPI/' + 'structure_set_save_load_mpi.py') np = 1 p = subprocess.Popen( ['mpirun', '-np', str(np), 'python', script_name, data_path], # stdout=subprocess.PIPE, # stderr=subprocess.PIPE, stderr=subprocess.STDOUT, ) out, err = p.communicate() # print out # print err threads = np for i in range(threads): fileName = data_path + 'data' + str(i) l1, l2 = data_to_disk.pickle_load(fileName) print l1 print l2 self.assertListEqual(l1, l2) path_clear = data_path + '*' os.system('rm ' + path_clear + ' 2>/dev/null') path_clear = data_path + 'conn/*' os.system('rm ' + path_clear + ' 2>/dev/null')
def set(self, surfs, display_print=True): t = time.time() # Convergent connections when driver = target and pool = source. # The for each node driver not surfs from the pool is considered # Divergent connections when driver = source and pool = target. # Then for each node driver not surfs from the pool is considered driver = surfs[self.target] pool = surfs[self.source] if not (self.save['active'] and not self.save['overwrite'] and os.path.isfile(self.save['path'] + '.pkl')): self._set(driver, pool) self._save() else: d = data_to_disk.pickle_load(self.save['path']) self.pre = sparse.coo_matrix(d[0]) self.post = sparse.coo_matrix(d[1]) self.sets = d[2] # self.pre=numpy.array([[0]])#sparse.coo_matrix(d[0]) # self.post=numpy.array([[0]])#sparse.coo_matrix(d[1]) # self.sets=[0]#d[2] # t = time.time() - t if display_print and comm.rank() == 0: s = 'Conn: {0:18} Connections: {1:8} Fan pool:{2:6} ({3:6}) Time:{4:5} sec Rule:{5}' a = [ self.name, self.n, round(float(self.n) / driver.get_n(), 0), self.fan_in, round(t, 2), self.rule ] print s.format(*a)
def test_set_save_load_mpi(self): import subprocess rules=['1-1', 'all-all', 'set-set', 'set-not_set', 'all_set-all_set', 'divergent'] data_path= self.path_base+'set_save_load_mpi/' script_name=os.getcwd()+('/test_scripts_MPI/' +'structure_set_save_load_mpi.py') np=1 p=subprocess.Popen(['mpirun', '-np', str(np), 'python', script_name, data_path], # stdout=subprocess.PIPE, # stderr=subprocess.PIPE, stderr=subprocess.STDOUT, ) out, err = p.communicate() # print out # print err threads=np for i in range(threads): fileName= data_path+'data'+str(i) l1,l2=data_to_disk.pickle_load(fileName) print l1 print l2 self.assertListEqual(l1, l2) path_clear=data_path+'*' os.system('rm ' + path_clear + ' 2>/dev/null' ) path_clear=data_path+'conn/*' os.system('rm ' + path_clear + ' 2>/dev/null' )
'''
Created on Sep 29, 2014

@author: mikael

Script: loads a pickled (job object, script name) pair from the path
given as the first command-line argument and executes the job via
obj.do(), timing it with Stopwatch.
'''
import os
import sys

# Directory bookkeeping (not used further below in this view).
currdir = os.getcwd()
basedir = '/'.join(currdir.split('/')[:-1])

from core import data_to_disk
from core.misc import Stopwatch
# NOTE(review): presumably imported so the pickle below can resolve
# these classes/modules when unpickling — confirm before removing.
from core.parallel_excecution import Mockup_class
from scripts_inhibition import base_Go_NoGo_compete

import pprint
pp = pprint.pprint
# pp(sys.modules)

# First CLI argument: pickle file holding (job object, script name).
fileName = sys.argv[1]
print fileName
obj, script = data_to_disk.pickle_load(fileName)

# print 'Running '+str(obj)+' as ' + script
with Stopwatch('Running ' + str(obj) + ' as ' + script):
    obj.do()
'''
Created on Nov 12, 2014

@author: mikael

Script: loads a pickled single-FSN IF-curve result from a hard-coded
local path, pretty-prints it, plots it and shows the figure.
'''
from core.data_to_disk import pickle_load
import pprint
import pylab

pp = pprint.pprint

# Hard-coded path to the pickled IF-curve result.
path = ('/home/mikael/results/papers/inhibition/single'
        + '/single_FSN/IF/Net_2-FS-IF_curve-3935409.pkl')
d = pickle_load(path)
pp(d)
# The loaded object draws itself onto the supplied matplotlib axes.
d.plot(pylab.subplot(111))
pylab.show()
'''
Created on Jul 15, 2014

@author: mikael

Script: each MPI rank loads its own pickled (fun, args, kwargs) triple
and calls fun(*args, **kwargs). The base path comes from argv[1]; the
rank number is appended to pick this rank's file.
'''
from core.data_to_disk import pickle_load
from core.parallelization import comm

import sys

# Per-rank input file: <argv[1]><rank>.
path = sys.argv[1] + str(comm.rank())
# path='/home/mikael/git/bgmodel/core_old/misc_folder/test_subprocess/00'
print path

fun, args, kwargs = pickle_load(path)
fun(*args, **kwargs)
def test_do_milner(self): # kw={'hours':'00', # 'job_name':'lindahl_test_job', # 'minutes':'10', # 'path_sbatch_err':self.path_sbatch_err, # 'path_sbatch_out':self.path_sbatch_out, # 'path_tee_out':self.path_tee_out, # 'path_params':self.path_params, # 'path_script':self.path_script, # 'seconds':'00', # # 'threads':20 # } host = 'milner' cb = self.create_job_admin_milner(Job_admin_sbatch, host) obj = self.create_obj(host) cb.gen_job_script() args = cb.get_subp_args() # print self.path_sbatch_err # print self.path_sbatch_out # print self.path_tee_out # print self.path_params # print self.path_script # print self.path_bash # print self.path_bash0 cb.save_obj(obj) # # save_params(self.path_params, # # self.path_script, # # self.obj) # # args_call=generate_milner_bash_script(self.path_sbatch_err, # self.path_sbatch_out, # self.path_tee_out, # self.path_params, # self.path_script, # self.path_bash0, # self.path_bash, # **kwargs ) # p=do(self.path_subprocess_out, # self.path_subprocess_err, # args_call, # **kwargs) p = do(*args, **{'debug': False}) p.wait() # time.sleep(1) # if my_socket.determine_host()=='milner_login': print 'waiting 20 s' time.sleep(20) l = data_to_disk.pickle_load(self.p_out_data(host) + '/data_out.pkl') print cb.p_subp_out print cb.p_tee_out print cb.p_sbatch_out self.assertListEqual(l, [1]) self.assertTrue(os.path.isfile(cb.p_subp_out)) self.assertTrue(os.path.isfile(cb.p_subp_err)) self.assertTrue(os.path.isfile(cb.p_tee_out)) self.assertTrue(os.path.isfile(cb.p_sbatch_out)) self.assertTrue(os.path.isfile(cb.p_sbatch_err))
def test_do_milner(self): # kw={'hours':'00', # 'job_name':'lindahl_test_job', # 'minutes':'10', # 'path_sbatch_err':self.path_sbatch_err, # 'path_sbatch_out':self.path_sbatch_out, # 'path_tee_out':self.path_tee_out, # 'path_params':self.path_params, # 'path_script':self.path_script, # 'seconds':'00', # # 'threads':20 # } host='milner' cb=self.create_job_admin_milner(Job_admin_sbatch, host) obj=self.create_obj(host) cb.gen_job_script() args=cb.get_subp_args() # print self.path_sbatch_err # print self.path_sbatch_out # print self.path_tee_out # print self.path_params # print self.path_script # print self.path_bash # print self.path_bash0 cb.save_obj(obj) # # save_params(self.path_params, # # self.path_script, # # self.obj) # # args_call=generate_milner_bash_script(self.path_sbatch_err, # self.path_sbatch_out, # self.path_tee_out, # self.path_params, # self.path_script, # self.path_bash0, # self.path_bash, # **kwargs ) # p=do(self.path_subprocess_out, # self.path_subprocess_err, # args_call, # **kwargs) p=do(*args,**{'debug':False}) p.wait() # time.sleep(1) # if my_socket.determine_host()=='milner_login': print 'waiting 20 s' time.sleep(20) l=data_to_disk.pickle_load(self.p_out_data(host)+'/data_out.pkl') print cb.p_subp_out print cb.p_tee_out print cb.p_sbatch_out self.assertListEqual(l, [1]) self.assertTrue(os.path.isfile(cb.p_subp_out)) self.assertTrue(os.path.isfile(cb.p_subp_err)) self.assertTrue(os.path.isfile(cb.p_tee_out)) self.assertTrue(os.path.isfile(cb.p_sbatch_out)) self.assertTrue(os.path.isfile(cb.p_sbatch_err))
'''
Created on Sep 29, 2014

@author: mikael

Script: loads a pickled (job object, script name) pair from the path
given as the first command-line argument and executes the job via
obj.do(), timing it with Stopwatch.
'''
import os
import sys

# Directory bookkeeping (not used further below in this view).
currdir = os.getcwd()
basedir = '/'.join(currdir.split('/')[:-1])

from core import data_to_disk
from core.misc import Stopwatch
# NOTE(review): presumably imported so the pickle below can resolve
# these classes/modules when unpickling — confirm before removing.
from core.parallel_excecution import Mockup_class
from scripts_inhibition import base_Go_NoGo_compete

import pprint
pp = pprint.pprint
# pp(sys.modules)

# First CLI argument: pickle file holding (job object, script name).
fileName = sys.argv[1]
print fileName
obj, script = data_to_disk.pickle_load(fileName)

# print 'Running '+str(obj)+' as ' + script
with Stopwatch('Running ' + str(obj) + ' as ' + script):
    obj.do()