Beispiel #1
0
    def test_63_mpi_run_hyprid_from_disk(self):
        """Run the hybrid-from-disk simulation via an MPI subprocess and
        compare its spike statistics with an in-process reference run.

        BUG FIX: an early ``return`` previously sat before the comparison
        code, making both assertions unreachable dead code; it has been
        removed so the test actually verifies the MPI result.
        """
        data_path = self.home + '/results/unittest/my_population/mpi_run_hybrid_from_disk/'
        script_name = os.getcwd(
        ) + '/test_scripts_MPI/my_population_mpi_run_hybrid_from_disk.py'
        np = 4  # number of MPI processes to launch

        # Files used to exchange data with the MPI subprocess.
        fileName = data_path + 'data_in.pkl'
        fileOut = data_path + 'data_out.pkl'
        pickle_save([self.sim_time, self.args, self.kwargs], fileName)
        p = subprocess.Popen(
            [
                'mpirun', '-np',
                str(np), 'python', script_name, fileName, fileOut, data_path
            ],
            # stdout left attached so mpirun output shows in the test log.
            stderr=subprocess.STDOUT)
        out, err = p.communicate()

        # Reference result computed in-process.
        l = self.sim_group().get_spike_signal()
        mr0 = l.mean_rate()
        fr0 = l.firing_rate(1)

        # Result produced by the MPI run, loaded back from disk.
        g = pickle_load(fileOut)
        ss = g.get_spike_signal()
        mr1 = ss.mean_rate()
        fr1 = ss.firing_rate(1)

        self.assertEqual(mr0, mr1)
        self.assertListEqual(list(fr0), list(fr1))
Beispiel #2
0
    def test_63_mpi_run_hyprid_from_disk(self):
        """Compare a hybrid-from-disk MPI simulation against an
        in-process run of the same group.

        BUG FIX: removed the stray ``return`` that preceded the
        comparison block — it made the two assertions unreachable.
        """
        data_path = self.home + '/results/unittest/my_population/mpi_run_hybrid_from_disk/'
        script_name = os.getcwd() + '/test_scripts_MPI/my_population_mpi_run_hybrid_from_disk.py'
        np = 4  # MPI process count

        # Exchange files written for / produced by the subprocess.
        fileName = data_path + 'data_in.pkl'
        fileOut = data_path + 'data_out.pkl'
        pickle_save([self.sim_time, self.args, self.kwargs], fileName)
        p = subprocess.Popen(['mpirun', '-np', str(np), 'python',
                              script_name, fileName, fileOut, data_path],
                             stderr=subprocess.STDOUT)
        out, err = p.communicate()

        # In-process reference statistics.
        l = self.sim_group().get_spike_signal()
        mr0 = l.mean_rate()
        fr0 = l.firing_rate(1)

        # Statistics from the MPI subprocess, read back from disk.
        g = pickle_load(fileOut)
        ss = g.get_spike_signal()
        mr1 = ss.mean_rate()
        fr1 = ss.firing_rate(1)

        self.assertEqual(mr0, mr1)
        self.assertListEqual(list(fr0), list(fr1))
Beispiel #3
0
def run_data_base_dump(run, net, script_name, net_name, category, file_name,
                       **kwargs):
    """Run a simulation and pickle one row of metadata for a DB insert.

    Parameters
    ----------
    run : callable
        Executed as ``run(net)``; its return value is passed through.
    net : network object
        Must expose ``par.dic`` containing ``netw.size`` and
        ``simu.sim_time``.
    script_name : str
        Either ``'simulation/script'`` or a bare simulation name.
    net_name, category : str
        Stored verbatim in the dump; ``net_name`` also names the file.
    file_name : str
        Directory the ``<net_name>.db_dump`` pickle is written into.
    **kwargs
        ``database_name`` (default ``'inhibition'``),
        ``database_table`` (default ``'simulations'``),
        ``local_num_threads`` (default: value from the NEST kernel).

    Returns
    -------
    Whatever ``run(net)`` returns.
    """
    import nest
    ks = nest.GetKernelStatus()

    db_name = kwargs.get('database_name', 'inhibition')
    # BUG FIX: this previously read 'database_name' again, so the table
    # name could never be overridden through kwargs.
    db_table = kwargs.get('database_table', 'simulations')
    pp(ks['local_num_threads'])
    # Need to supply explicitly; not picked up correctly by NEST.
    lnt = kwargs.get('local_num_threads', ks['local_num_threads'])

    t = time.time()
    dd = run(net)

    # Pickled blob of all network parameters ('default_params', bytea).
    par_data = cPickle.dumps(net.par.dic, -1)

    keys_db = [
        'computer',  #varchar
        'category',
        'default_params',  #bytea
        'duration',  #float
        'local_num_threads',  #int
        'net_name',  #varchar
        'num_processes',  #int
        'script',  #varchar
        'simulation',  #varchar
        'size',  #int
        'simulation_time',  #float
        'total_num_virtual_procs'  #int
    ]

    # Split 'simulation/script'; both names are always bound so the
    # list below can never hit a NameError on an unexpected path shape.
    parts = script_name.split('/')
    if len(parts) >= 2:
        simulation, script = parts[0], '/'.join(parts[1:])
    else:
        simulation, script = parts[0], ''
    values_db = [
        my_socket.determine_computer(),
        category,
        #                psycopg2.Binary(par_data),
        par_data,
        float(round(time.time() - t, 1)),
        int(lnt),
        net_name,
        int(ks['num_processes']),
        script,
        simulation,
        int(net.par.dic['netw']['size']),
        float(net.par.dic['simu']['sim_time']),
        int(ks['num_processes'] * lnt),
    ]
    # Only 'default_params' (index 2) is stored as binary (bytea).
    to_binary = [False, False, True] + [False] * 9

    data = [db_name, db_table, keys_db, values_db, to_binary]
    data_to_disk.pickle_save(data,
                             file_name + '/' + net_name,
                             file_extension='.db_dump')
    return dd
Beispiel #4
0
def run_data_base_dump(run, net, script_name, net_name, category, file_name, **kwargs):
    """Execute ``run(net)`` and dump a database row description to disk.

    The pickled dump bundles the target database name/table, the column
    keys, their values, and a per-column binary flag; it is written as
    ``<file_name>/<net_name>.db_dump``.

    Keyword arguments: ``database_name`` (default ``'inhibition'``),
    ``database_table`` (default ``'simulations'``) and
    ``local_num_threads`` (default taken from the NEST kernel status).
    Returns the value produced by ``run(net)``.
    """
    import nest
    ks = nest.GetKernelStatus()

    db_name = kwargs.get('database_name', 'inhibition')
    # BUG FIX: was kwargs.get('database_name', ...) — a copy-paste error
    # that made the table name impossible to override.
    db_table = kwargs.get('database_table', 'simulations')
    pp(ks['local_num_threads'])
    # Need to supply explicitly; not picked up correctly by NEST.
    lnt = kwargs.get('local_num_threads', ks['local_num_threads'])

    t = time.time()
    dd = run(net)

    par_data = cPickle.dumps(net.par.dic, -1)

    keys_db = ['computer',            #varchar
               'category',
               'default_params',      #bytea
               'duration',            #float
               'local_num_threads',   #int
               'net_name',            #varchar
               'num_processes',       #int
               'script',              #varchar
               'simulation',
               'size',                #int
               'simulation_time',     #float
               'total_num_virtual_procs'  #int
               ]

    # Derive (simulation, script) from the path; guarantee both names
    # are bound even for unexpected inputs (avoids a NameError below).
    parts = script_name.split('/')
    if len(parts) >= 2:
        simulation, script = parts[0], '/'.join(parts[1:])
    else:
        simulation, script = parts[0], ''
    values_db = [my_socket.determine_computer(),
                 category,
#                psycopg2.Binary(par_data), 
                 par_data,
                 float(round(time.time() - t, 1)),
                 int(lnt),
                 net_name,
                 int(ks['num_processes']),
                 script,
                 simulation,
                 int(net.par.dic['netw']['size']),
                 float(net.par.dic['simu']['sim_time']),
                 int(ks['num_processes'] * lnt),
                 ]
    # Only the 'default_params' column is binary.
    to_binary = [False, False, True] + [False] * 9

    data = [db_name, db_table, keys_db, values_db, to_binary]
    data_to_disk.pickle_save(data, file_name + '/' + net_name,
                             file_extension='.db_dump')
    return dd
Beispiel #5
0
def create_dummy_learned_weights(path_file, n_sets):
    """Generate ``n_sets`` uniform random weights in [0.5, 1.5) and
    pickle the resulting array to ``path_file``."""
    samples = [random.random() for _ in range(n_sets)]
    weights = numpy.array(samples) + 0.5
    data_to_disk.pickle_save(weights, path_file)
Beispiel #6
0
def create_dummy_learned_weights(path_file, n_sets):
    """Draw ``n_sets`` samples from ``random.random``, shift them by 0.5
    and save the array to ``path_file``."""
    draws = (random.random() for _ in range(n_sets))
    w = numpy.fromiter(draws, dtype=float, count=n_sets) + 0.5
    data_to_disk.pickle_save(w, path_file)
    def save_obj(self, obj):
        '''
        Persist *obj* together with the script's file name so the
        simulation script can load it later. This is the mechanism for
        passing parameters into the simulation script.
        '''
        script_file = self.p_script.split('/')[-1]
        data_to_disk.pickle_save([obj, script_file], self.p_par)
Beispiel #8
0
    def save(self):
        """Pickle the current parameters into a per-script, per-date
        directory, numbering the file by how many entries the directory
        already holds and tagging it with ``self.data_label``."""
        params = self.get_params_as_dic()
        target_dir = '/'.join([dr.HOME_DATA, self.script_name, self.date_time])

        if not os.path.isdir(target_dir):
            data_to_disk.mkdir(target_dir)

        count = len(os.listdir(target_dir))
        file_path = '%s/data_%s_%s' % (target_dir, str(count), self.data_label)
        data_to_disk.pickle_save(params, file_path)
Beispiel #9
0
    def save(self):
        """Save the parameter dict under ``HOME_DATA/script/date``,
        using the directory's current entry count as a running index."""
        d = self.get_params_as_dic()

        base = dr.HOME_DATA + '/' + self.script_name + '/' + self.date_time
        if not os.path.isdir(base):
            data_to_disk.mkdir(base)

        index = len(os.listdir(base))
        target = base + '/data_' + str(index) + '_' + self.data_label
        data_to_disk.pickle_save(d, target)
Beispiel #10
0
 def do_mpi(self, data_path, script_name, np):
     """Launch *script_name* under ``mpirun`` with *np* processes and
     compute reference spike statistics in-process.

     Returns a tuple ``(fileOut, mean_rate, firing_rate)`` where
     ``fileOut`` is the path the subprocess wrote its result to.
     """
     in_file = data_path + 'data_in.pkl'
     out_file = data_path + 'data_out.pkl'
     pickle_save([self.sim_time, self.args, self.kwargs], in_file)

     cmd = ['mpirun', '-np', str(np), 'python',
            script_name, in_file, out_file, data_path]
     proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
     out, err = proc.communicate()

     # In-process reference statistics for the caller to compare against.
     signal = self.sim_group().get_spike_signal()
     return out_file, signal.mean_rate(), signal.firing_rate(1)
Beispiel #11
0
 def do_mpi(self, data_path, script_name, np):
     """Run the MPI helper script and return ``(out_path, mr, fr)``:
     the subprocess output file plus the in-process mean rate and
     firing rate of the simulated group."""
     input_path = data_path + 'data_in.pkl'
     output_path = data_path + 'data_out.pkl'
     pickle_save([self.sim_time, self.args, self.kwargs], input_path)

     argv = ['mpirun', '-np', str(np), 'python', script_name,
             input_path, output_path, data_path]
     child = subprocess.Popen(argv, stderr=subprocess.STDOUT)
     out, err = child.communicate()

     spikes = self.sim_group().get_spike_signal()
     mr = spikes.mean_rate()
     fr = spikes.firing_rate(1)
     return output_path, mr, fr
Beispiel #12
0
from core_old.misc_folder import test_subprocess_fun

la = range(4)
lb = range(4)
threads = 2

paths = []

for i, v in enumerate(zip(la, lb)):
    for thread in range(threads):
        a, b = v
        c = test_subprocess_fun.fin().do
        data = [c, [a], {'b': b}]
        p = path + str(i)
        pickle_save(data, p + str(thread))
    paths.append(p)

processes = []
f = []
for i, p in enumerate(paths):

    #Popen do not block, call does

    f.append(open(path + 'out' + str(i), "wb", 0))
    f.append(open(path + 'err' + str(i), "wb", 0))
    #

    pr = subprocess.Popen([
        'mpirun', '-np',
        str(threads), 'python', 'test_subprocess_wrap_fun.py', p
Beispiel #13
0
 def _save(self):
     """Pickle (pre, post, sets) to the configured path, then block
     until every MPI rank has reached the barrier."""
     payload = [self.pre, self.post, self.sets]
     data_to_disk.pickle_save(payload, self.save['path'])
     comm.barrier()
Beispiel #14
0
 def _save(self):
     """Dump the connection data (pre, post, sets) and synchronise all
     MPI ranks before returning."""
     target = self.save['path']
     data_to_disk.pickle_save([self.pre, self.post, self.sets], target)
     comm.barrier()
Beispiel #15
0
from core_old.misc_folder import test_subprocess_fun
    
    
la=range(4)
lb=range(4)
threads=2   

paths=[]

for i, v in enumerate(zip(la,lb)):
    for thread in range(threads):
        a,b=v
        c=test_subprocess_fun.fin().do
        data=[c, [a], {'b':b}]
        p=path+str(i)
        pickle_save(data, p+str(thread))
    paths.append(p)
        
processes=[]
f=[]
for i, p in enumerate(paths):
    
    #Popen do not block, call does

    f.append(open(path+'out'+str(i), "wb", 0))
    f.append(open(path+'err'+str(i), "wb", 0))
#     
    
    pr=subprocess.Popen(['mpirun', 
                        '-np',
                        str(threads),