Example 1
def epoch(*args):
    obj, kw = args

    path_results = kw.get('path_results')

    if not os.path.isdir(path_results):
        data_to_disk.mkdir(path_results)

    ja = kw.get('job_admin')(**kw)   # job administration object
    wp = kw.get('wrapper_process')   # class that wraps the launched process

    ja.save_obj(obj)
    ja.gen_job_script()
    args = ja.get_subp_args()

    p = do(*args, **kw)
    ja.process = p

    job_id = ja.get_job_id()

    script_name = obj.get_name()

    p = wp(p, job_id, script_name)
    return p
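Note: every snippet in this listing goes through data_to_disk.mkdir, whose implementation is not shown here. The following is only a minimal sketch of what such a helper presumably does (an os.makedirs wrapper that tolerates an already existing directory); the behaviour is an assumption, not the project's actual code.

import errno
import os


def mkdir(path):
    # Assumed stand-in for core.data_to_disk.mkdir: create the directory
    # (including parents) and ignore the error if it already exists.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise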
Example 2
def do(*args, **kwargs):

    path_out, path_err = args[0:2]
    args_call = args[2:]

    # Make sure the directory that receives stdout/stderr exists
    path = '/'.join(path_out.split('/')[0:-1])
    if not os.path.isdir(path):
        data_to_disk.mkdir(path)

    if kwargs.get('debug', False):
        p = subprocess.Popen(args_call, stderr=subprocess.STDOUT)
    else:
        print args_call
        # Files that receive the subprocess' stdout and stderr (unbuffered)
        f_out = open(path_out, "wb", 0)
        f_err = open(path_err, "wb", 0)
        p = subprocess.Popen(args_call,
                             stdout=f_out,
                             stderr=f_err)
        f_out.close()
        f_err.close()

    # p.communicate() could be called here so that the process finishes
    # before evaluation in the test script.

    return p
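Note: a hypothetical call illustrating the positional layout do() expects (stdout path, stderr path, then the command and its arguments), assuming the surrounding module provides the os, subprocess and data_to_disk imports. Every path and command below is made up for illustration.

# Hypothetical usage of do(); all paths and the command are illustrative only.
p = do('/tmp/run/std/out0001',          # file receiving stdout
       '/tmp/run/std/err0001',          # file receiving stderr
       'python', 'simulation.py', '/tmp/run/params/run0001.pkl',
       debug=False)
p.wait()  # block until the subprocess finishes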
Example 3
 def setUp(self):
     from core import data_to_disk
     import os
     
     self.path='/home/mikael/results/papers/inhibition/network/unittest/plot_settings/'
     
     if not os.path.isdir(self.path):
         data_to_disk.mkdir(self.path)
Example 4
    def setUp(self):
        from core import data_to_disk
        import os

        self.path = '/home/mikael/results/papers/inhibition/network/unittest/plot_settings/'

        if not os.path.isdir(self.path):
            data_to_disk.mkdir(self.path)
Example 5
    def setUp(self):
        from core.network import default_params
        from core import data_to_disk

        self.path = dr.HOME + '/results/unittest/my_nest/'
        self.path_nest = self.path + 'nest/'
        data_to_disk.mkdir(self.path_nest)

        ResetKernel()
Example 6
 def setUp(self):
     from core.network import default_params
     from core import data_to_disk
     
     self.path=dr.HOME+'/results/unittest/my_nest/'
     self.path_nest=self.path+'nest/'
     data_to_disk.mkdir(self.path_nest)
     
     ResetKernel()
Example 7
def get_path_nest(script_name, keys, par=None):
    if not par:
        par = default_params.Inhibition()
    path = par.get_path_data()
    file_name = path + script_name + '/' + '_'.join(keys) + '/nest/'
    #     file_name = home + '/results/papers/inhibition/network/' + script_name
    data_to_disk.mkdir(file_name)

    return file_name
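Note: a hypothetical call to get_path_nest; the script name and keys are made-up values. The returned directory is created on disk as a side effect of the data_to_disk.mkdir call above.

# Hypothetical usage; 'my_script' and the keys are illustrative only.
path_nest = get_path_nest('my_script', ['net_0', 'dop_0.8'])
# -> <par.get_path_data()>/my_script/net_0_dop_0.8/nest/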
Example 8
    def test_41_phase_diff_mpi(self):
        from core.data_to_disk import mkdir
        
        import pickle
        import os
        import subprocess
        fs=1000.0
        kwargs = get_kwargs_phase_diff(fs)
         
        n_pop, sim_time=10, 500.0 
        x=dummy_data_pop(n_pop, **{'fs':fs,
                                   'scale':0.5,'sim_time':sim_time})
        y=dummy_data_pop(n_pop, **{'fs':fs,
                                   'scale':0.5,'sim_time':sim_time,
                                   'shift':0.})
        kwargs['inspect']=False
        kwargs['local_num_threads']=2

        
        data_path= self.home+('/results/unittest/signal_processing'
                         +'/signal_processing_phase_diff_mpi/')
        script_name=os.getcwd()+('/test_scripts_MPI/'
                                 +'signal_processing_phase_diff_mpi.py')
        
        fileName=data_path+'data_in.pkl'
        fileOut=data_path+'data_out.pkl'
        mkdir(data_path)
        
        f=open(fileName, 'wb') # open in binary mode
        pickle.dump([x,y, kwargs], f, -1)
        f.close()
                
        np=4        
        p=subprocess.Popen(['mpirun', '-np', str(np), 'python', 
                            script_name, fileName, fileOut],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE
#                             stderr=subprocess.STDOUT
                            )
        
        out, err = p.communicate()
#         print out
#         print err
        kwargs['local_num_threads']=2*4
        p0=phases_diff(x, y,  **kwargs)   
        
        f=open(fileOut, 'rb') #open in binary mode
        p1=pickle.load(f)
        f.close()
        l0=numpy.round(p0.ravel(),2)
        l1=numpy.round(p1.ravel(),2)
        
        # abs() since the order of signal comparisons is random,
        # see phase_diff
        l0,l1=list(numpy.abs(l0)), list(numpy.abs(l1))
        self.assertListEqual(l0,l1 )
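Note: the test above launches test_scripts_MPI/signal_processing_phase_diff_mpi.py under mpirun and talks to it purely through two pickle files. The real worker script is not shown in this listing; the following is only a sketch of the assumed pickle-in / pickle-out protocol, with a placeholder in place of the project's phases_diff computation.

# Sketch of the assumed file protocol used by the MPI worker script; the real
# script and the phases_diff call are project code not shown here.
import pickle
import sys


def run_worker(compute):
    path_in, path_out = sys.argv[1], sys.argv[2]
    with open(path_in, 'rb') as f:
        x, y, kwargs = pickle.load(f)
    result = compute(x, y, **kwargs)   # phases_diff(x, y, **kwargs) in the real script
    with open(path_out, 'wb') as f:
        pickle.dump(result, f, -1)


if __name__ == '__main__':
    run_worker(lambda x, y, **kw: (x, y))   # placeholder computation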
Example 9
 def setUp(self):
     self.home = expanduser("~")
     self.n = 12
     self.n_sets = 3
     self.args = ['unittest']
     self.kwargs = default_kwargs_net(self.n, self.n_sets)
     self.sim_time = 10000.
     dp = self.home + '/results/unittest/my_population/nest/'
     data_to_disk.mkdir(dp)
     my_nest.ResetKernel(display=False, data_path=dp)
     my_nest.SetKernelStatus({'overwrite_files': True})
Example 10
 def setUp(self):
     self.home=expanduser("~")
     self.n=12
     self.n_sets=3
     self.args=['unittest']
     self.kwargs=default_kwargs_net(self.n, self.n_sets)
     self.sim_time=10000.
     dp=self.home+'/results/unittest/my_population/nest/'
     data_to_disk.mkdir(dp)
     my_nest.ResetKernel(display=False, data_path=dp)
     my_nest.SetKernelStatus({'overwrite_files':True})
Example 11
    def save(self):

        d=self.get_params_as_dic()
        
        path=dr.HOME_DATA+'/'+self.script_name+'/'+self.date_time
        
        if not os.path.isdir(path):
            data_to_disk.mkdir(path)
        
        l=os.listdir(path)
        n=len(l)
        
        data_to_disk.pickle_save(d, path+'/data_'+str(n)+'_'+self.data_label)
Example 12
    def save(self):

        d = self.get_params_as_dic()

        path = dr.HOME_DATA + '/' + self.script_name + '/' + self.date_time

        if not os.path.isdir(path):
            data_to_disk.mkdir(path)

        l = os.listdir(path)
        n = len(l)

        data_to_disk.pickle_save(
            d, path + '/data_' + str(n) + '_' + self.data_label)
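Note: both versions of save() delegate to data_to_disk.pickle_save, which is not shown in this listing. A minimal sketch follows, under the assumption that it pickles the object to the given path after ensuring the parent directory exists; the file extension and pickle protocol are guesses, not the project's actual choices.

import os
import pickle


def pickle_save(obj, path, file_extension='.pkl'):
    # Assumed stand-in for core.data_to_disk.pickle_save: make sure the parent
    # directory exists, then pickle obj to path + file_extension.
    parent = os.path.dirname(path)
    if parent and not os.path.isdir(parent):
        os.makedirs(parent)
    with open(path + file_extension, 'wb') as f:
        pickle.dump(obj, f, -1)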
Example 13
    def __init__(self, **kw):

        index = kw.get('index')  # simulation index
        pr = kw.get('path_results')
        self.num_mpi_task = kw.get('num-mpi-task')

        self.p_subp_out = pr + '/std/subp/out{0:0>4}'.format(index)
        self.p_subp_err = pr + '/std/subp/err{0:0>4}'.format(index)
        self.p_par = pr + '/params/run{0:0>4}.pkl'.format(index)
        self.p_script = (dr.HOME_CODE
                         + '/core_old/core/parallel_excecution/simulation.py')

        data_to_disk.mkdir('/'.join(self.p_subp_out.split('/')[0:-1]))

        for key, value in kw.items():
            self.__dict__[key] = value
Example 14
    def test_5_load_from_disk(self):
        from os.path import expanduser
        s = expanduser("~")
        s= s+'/results/unittest/my_population'
        data_to_disk.mkdir(s)
        my_nest.SetKernelStatus({'local_num_threads':2,
                                 'data_path':s,
                                 'overwrite_files': True,})
         
        self.kwargs['sd']['params'].update({'to_memory':False, 
                                            'to_file':True})
         
        g=self.sim_group().get_spike_signal()
        g[0].firing_rate(1, display=True)
#         pylab.show()  
        for filename in os.listdir(s):
            if filename.endswith(".gdf"):
                os.remove(s+'/'+filename)        
Example 15
    def setUp(self):
        global subprocess
        subprocess=Mockup_subprocess
#         print data0()
        self.path=dr.HOME+'/results/unittest/job_handler'
        
        if not os.path.isdir(self.path):
            data_to_disk.mkdir(self.path)
        if not os.path.isfile(self.path+'/data0'):
            data_to_disk.txt_save(data0(), self.path+'/data0', file_extension='')
        if not os.path.isfile(self.path+'/data1'):
            data_to_disk.txt_save(data1(), self.path+'/data1', file_extension='')
        if not os.path.isfile(self.path+'/data2'):
            data_to_disk.txt_save(data2(), self.path+'/data2', file_extension='')
        if not os.path.isfile(self.path+'/data3'):
            data_to_disk.txt_save(data3(), self.path+'/data3', file_extension='')   
                      
        p_list=[Mockup_process('1',None), Mockup_process('2',None), Mockup_process('3',None)]
        jobs=[28372, 28373, 28374]
        names=['Net_0', 'Net_1', 'Net_2']
        
        wp_list=[Mockup_wrap_process_milner(p,j,n) for p,j,n in zip(p_list, jobs,names)]
        kw={'p_list':wp_list,
            'loop_time':1,
            'log_to_file':True,
            'log_file_name':os.getcwd()+'/job_handler/log'}
        
        self.obj_milner=Handler(**kw)  
        
        p_list=[Mockup_process('1',None), Mockup_process('2',None), Mockup_process('3',None)]
        wp_list=[Mockup_wrap_process_batch(p,j,n) for p,j,n in zip(p_list, jobs,names)]
        kw={'p_list':wp_list,
            'loop_time':1,
            'log_to_file':True,
            'log_file_name':os.getcwd()+'/job_handler/log'}
        
        self.obj_super=Handler(**kw)  
              
        # ** expands the dict, matching the Handler(**kw) calls above
        self.obj_empty=Handler(**{'p_list':[],
                                  'loop_time':1,
                                  'log_to_file':True,
                                  'log_file_name':os.getcwd()+'/job_handler/log'})
Example 16
    def test_5_load_from_disk(self):
        from os.path import expanduser
        s = expanduser("~")
        s = s + '/results/unittest/my_population'
        data_to_disk.mkdir(s)
        my_nest.SetKernelStatus({
            'local_num_threads': 2,
            'data_path': s,
            'overwrite_files': True,
        })

        self.kwargs['sd']['params'].update({
            'to_memory': False,
            'to_file': True
        })

        g = self.sim_group().get_spike_signal()
        g[0].firing_rate(1, display=True)
        #         pylab.show()
        for filename in os.listdir(s):
            if filename.endswith(".gdf"):
                os.remove(s + '/' + filename)
Example 17
def loop(*args, **kwargs):
    n, m_list, args_list, kwargs_list = args

    db_save = kwargs_list[0].get('database_save', False)
    path_results = kwargs_list[0].get('path_results')

    log_file_name = path_results + '/std/job_handler_log'
    data_to_disk.mkdir(path_results + '/std/')

    h = job_handler.Handler(loop_time=5,
                            log_to_file=True,
                            log_file_name=log_file_name,
                            **kwargs)

    for m in m_list:

        q = Queue.Queue()

        for _ in range(m):
            if not kwargs_list:
                continue
            k = kwargs_list.pop(0)
            if k['active']:
                q.put([args_list.pop(0), k])
            else:
                args_list.pop(0)

        h.loop_with_queue(n, q, epoch,
                          loop_print=True)

    if db_save:
        save_to_database(path_results)
Example 18
    def __init__(self, **kw):

        index = kw.get('index')  # simulation index
        pr = kw.get('path_results')

        self.p_subp_out = pr + '/std/subp/out{0:0>4}'.format(index)
        self.p_subp_err = pr + '/std/subp/err{0:0>4}'.format(index)
        self.p_tee_out = pr + '/std/tee/out{0:0>4}'.format(index)
        self.p_par = pr + '/params/run{0:0>4}.pkl'.format(index)
        self.p_script = (dr.HOME_CODE
                         + '/core_old/core/parallel_excecution/simulation.py')
        self.p_bash0 = (dr.HOME_CODE
                        + '/core_old/core/parallel_excecution/jobb0_supermicro.sh')
        self.p_bash = pr + '/jobbs/jobb_{0:0>4}.sh'.format(index)

        data_to_disk.mkdir('/'.join(self.p_subp_out.split('/')[0:-1]))
        data_to_disk.mkdir('/'.join(self.p_tee_out.split('/')[0:-1]))
        data_to_disk.mkdir('/'.join(self.p_bash.split('/')[0:-1]))

        for key, value in kw.items():
            self.__dict__[key] = value
Example 19
    def test_41_phase_diff_mpi(self):
        from core.data_to_disk import mkdir

        import pickle
        import os
        import subprocess
        fs = 1000.0
        kwargs = get_kwargs_phase_diff(fs)

        n_pop, sim_time = 10, 500.0
        x = dummy_data_pop(n_pop, **{
            'fs': fs,
            'scale': 0.5,
            'sim_time': sim_time
        })
        y = dummy_data_pop(
            n_pop, **{
                'fs': fs,
                'scale': 0.5,
                'sim_time': sim_time,
                'shift': 0.
            })
        kwargs['inspect'] = False
        kwargs['local_num_threads'] = 2

        data_path = self.home + ('/results/unittest/signal_processing' +
                                 '/signal_processing_phase_diff_mpi/')
        script_name = os.getcwd() + ('/test_scripts_MPI/' +
                                     'signal_processing_phase_diff_mpi.py')

        fileName = data_path + 'data_in.pkl'
        fileOut = data_path + 'data_out.pkl'
        mkdir(data_path)

        f = open(fileName, 'wb')  # open in binary mode
        pickle.dump([x, y, kwargs], f, -1)
        f.close()

        np = 4
        p = subprocess.Popen(
            [
                'mpirun', '-np',
                str(np), 'python', script_name, fileName, fileOut
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
            #                             stderr=subprocess.STDOUT
        )

        out, err = p.communicate()
        #         print out
        #         print err
        kwargs['local_num_threads'] = 2 * 4
        p0 = phases_diff(x, y, **kwargs)

        f = open(fileOut, 'rb')  #open in binary mode
        p1 = pickle.load(f)
        f.close()
        l0 = numpy.round(p0.ravel(), 2)
        l1 = numpy.round(p1.ravel(), 2)

        # abs() since the order of signal comparisons is random,
        # see phase_diff
        l0, l1 = list(numpy.abs(l0)), list(numpy.abs(l1))
        self.assertListEqual(l0, l1)
Example 20
from core.network import default_params
from core import my_nest
from core import misc
from core.my_population import MyNetworkNode
import pprint

pp = pprint.pprint

from core.network.manager import get_storage_list, save, load
from core import directories as dir
from core import data_to_disk
import os

path = dir.HOME_DATA + '/' + __file__.split('/')[-1][0:-3]
if not os.path.isdir(path):
    data_to_disk.mkdir(path)
par = default_params.Inhibition()
setup = Setup(50, 20)


def gs_builder(*args, **kwargs):
    import matplotlib.gridspec as gridspec
    n_rows = kwargs.get('n_rows', 2)
    n_cols = kwargs.get('n_cols', 1)
    order = kwargs.get('order', 'col')

    gs = gridspec.GridSpec(n_rows, n_cols)
    gs.update(wspace=kwargs.get('wspace', 0.1),
              hspace=kwargs.get('hspace', 0.1))

    iterator = [
Example 21
def main():

    record_from_models = [['CO', 'M1', 'M2', 'FS', 'GA', 'GI', 'ST', 'SN'],
                          [
                              'CO', 'M1', 'M2', 'F1', 'F2', 'GA', 'GI', 'ST',
                              'SN'
                          ]]
    labels = ['Control-h0', 'Control-h1']
    start = 1000.0
    stop = 1000.0 + 100. * 10.
    kwargs = {
        'kwargs_network': {
            'save_conn': False,
            'verbose': True
        },
        'par_rep': {
            'simu': {
                'threads': 2,
                'sd_params': {
                    'to_file': True,
                    'to_memory': False
                },
                'print_time': True,
                'start_rec': start,
                'stop_rec': stop,
                'sim_time': stop
            },
            'netw': {
                'size': 5000.0,
                'tata_dop': 0.8
            }
        }
    }
    use_class = [Bcpnn_h0, Bcpnn_h1]
    #Inhibition_no_parrot
    #for setup in setup_list: setup.extend([10000., 1000.0, 11000.0, Inhibition_no_parrot, {}])
    setup_list = []
    for l, uc in zip(labels, use_class):
        kwargs['class_network_construction'] = uc
        setup_list.append([l, deepcopy(kwargs)])

    nms = Network_models_dic(setup_list, Network_model)
    nms.simulate([0] * 2, labels, record_from_models)

    plot_models = [['CO', 'M1', 'M2', 'FS', 'GI', 'SN'],
                   ['M1', 'M2', 'F1', 'F2', 'GI', 'SN']]
    plot_lables_models = [[
        'Cortex', '$MSN_{D1}$', '$MSN_{D2}$', 'FSN', '$GPe_{Type I}$', 'SNr'
    ],
                          [
                              '$MSN_{D1}$', '$MSN_{D2}$', '$FSN_{1}$',
                              '$FSN_{2}$', '$GPe_{Type I}$', 'SNr'
                          ]]
    plot_lables_prefix_models = [[
        'State', 'Action', 'Action', 'Action', 'Action', 'Action'
    ], ['Action', 'Action', 'Action', 'Action', 'Action', 'Action']]
    figs = nms.show_bcpnn(labels,
                          plot_models,
                          xlim=[start, stop],
                          plot_lables_models=plot_lables_models,
                          plot_lables_prefix_models=plot_lables_prefix_models)

    pylab.show()
    i = 0

    for fig in figs:
        import os
        if not os.path.isdir(nms.path_pictures):
            data_to_disk.mkdir(nms.path_pictures)
        fig.savefig(nms.path_pictures + '-fig' + str(i) + '.svg', format='svg')
        i += 1
Example 22
chunked = chunks(cmd_lines, 4)

from subprocess import Popen

for cmd_lines in chunked:

    for number, line in enumerate(cmd_lines):

        script_name = line.split(' ')[1]

        newpath = script_name

        if not os.path.isdir('./' + newpath):
            data_to_disk.mkdir('./' + newpath)

        cmd = ('python /home/mikael/git/bgmodel/'
               + 'scripts_inhibition/do.py '
               + line.strip())
        print 'Running %r in %r' % (cmd, newpath)

        # One do.py subprocess per command line, logging to per-script files
        stdout = newpath + "/stdout.txt"
        stderr = newpath + '/stderr.txt'
        with open(stdout, "wb") as out, open(stderr, "wb") as err:
            Popen(cmd, shell=True, stdout=out, stderr=err)
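Note: the snippet above calls chunks(cmd_lines, 4) without defining it. A minimal sketch follows, assuming chunks simply yields consecutive slices of length n; the project's own helper may differ.

def chunks(seq, n):
    # Assumed helper: yield consecutive slices of length n from seq
    # (the last slice may be shorter).
    for i in range(0, len(seq), n):
        yield seq[i:i + n]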
Example 23
    def p_out_data(self, host):
        path = dr.HOME + '/results/unittest/parallel_excecution/' + host
        data_to_disk.mkdir(path)

        return path
Example 24
from core.network import default_params
from core import my_nest, my_population
from core import misc
from core.my_population import MyNetworkNode
from core.my_signals import Data_generic, Data_IF_curve, Data_scatter
import pprint
pp=pprint.pprint

from core.network.manager import get_storage_list, save, load
from core import directories as dr
from core import data_to_disk
import os

path=dr.HOME_DATA+'/'+__file__.split('/')[-1][0:-3]    
if not os.path.isdir(path):
    data_to_disk.mkdir(path)
par=default_params.Inhibition()
setup=Setup(50,20) 

def gs_builder(*args, **kwargs):
    import matplotlib.gridspec as gridspec
    n_rows=kwargs.get('n_rows',2)
    n_cols=kwargs.get('n_cols',1)
    order=kwargs.get('order', 'col')
    
    gs = gridspec.GridSpec(n_rows, n_cols)
    gs.update(wspace=kwargs.get('wspace', 0.1 ), 
              hspace=kwargs.get('hspace', 0.1 ))

    iterator = [[slice(0,1),slice(0,1)],
                [slice(0,1),slice(1,2)],
Example 25
def create_folders(l):
    for e in l:
        path = '/'.join(e.split('/')[0:-1])
        data_to_disk.mkdir(path)
        assert os.path.isdir(path), path + ' failed to be created'
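Note: a hypothetical call to create_folders; the file paths are made up for illustration. The function creates the parent directory of every listed file path and asserts that each one exists afterwards.

# Hypothetical usage; the file paths are illustrative only.
create_folders(['/tmp/run/std/out0001',
                '/tmp/run/params/run0001.pkl'])
# creates /tmp/run/std/ and /tmp/run/params/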
Example 26
    def p_out_data(self, host):
        path = dr.HOME + '/results/unittest/parallel_excecution/' + host
        data_to_disk.mkdir(path)

        return path
Example 27
    def setUp(self):
        global subprocess
        subprocess = Mockup_subprocess
        #         print data0()
        self.path = dr.HOME + '/results/unittest/job_handler'

        if not os.path.isdir(self.path):
            data_to_disk.mkdir(self.path)
        if not os.path.isfile(self.path + '/data0'):
            data_to_disk.txt_save(data0(),
                                  self.path + '/data0',
                                  file_extension='')
        if not os.path.isfile(self.path + '/data1'):
            data_to_disk.txt_save(data1(),
                                  self.path + '/data1',
                                  file_extension='')
        if not os.path.isfile(self.path + '/data2'):
            data_to_disk.txt_save(data2(),
                                  self.path + '/data2',
                                  file_extension='')
        if not os.path.isfile(self.path + '/data3'):
            data_to_disk.txt_save(data3(),
                                  self.path + '/data3',
                                  file_extension='')

        p_list = [
            Mockup_process('1', None),
            Mockup_process('2', None),
            Mockup_process('3', None)
        ]
        jobs = [28372, 28373, 28374]
        names = ['Net_0', 'Net_1', 'Net_2']

        wp_list = [
            Mockup_wrap_process_milner(p, j, n)
            for p, j, n in zip(p_list, jobs, names)
        ]
        kw = {
            'p_list': wp_list,
            'loop_time': 1,
            'log_to_file': True,
            'log_file_name': os.getcwd() + '/job_handler/log'
        }

        self.obj_milner = Handler(**kw)

        p_list = [
            Mockup_process('1', None),
            Mockup_process('2', None),
            Mockup_process('3', None)
        ]
        wp_list = [
            Mockup_wrap_process_batch(p, j, n)
            for p, j, n in zip(p_list, jobs, names)
        ]
        kw = {
            'p_list': wp_list,
            'loop_time': 1,
            'log_to_file': True,
            'log_file_name': os.getcwd() + '/job_handler/log'
        }

        self.obj_super = Handler(**kw)

        # ** expands the dict, matching the Handler(**kw) calls above
        self.obj_empty = Handler(
            **{
                'p_list': [],
                'loop_time': 1,
                'log_to_file': True,
                'log_file_name': os.getcwd() + '/job_handler/log'
            })