# --- Exemplo n.º 1 ---
'''

# MPI worker script: rank 0 loads pickled simulation arguments, they are
# broadcast to all ranks, every rank runs the simulation, and the firing
# statistics are pickled to the output file.
from toolbox.data_to_disk import pickle_save, pickle_load
# import cPickle as pickle
import sys
from toolbox.my_population import sim_group

# Necessary for pickle se
# http://stefaanlippens.net/pickleproblem
from toolbox.signal_processing import phases_diff

from toolbox.parallelization import comm, Barrier
# Command line: <input pickle> <output pickle>
fileName, fileOut =sys.argv[1:]

# Only rank 0 reads the input file; the Barrier keeps ranks in step.
with Barrier():
    if comm.rank()==0:   
        out=pickle_load(fileName, all_mpi=True) 
    else:
        out=None


# Distribute the loaded (sim_time, args, kwargs) tuple to every rank.
out=comm.bcast(out, root=0)
sim_time, args, kwargs=out

g=sim_group(sim_time, *args, **kwargs)

# Spike statistics: mean rate plus firing rate.
# NOTE(review): the meaning of the argument 1 to firing_rate is not
# visible from this fragment -- confirm against toolbox.
ss=g.get_spike_signal()
mr=ss.mean_rate()
fr=ss.firing_rate(1)

pickle_save([mr, fr], fileOut)
       ]     
# For every wiring rule, build the same connection twice and record the
# connection count of each build; with save.overwrite False the second
# build is expected to reuse the result saved by the first.
k={'fan_in':10.0}
l1=[]
l2=[]
for rule in rules:
    # `k` is mutated in place, so settings accumulate across iterations.
    k.update({'display':False,
              'rule':rule,
              'source':source.get_name(),
              'target':target.get_name(),
              'save':{'active':True,
                      'overwrite':False,
                     'path':path_conn+rule}})

    c1=Conn('n1_n2', **k)
    c1.set(surfs, display_print=False)
    
    l1.append(c1.n)
    # Second construction with identical settings.
    c2=Conn('n1_n2', **k)
    c2.set(surfs, display_print=False)
  
    l2.append(c2.n)
    
    
# Save both count lists under a per-rank file name (MPI-aware save).
data_to_disk.pickle_save( [l1, l2], 
                              path+'data'+str(comm.rank()), 
                              all_mpi=True)
    
   

Created on Sep 22, 2014

@author: mikael
'''
import numpy
import pickle
import sys

from toolbox.data_to_disk import mkdir
from toolbox.my_nest import collect_spikes_mpi
from toolbox.parallelization import comm
# Gather per-rank dummy spike data over MPI and pickle it on rank 0.
print(sys.argv)
fileName, = sys.argv[1:]

fileName += 'data'
# Per-rank interval: start = rank, end = rank + 1 (two-element arrays).
s, e = numpy.ones(2) * comm.rank(), numpy.ones(2) * comm.rank() + 1

# Collect the per-rank arrays onto rank 0.
s, e = collect_spikes_mpi(s, e)

# Make sure the output directory exists before writing.
mkdir('/'.join(fileName.split('/')[0:-1]))

if comm.rank() == 0:

    print('File name')
    print(fileName)

    # Normalize the extension, then always open the file.  The original
    # opened it only inside the extension branch, so `f` was undefined
    # (NameError) whenever fileName already ended in '.pkl', and the
    # handle was never closed.
    if 4 < len(fileName) and fileName[-4:] != '.pkl':
        fileName = fileName + '.pkl'
    with open(fileName, 'wb') as f:  # binary mode; closed automatically
        pickle.dump([s, e], f, -1)
# --- Exemplo n.º 4 ---
if __name__ == '__main__':
    # Milner-cluster unit-test driver: wipe previous output
    # directories/files (rank 0 only), recreate the data directory, and
    # point the NEST kernel at it.
    t = 10000.0
    currdir = os.getcwd()
    home = default_params.HOME + '/results/unittest/test_milner/simulation'
    # Script file name without directory or '.py' extension.
    script_name = __file__.split('/')[-1][0:-3]
    if len(sys.argv) > 1:
        datadir, datadir_sd = sys.argv[1:]

    else:
        datadir = home + '/nest'
        datadir_sd = home + '/spike_dic'

    data_path = '/'.join(datadir.split('/')[0:-1])
    # Remove stale output directories; only rank 0 touches the filesystem.
    for path in [datadir, datadir_sd]:
        if os.path.isdir(path):
            if comm.rank() == 0:
                shutil.rmtree(path)

    # Remove stale result files from a previous run.
    for path in [datadir_sd + '.pkl', datadir_sd + '.svg']:
        if os.path.isfile(path):
            if comm.rank() == 0:
                os.remove(path)

    if comm.rank() == 0:
        data_to_disk.mkdir(datadir)

    # Ensure the directory exists before any rank proceeds.
    comm.barrier()
    print comm.obj

    my_nest.SetKernelStatus({'data_path': datadir, 'overwrite_files': True})
# --- Exemplo n.º 5 ---
    'all_set-all_set',
]
k = {'fan_in': 10.0}
l1 = []
l2 = []
for rule in rules:
    # Layer the per-rule settings onto the shared base dict `k`
    # (mutated in place, so entries persist across iterations).
    save_opts = {
        'active': True,
        'overwrite': False,
        'path': path_conn + rule,
    }
    k.update({
        'display': False,
        'rule': rule,
        'source': source.get_name(),
        'target': target.get_name(),
        'save': save_opts,
    })

    # Build the connection twice with identical settings and record the
    # connection count of each build.
    for bucket in (l1, l2):
        conn = Conn('n1_n2', **k)
        conn.set(surfs, display_print=False)
        bucket.append(conn.n)

data_to_disk.pickle_save([l1, l2],
                         path + 'data' + str(comm.rank()),
                         all_mpi=True)
# --- Exemplo n.º 6 ---
# Smoke test for my_nest.Connect_DC: connect 10 iaf neurons all-to-all
# with random weights/delays, then print per-rank connection counts.
pp = pprint.pprint

path_out, = sys.argv[1:]

n = nest.Create('iaf_neuron', 10)
model = 'static_synapse'

# 100 random delays in [2, 3) and weights in [1, 2).
delays = numpy.random.random(10 * 10) + 2
weights = numpy.random.random(10 * 10) + 1

# Each neuron id repeated 10 times -> one target entry per source entry.
post = []
for _id in n:
    post += [_id] * 10

my_nest.Connect_DC(n * 10, post, weights, delays, model)
# nest.ConvergentConnect(n, n)
conn = nest.GetConnections(n)
stat = nest.GetStatus(conn)
# Stagger output so the ranks do not interleave their prints.
time.sleep(0.2 * comm.rank())
print 'hej', comm.rank(), len(conn), len(post)
# pp(stat)
# pp([[d['source'], d['target']] for d in stat ])
# print nest.GetKernelStatus().keys()
print nest.GetKernelStatus(['num_connections'])
comm.barrier()
# my_nest.Connect_DC(n,n,weights,delays,  model)

# nest_path, np,=sys.argv[1:]

# sim_group(nest_path, **{'total_num_virtual_procs':int(np)})
'''
Created on Sep 19, 2014

@author: mikael
'''
from toolbox import data_to_disk
from toolbox.parallelization import comm
import sys

# Round trip: load the pickled payload at <path>data.pkl and re-save it
# under a per-rank file name using the MPI-aware saver.
script, path = sys.argv
payload = data_to_disk.pickle_load(path + 'data.pkl')
data_to_disk.pickle_save(payload, path + 'data' + str(comm.rank()),
                         all_mpi=True)
'''

import numpy #just to not get segmentation fault
import sys

from toolbox.data_to_disk import pickle_save, pickle_load, mkdir
from toolbox.parallelization import (comm, Barrier, map_parallel,
                                     mockup_fun, mockup_fun_large_return_2,
                                     mockup_fun_large_return_1)
from toolbox import misc
# Threads handed to map_parallel on each rank.
np_local=2

# Command line: <input pickle> <output pickle> <data path>
fileName, fileOut, data_path =sys.argv[1:]

# Rank 0 loads the payload and the callable; the Barrier keeps ranks in step.
with Barrier():
    if comm.rank()==0:   
        out, mockup=pickle_load(fileName, all_mpi=True) 
    else:
        out=None
        mockup=None


# Share data and callable with every rank.
out=comm.bcast(out, root=0)
mockup=comm.bcast(mockup, root=0)

print comm.rank()

# Time the MPI-parallel map of `mockup` over the broadcast data.
with misc.Stopwatch('mpi'):
    a=map_parallel(mockup, out, out, **{'local_num_threads':np_local})  

pickle_save(a, fileOut)
'''

import numpy  #just to not get segmentation fault
import sys

from toolbox.data_to_disk import pickle_save, pickle_load, mkdir
from toolbox.parallelization import (comm, Barrier, map_parallel, mockup_fun,
                                     mockup_fun_large_return_2,
                                     mockup_fun_large_return_1)
from toolbox import misc
# Threads handed to map_parallel on each rank.
np_local = 2

# Command line: <input pickle> <output pickle> <data path>
fileName, fileOut, data_path = sys.argv[1:]

# Rank 0 loads the payload and the callable; the Barrier keeps ranks in step.
with Barrier():
    if comm.rank() == 0:
        out, mockup = pickle_load(fileName, all_mpi=True)
    else:
        out = None
        mockup = None

# Share data and callable with every rank.
out = comm.bcast(out, root=0)
mockup = comm.bcast(mockup, root=0)

print comm.rank()

# Time the MPI-parallel map of `mockup` over the broadcast data.
with misc.Stopwatch('mpi'):
    a = map_parallel(mockup, out, out, **{'local_num_threads': np_local})

pickle_save(a, fileOut)
Created on Sep 22, 2014

@author: mikael
'''
import numpy
import pickle
import sys

from toolbox.data_to_disk import mkdir
from toolbox.my_nest import collect_spikes_mpi
from toolbox.parallelization import comm
# Gather per-rank dummy spike data over MPI; rank 0 prepares the output
# file name.
print sys.argv
fileName, =sys.argv[1:]

fileName+='data'
# Per-rank interval: start = rank, end = rank + 1 (two-element arrays).
s,e=numpy.ones(2)*comm.rank(),numpy.ones(2)*comm.rank()+1


# Collect the per-rank arrays onto rank 0.
s, e= collect_spikes_mpi(s, e)

# Make sure the output directory exists.
mkdir('/'.join(fileName.split('/')[0:-1]))  

if comm.rank()==0:
    
    print 'File name'
    print fileName
    
    # NOTE(review): `f` is bound only when the extension check fires, and
    # this fragment ends without ever writing to or closing it -- it looks
    # truncated; the companion copy of this snippet pickles [s, e] into f.
    if 4<len(fileName) and fileName[-4:]!='.pkl':
        fileName=fileName+'.pkl'
        f=open(fileName, 'wb') #open in binary mode
         
# --- Exemplo n.º 11 ---
# Smoke test for my_nest.Connect_DC: connect 10 iaf neurons all-to-all
# with random weights/delays, then print per-rank connection counts.
path_out,=sys.argv[1:]

n=nest.Create('iaf_neuron', 10)
model='static_synapse'

# 100 random delays in [2, 3) and weights in [1, 2).
delays=numpy.random.random(10*10)+2
weights=numpy.random.random(10*10)+1
    
# Each neuron id repeated 10 times -> one target entry per source entry.
post=[]
for _id in n:
    post+=[_id]*10 

my_nest.Connect_DC(n*10, post,weights, delays,  model)
# nest.ConvergentConnect(n, n)
conn=nest.GetConnections(n)
stat=nest.GetStatus(conn)
# Stagger output so the ranks do not interleave their prints.
time.sleep(0.2*comm.rank())
print 'hej', comm.rank(), len(conn), len(post)
# pp(stat)
# pp([[d['source'], d['target']] for d in stat ])
# print nest.GetKernelStatus().keys()
print nest.GetKernelStatus(['num_connections'])
comm.barrier()
# my_nest.Connect_DC(n,n,weights,delays,  model)


# nest_path, np,=sys.argv[1:]

# sim_group(nest_path, **{'total_num_virtual_procs':int(np)})