Example #1
File: helpers.py  Project: dguarino/SlowDyn
def run_simulation(sim,Params):
    print "Running Network"
    timer = Timer()
    timer.reset()
    sim.run(Params['run_time'])
    simCPUtime = timer.elapsedTime()
    print "Simulation Time: %s" % str(simCPUtime)
Example #2
def run_simulation(sim, params):
    print "Running Network ..."
    timer = Timer()
    timer.reset()
    sim.run(params['run_time'])
    simCPUtime = timer.elapsedTime()
    print "... The simulation took %s ms to run." % str(simCPUtime)
Example #3
    def setup(self, load_tuning_prop=False, times={}):

        self.projections = {}
        self.projections['ee'] = []
        self.projections['ei'] = []
        self.projections['ie'] = []
        self.projections['ii'] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='exc')        # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='inh')        # set the tuning properties of inh cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
            self.tuning_prop_inh = np.loadtxt(self.params['tuning_prop_inh_fn'])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print "Saving tuning_prop to file:", self.params['tuning_prop_inh_fn']
            np.savetxt(self.params['tuning_prop_inh_fn'], self.tuning_prop_inh)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times['t_all'] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(axes='xy', periodic_boundaries=((0., self.params['torus_width']), (0., self.params['torus_height'])))
Example #4
    def setup(self, load_tuning_prop=False):

        if load_tuning_prop:
            print 'Loading tuning properties from', self.params['tuning_prop_means_fn']
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
        else:
            print 'Preparing tuning properties with limited range....'
            x_range = (0, 1.)
            y_range = (0.2, .5)
            u_range = (.05, 1.0)
            v_range = (-.2, .2)
            tp_exc_good, tp_exc_out_of_range = utils.set_limited_tuning_properties(params, y_range, x_range, u_range, v_range, cell_type='exc')
            self.tuning_prop_exc = tp_exc_good
            print 'n_exc within range: ', tp_exc_good[:, 0].size
            print "Saving tuning_prop to file:", params['tuning_prop_means_fn']
            np.savetxt(params['tuning_prop_means_fn'], tp_exc_good)

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params['motion_params'], self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = {}
        # # # # # # # # # # # # 
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()
Example #5
def main_pynest(parameters):
    P = parameters
    assert P.sim_name == "pynest"
    timer = Timer()
    import nest
    timer.mark("import")

    nest.SetKernelStatus({"resolution": 0.1})
    timer.mark("setup")

    p = nest.Create("iaf_psc_alpha", n=P.n, params={"I_e": 1000.0})
    timer.mark("build")

    # todo: add recording and data retrieval
    nest.Simulate(P.sim_time)
    timer.mark("run")

    mpi_rank = nest.Rank()
    num_processes = nest.NumProcesses()

    data = P.as_dict()
    data.update(num_processes=num_processes, timings=timer.marks)
    return mpi_rank, data
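
main_pynest above expects a parameter object exposing sim_name, n, sim_time and an as_dict() method. A hypothetical stand-in (not part of the original benchmark code) for running it locally could look like:

class BenchmarkParameters(object):
    """Hypothetical stand-in for the benchmark's parameter object."""
    sim_name = "pynest"
    n = 100             # number of neurons
    sim_time = 1000.0   # simulation time in ms

    def as_dict(self):
        return {"sim_name": self.sim_name, "n": self.n, "sim_time": self.sim_time}

# rank, data = main_pynest(BenchmarkParameters())   # needs NEST and the module's Timer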
Example #6
def main_pynest(parameters):
    P = parameters
    assert P.sim_name == "pynest"
    timer = Timer()
    import nest
    timer.mark("import")

    nest.SetKernelStatus({"resolution": 0.1})
    timer.mark("setup")

    p = nest.Create("iaf_neuron", n=P.n, params={"I_e": 1000.0})
    timer.mark("build")

    # todo: add recording and data retrieval
    nest.Simulate(P.sim_time)
    timer.mark("run")

    mpi_rank = nest.Rank()
    num_processes = nest.NumProcesses()
    
    data = P.as_dict()
    data.update(num_processes=num_processes,
                timings=timer.marks)
    return mpi_rank, data
Example #7
def test_callback(data_input):
    global message
    message = data_input.actual.positions
    msg_list = list(message)

    #msg_list[0] = int(message[0].encode('hex'),16)
    #for i in
    #msg_list = int(message.encode('hex'),16)

    #print('============= Received image data.',message)
    rospy.loginfo('=====received data %r', msg_list[0])
    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt)  # 0.1ms

    pub = rospy.Publisher('/arm_controller/follow_joint_trajectory/goal',
                          FollowJointTrajectoryActionGoal,
                          queue_size=10)
    command = FollowJointTrajectoryActionGoal()
    command.header.stamp = rospy.Time.now()
    command.goal.trajectory.joint_names = ['elbow']
    point = JointTrajectoryPoint()
    point.positions = [rate_command / 10]
    point.time_from_start = rospy.Duration(1)
    command.goal.trajectory.points.append(point)
    pub.publish(command)
    rospy.loginfo('=====send command %r', command.goal.trajectory.points[0])

    print("now plotting the network---------------")
    rospy.loginfo('--------now plotting---------------')
    n_panels = sum(a.shape[1]
                   for a in pop_1_data.segments[0].analogsignalarrays) + 2
    plt.subplot(n_panels, 1, 1)
    plot_spiketrains(pop_1_data.segments[0])
    panel = 3
    for array in pop_1_data.segments[0].analogsignalarrays:
        for i in range(array.shape[1]):
            plt.subplot(n_panels, 1, panel)
            plot_signal(array, i, colour='bg'[panel % 2])
            panel += 1
    plt.xlabel("time (%s)" % array.times.units._dimensionality.string)
    plt.setp(plt.gca().get_xticklabels(), visible=True)  #
Example #8
def runBrunelNetwork(g=5., 
                     eta=2., 
                     dt = 0.1, 
                     simtime = 1000.0, 
                     delay = 1.5, 
                     epsilon = 0.1, 
                     order = 2500, 
                     N_rec = 50,
                     N_rec_v = 2, 
                     save=False, 
                     simulator_name='nest',
                     jnml_simulator=None,
                     extra = {}):

    exec("from pyNN.%s import *" % simulator_name) in globals()
    
    timer = Timer()

    # === Define parameters ========================================================

    downscale   = 1       # scale number of neurons down by this factor
                          # scale synaptic weights up by this factor to
                          # obtain similar dynamics independent of size
    order       = order   # determines size of network:
                          # 4*order excitatory neurons
                          # 1*order inhibitory neurons
    Nrec        = N_rec   # number of neurons to record from, per population
    epsilon     = epsilon # connectivity: proportion of neurons each neuron projects to

    # Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
    # here: Case C, asynchronous irregular firing, ~35 Hz
    eta         = eta     # rel rate of external input
    g           = g       # rel strength of inhibitory synapses
    J           = 0.1     # synaptic weight [mV]
    delay       = delay   # synaptic delay, all connections [ms]

    # single neuron parameters
    tauMem      = 20.0    # neuron membrane time constant [ms]
    tauSyn      = 0.1     # synaptic time constant [ms]
    tauRef      = 2.0     # refractory time [ms]
    U0          = 0.0     # resting potential [mV]
    theta       = 20.0    # threshold

    # simulation-related parameters
    simtime     = simtime   # simulation time [ms]
    dt          = dt     # simulation step length [ms]

    # seed for random generator used when building connections
    connectseed = 12345789
    use_RandomArray = True  # use Python rng rather than NEST rng

    # seed for random generator(s) used during simulation
    kernelseed  = 43210987

    # === Calculate derived parameters =============================================

    # scaling: compute effective order and synaptic strength
    order_eff = int(float(order)/downscale)
    J_eff     = J*downscale

    # compute neuron numbers
    NE = int(4*order_eff)  # number of excitatory neurons
    NI = int(1*order_eff)  # number of inhibitory neurons
    N  = NI + NE           # total number of neurons

    # compute synapse numbers
    CE   = int(epsilon*NE)  # number of excitatory synapses on neuron
    CI   = int(epsilon*NI)  # number of inhibitory synapses on neuron
    C    = CE + CI          # total number of internal synapses per n.
    Cext = CE               # number of external synapses on neuron

    # synaptic weights, scaled for alpha functions, such that
    # for constant membrane potential, charge J would be deposited
    fudge = 0.00041363506632638  # ensures dV = J at V=0

    # excitatory weight: JE = J_eff / tauSyn * fudge
    JE = (J_eff/tauSyn)*fudge

    # inhibitory weight: JI = - g * JE
    JI = -g*JE

    # threshold, external, and Poisson generator rates:
    nu_thresh = theta/(J_eff*CE*tauMem)
    nu_ext    = eta*nu_thresh     # external rate per synapse
    p_rate    = 1000*nu_ext*Cext  # external input rate per neuron (Hz)

    # number of synapses---just so we know
    Nsyn = (C+1)*N + 2*Nrec  # neurons * (internal synapses + 1 from the Poisson generator) + 2*Nrec synapses to the spike detectors
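    # Sanity check: with the default arguments (order=2500, g=5, eta=2, epsilon=0.1, N_rec=50)
    # the formulas above give
    #   NE = 10000, NI = 2500, N = 12500
    #   CE = 1000, CI = 250, C = 1250, Cext = 1000
    #   JE = (0.1 / 0.1) * 0.00041363506632638 ~ 4.14e-4, JI = -5 * JE ~ -2.07e-3
    #   nu_thresh = 20 / (0.1 * 1000 * 20) = 0.01, nu_ext = 0.02, p_rate = 20000 Hz
    #   Nsyn = 1251 * 12500 + 2 * 50 = 15,637,600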
    
    print('Created Brunel network with parameters')
    for p in ['NE','NI','downscale','order','N_rec','epsilon','eta','g','J','delay','tauMem','tauSyn','tauRef','U0','theta','simtime','dt', \
              'order_eff','J_eff','N','CE','CI','C','Cext','fudge','JE','JI','nu_thresh','nu_ext','p_rate','Nsyn']:
        print('  %s%s= %s'%(p, ' '*(12-len(p)), eval(p)))

    # put cell parameters into a dict
    cell_params = {'tau_m'      : tauMem,
                   'tau_syn_E'  : tauSyn,
                   'tau_syn_I'  : tauSyn,
                   'tau_refrac' : tauRef,
                   'v_rest'     : U0,
                   'v_reset'    : U0,
                   'v_thresh'   : theta,
                   'cm'         : 0.001}     # (nF)

    # === Build the network ========================================================

    # clear all existing network elements and set resolution and limits on delays.
    # For NEST, limits must be set BEFORE connecting any elements

    #extra = {'threads' : 2}

    rank = setup(timestep=dt, max_delay=delay, **extra)
    print("rank =", rank)
    np = num_processes()
    print("np =", np)
    import socket
    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank+1, host_name))

    if 'threads' in extra:
        print("%d Initialising the simulator with %d threads..." % (rank, extra['threads']))
    else:
        print("%d Initialising the simulator with single thread..." % rank)

    # Small function to display information only on node 1
    def nprint(s):
        if rank == 0:
            print(s)

    timer.start()  # start timer on construction

    print("%d Setting up random number generator" % rank)
    rng = NumpyRNG(kernelseed, parallel_safe=True)

    print("%d Creating excitatory population with %d neurons." % (rank, NE))
    celltype = IF_curr_alpha(**cell_params)
    celltype.default_initial_values['v'] = U0 # Setting default init v, useful for NML2 export
    E_net = Population(NE, celltype, label="E_net")

    print("%d Creating inhibitory population with %d neurons." % (rank, NI))
    I_net = Population(NI, celltype, label="I_net")

    print("%d Initialising membrane potential to random values between %g mV and %g mV." % (rank, U0, theta))
    uniformDistr = RandomDistribution('uniform', low=U0, high=theta, rng=rng)
    E_net.initialize(v=uniformDistr)
    I_net.initialize(v=uniformDistr)

    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_type = SpikeSourcePoisson(rate=p_rate)
    expoisson = Population(NE, source_type, label="expoisson")

    print("%d Creating inhibitory Poisson generator with the same rate." % rank)
    inpoisson = Population(NI, source_type, label="inpoisson")

    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    E_net.record('spikes')
    if N_rec_v>0:
        E_net[0:min(NE,N_rec_v)].record('v')

    print("%d Setting up recording in inhibitory population." % rank)
    I_net.record('spikes')
    if N_rec_v>0:
        I_net[0:min(NI,N_rec_v)].record('v')

    progress_bar = ProgressBar(width=20)
    connector = FixedProbabilityConnector(epsilon, rng=rng, callback=progress_bar)
    E_syn = StaticSynapse(weight=JE, delay=delay)
    I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn = StaticSynapse(weight=JE, delay=dt)

    print("%d Connecting excitatory population with connection probability %g, weight %g nA and delay %g ms." % (rank, epsilon, JE, delay))
    E_to_E = Projection(E_net, E_net, connector, E_syn, receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    I_to_E = Projection(I_net, E_net, connector, I_syn, receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    input_to_E = Projection(expoisson, E_net, ext_Connector, ext_syn, receptor_type="excitatory")
    print("input --> E\t", len(input_to_E), "connections")

    print("%d Connecting inhibitory population with connection probability %g, weight %g nA and delay %g ms." % (rank, epsilon, JI, delay))
    E_to_I = Projection(E_net, I_net, connector, E_syn, receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_net, I_net, connector, I_syn, receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    input_to_I = Projection(inpoisson, I_net, ext_Connector, ext_syn, receptor_type="excitatory")
    print("input --> I\t", len(input_to_I), "connections")

    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # restart timer to measure the simulation run
    print("%d Running simulation for %g ms (dt=%sms)." % (rank, simtime, dt))
    run(simtime)
    print("Done")
    simCPUTime = timer.elapsedTime()

    # write data to file
    #print("%d Writing data to file." % rank)
    #(E_net + I_net).write_data("Results/brunel_np%d_%s.pkl" % (np, simulator_name))
    if save and not simulator_name=='neuroml':
        for pop in [E_net , I_net]:
            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.gdf"%(simulator_name, pop.label, rank))
            spikes =  pop.get_data('spikes', gather=False)
            for segment in spikes.segments:
                io.write_segment(segment)
                
            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.dat"%(simulator_name, pop.label, rank))
            vs =  pop.get_data('v', gather=False)
            for segment in vs.segments:
                io.write_segment(segment)
            
    spike_data = {}
    spike_data['senders'] = []
    spike_data['times'] = []
    index_offset = 1
    for pop in [E_net , I_net]:
        if rank == 0:
            spikes =  pop.get_data('spikes', gather=False)
            #print(spikes.segments[0].all_data)
            num_rec = len(spikes.segments[0].spiketrains)
            print("Extracting spike info (%i) for %i cells in %s"%(num_rec,pop.size,pop.label))
            #assert(num_rec==len(spikes.segments[0].spiketrains))
            for i in range(num_rec):
                ss = spikes.segments[0].spiketrains[i]
                for s in ss:
                    index = i+index_offset
                    #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                    spike_data['senders'].append(index)
                    spike_data['times'].append(s)
            index_offset+=pop.size
        
    #from IPython.core.debugger import Tracer
    #Tracer()()

    E_rate = E_net.mean_spike_count()*1000.0/simtime
    I_rate = I_net.mean_spike_count()*1000.0/simtime

    # write a short report
    nprint("\n--- Brunel Network Simulation ---")
    nprint("Nodes              : %d" % np)
    nprint("Number of Neurons  : %d" % N)
    nprint("Number of Synapses : %d" % Nsyn)
    nprint("Input firing rate  : %g" % p_rate)
    nprint("Excitatory weight  : %g" % JE)
    nprint("Inhibitory weight  : %g" % JI)
    nprint("Excitatory rate    : %g Hz" % E_rate)
    nprint("Inhibitory rate    : %g Hz" % I_rate)
    nprint("Build time         : %g s" % buildCPUTime)
    nprint("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()
    
    
    if simulator_name=='neuroml' and jnml_simulator:
        from pyneuroml import pynml
        lems_file = 'LEMS_Sim_PyNN_NeuroML2_Export.xml'
        
        print('Going to run generated LEMS file: %s on simulator: %s'%(lems_file,jnml_simulator))
        
        if jnml_simulator=='jNeuroML':
            results, events = pynml.run_lems_with_jneuroml(lems_file, nogui=True, load_saved_data=True, reload_events=True)
        
        elif jnml_simulator=='jNeuroML_NEURON':
            results, events = pynml.run_lems_with_jneuroml_neuron(lems_file, nogui=True, load_saved_data=True, reload_events=True)
            
        spike_data['senders'] = []
        spike_data['times'] = []
        for k in events.keys():
            values = k.split('/') 
            index = int(values[1]) if values[0]=='E_net' else NE+int(values[1])
            n = len(events[k])
            print("Loading spikes for %s (index %i): [%s, ..., %s (n=%s)] sec"%(k,index,events[k][0] if n>0 else '-',events[k][-1] if n>0 else '-',n))
            for t in events[k]:
                spike_data['senders'].append(index)
                spike_data['times'].append(t*1000)
                
    #print spike_data
    return spike_data
Example #9
def do_run(seed=None):
    simulator_name = 'spiNNaker'

    timer = Timer()

    # === Define parameters =========================================

    parallel_safe = True

    n = 1500  # number of cells
    # number of excitatory cells:number of inhibitory cells
    r_ei = 4.0
    pconn = 0.02  # connection probability

    dt = 1  # (ms) simulation timestep
    tstop = 200  # (ms) simulation duration
    delay = 1

    # Cell parameters
    area = 20000.  # (µm²)
    tau_m = 20.  # (ms)
    cm = 1.  # (µF/cm²)
    g_leak = 5e-5  # (S/cm²)
    e_leak = -49.  # (mV)
    v_thresh = -50.  # (mV)
    v_reset = -60.  # (mV)
    t_refrac = 5.  # (ms) (clamped at v_reset)
    # (mV) 'mean' membrane potential,  for calculating CUBA weights
    v_mean = -60.
    tau_exc = 5.  # (ms)
    tau_inh = 10.  # (ms)
    # (nS) #Those weights should be similar to the COBA weights
    g_exc = 0.27
    # (nS) # but the depolarising drift should be taken into account
    g_inh = 4.5
    e_rev_exc = 0.  # (mV)
    e_rev_inh = -80.  # (mV)

    # === Calculate derived parameters ===============================

    area *= 1e-8  # convert to cm²
    cm *= area * 1000  # convert to nF
    r_m = 1e-6 / (g_leak * area)  # membrane resistance in MΩ
    assert tau_m == cm * r_m  # just to check

    # number of excitatory cells
    n_exc = int(round((n * r_ei / (1 + r_ei))))
    n_inh = n - n_exc  # number of inhibitory cells

    celltype = p.IF_curr_exp
    # (nA) weight of excitatory synapses
    w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)
    w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
    assert w_exc > 0
    assert w_inh < 0
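    # Sanity check of the conversions above with the values given:
    #   area  = 20000 µm² = 2e-4 cm²
    #   cm    = 1 µF/cm² * 2e-4 cm² * 1000 = 0.2 nF
    #   r_m   = 1e-6 / (5e-5 * 2e-4) = 100 MΩ, so cm * r_m = 20 ms = tau_m
    #   n_exc = round(1500 * 4 / 5) = 1200, n_inh = 300
    #   w_exc = 1e-3 * 0.27 * (0 - (-60)) = 0.0162 nA
    #   w_inh = 1e-3 * 4.5 * (-80 - (-60)) = -0.09 nA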

    # === Build the network ==========================================

    p.setup(timestep=dt, min_delay=delay, max_delay=delay)

    if simulator_name == 'spiNNaker':
        # this will set 100 neurons per core
        p.set_number_of_neurons_per_core(p.IF_curr_exp, 100)
        # this will set 50 neurons per core
        p.set_number_of_neurons_per_core(p.IF_cond_exp, 50)

    # node_id = 1
    # np = 1

    # host_name = socket.gethostname()

    cell_params = {'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
                   'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
                   'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0}

    timer.start()

    exc_cells = p.Population(n_exc, celltype, cell_params,
                             label="Excitatory_Cells")
    inh_cells = p.Population(n_inh, celltype, cell_params,
                             label="Inhibitory_Cells")
    rng = NumpyRNG(seed=seed, parallel_safe=parallel_safe)
    uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng)
    exc_cells.initialize(v=uniform_distr)
    inh_cells.initialize(v=uniform_distr)

    exc_conn = p.FixedProbabilityConnector(pconn, rng=rng)
    synapse_exc = p.StaticSynapse(weight=w_exc, delay=delay)
    inh_conn = p.FixedProbabilityConnector(pconn, rng=rng)
    synapse_inh = p.StaticSynapse(weight=w_inh, delay=delay)

    connections = dict()
    connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn,
                                      synapse_type=synapse_exc,
                                      receptor_type='excitatory')
    connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn,
                                      synapse_type=synapse_exc,
                                      receptor_type='excitatory')
    connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn,
                                      synapse_type=synapse_inh,
                                      receptor_type='inhibitory')
    connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn,
                                      synapse_type=synapse_inh,
                                      receptor_type='inhibitory')

    # === Setup recording ==============================
    exc_cells.record("spikes")

    # === Run simulation ================================
    p.run(tstop)

    exc_spikes = exc_cells.get_data("spikes")

    exc_cells.write_data(neo_path, "spikes")

    p.end()

    return exc_spikes
Example #10
    def test_va_benchmark(self):
        try:
            simulator_name = 'spiNNaker'

            timer = Timer()

            # === Define parameters =========================================

            rngseed = 98766987
            parallel_safe = True

            n = 1500  # number of cells
            # number of excitatory cells:number of inhibitory cells
            r_ei = 4.0
            pconn = 0.02  # connection probability

            dt = 0.1  # (ms) simulation timestep
            tstop = 200  # (ms) simulation duration
            delay = 1

            # Cell parameters
            area = 20000.  # (µm²)
            tau_m = 20.  # (ms)
            cm = 1.  # (µF/cm²)
            g_leak = 5e-5  # (S/cm²)
            e_leak = -49.  # (mV)
            v_thresh = -50.  # (mV)
            v_reset = -60.  # (mV)
            t_refrac = 5.  # (ms) (clamped at v_reset)
            # (mV) 'mean' membrane potential,  for calculating CUBA weights
            v_mean = -60.
            tau_exc = 5.  # (ms)
            tau_inh = 10.  # (ms)
            # (nS) #Those weights should be similar to the COBA weights
            g_exc = 0.27
            # (nS) # but the depolarising drift should be taken into account
            g_inh = 4.5
            e_rev_exc = 0.  # (mV)
            e_rev_inh = -80.  # (mV)

            # === Calculate derived parameters ===============================

            area *= 1e-8  # convert to cm²
            cm *= area * 1000  # convert to nF
            r_m = 1e-6 / (g_leak * area)  # membrane resistance in MΩ
            assert tau_m == cm * r_m  # just to check

            # number of excitatory cells
            n_exc = int(round((n * r_ei / (1 + r_ei))))
            n_inh = n - n_exc  # number of inhibitory cells

            print n_exc, n_inh

            celltype = p.IF_curr_exp
            # (nA) weight of excitatory synapses
            w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)
            w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
            assert w_exc > 0
            assert w_inh < 0

            # === Build the network ==========================================

            p.setup(timestep=dt, min_delay=delay, max_delay=delay)

            if simulator_name == 'spiNNaker':
                # this will set 100 neurons per core
                p.set_number_of_neurons_per_core('IF_curr_exp', 100)
                # this will set 50 neurons per core
                p.set_number_of_neurons_per_core('IF_cond_exp', 50)

            node_id = 1
            np = 1

            host_name = socket.gethostname()
            print "Host #%d is on %s" % (np, host_name)

            cell_params = {
                'tau_m': tau_m,
                'tau_syn_E': tau_exc,
                'tau_syn_I': tau_inh,
                'v_rest': e_leak,
                'v_reset': v_reset,
                'v_thresh': v_thresh,
                'cm': cm,
                'tau_refrac': t_refrac,
                'i_offset': 0
            }

            print cell_params

            timer.start()

            print "%s Creating cell populations..." % node_id
            exc_cells = p.Population(n_exc,
                                     celltype,
                                     cell_params,
                                     label="Excitatory_Cells")
            inh_cells = p.Population(n_inh,
                                     celltype,
                                     cell_params,
                                     label="Inhibitory_Cells")
            p.NativeRNG(12345)

            print "%s Initialising membrane potential to random values..." \
                  % node_id
            rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
            uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh],
                                               rng=rng)
            exc_cells.initialize('v', uniform_distr)
            inh_cells.initialize('v', uniform_distr)

            print "%s Connecting populations..." % node_id
            exc_conn = p.FixedProbabilityConnector(pconn,
                                                   weights=w_exc,
                                                   delays=delay)
            inh_conn = p.FixedProbabilityConnector(pconn,
                                                   weights=w_inh,
                                                   delays=delay)

            connections = dict()
            connections['e2e'] = p.Projection(exc_cells,
                                              exc_cells,
                                              exc_conn,
                                              target='excitatory',
                                              rng=rng)
            connections['e2i'] = p.Projection(exc_cells,
                                              inh_cells,
                                              exc_conn,
                                              target='excitatory',
                                              rng=rng)
            connections['i2e'] = p.Projection(inh_cells,
                                              exc_cells,
                                              inh_conn,
                                              target='inhibitory',
                                              rng=rng)
            connections['i2i'] = p.Projection(inh_cells,
                                              inh_cells,
                                              inh_conn,
                                              target='inhibitory',
                                              rng=rng)

            # === Setup recording ==============================
            print "%s Setting up recording..." % node_id
            exc_cells.record()

            # === Run simulation ================================
            print "%d Running simulation..." % node_id

            print "timings: number of neurons:", n
            print "timings: number of synapses:", n * n * pconn

            p.run(tstop)

            exc_spikes = exc_cells.getSpikes()
            print len(exc_spikes)

            current_file_path = os.path.dirname(os.path.abspath(__file__))
            current_file_path = os.path.join(current_file_path, "spikes.data")
            exc_cells.printSpikes(current_file_path)
            pre_recorded_spikes = p.utility_calls.read_spikes_from_file(
                current_file_path, 0, n_exc, 0, tstop)

            for spike_element, read_element in zip(exc_spikes,
                                                   pre_recorded_spikes):
                self.assertEqual(round(spike_element[0], 1),
                                 round(read_element[0], 1))
                self.assertEqual(round(spike_element[1], 1),
                                 round(read_element[1], 1))

            p.end()


        # the system is intentionally overloaded, so a timeout may occur
        except SpinnmanTimeoutException as ex:
            raise SkipTest(ex)
Example #11
import pacman

import struct 
import sys
import numpy
import numpy.random as r
numpy.set_printoptions(precision=4)

from pyNN.utility import Timer

#import pickle
import cPickle as pickle
import sqlite3

timer = Timer()
timer.start()

DEBUG = pacman.pacman_configuration.getboolean('synapse_writer', 'debug')

INFO = True

PARALLEL = pacman.pacman_configuration.getboolean('synapse_writer', 'parallel')     # run the synapse_writer in parallel processes
num_processes = pacman.pacman_configuration.getint('synapse_writer', 'processes')      # number of processes

DELTA_T = pacman.pacman_configuration.getfloat('synapse_writer', 'parallel_delta_t')     # delay between 2 queries
N_QUERY=500

##### INSERTED FOR LOOKUP TABLE GENERATION
LOOKUP_WORDS_PER_ENTRY = 5  # each lookup entry has 5 words
Example #12

import os
import socket
from math import *

from pyNN.utility import get_script_args, Timer, ProgressBar
usage = """Usage: python VAbenchmarks.py <simulator> <benchmark>
           <simulator> is either neuron, nest, brian or pcsim
           <benchmark> is either CUBA or COBA."""
simulator_name, benchmark = get_script_args(2, usage)
exec("from pyNN.%s import *" % simulator_name)
from pyNN.random import NumpyRNG, RandomDistribution

timer = Timer()

# === Define parameters ========================================================

threads  = 1
rngseed  = 98765
parallel_safe = True

n        = 4000  # number of cells
r_ei     = 4.0   # number of excitatory cells:number of inhibitory cells
pconn    = 0.02  # connection probability
stim_dur = 50.   # (ms) duration of random stimulation
rate     = 100.  # (Hz) frequency of the random stimulation

dt       = 0.1   # (ms) simulation timestep
tstop    = 1000  # (ms) simulation duration
Example #13
def test(cases=[1]):

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None))
    safe = False
    verbose = True
    autapse = False
    parallel_safe = True
    render = True

    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        #delay = RandomDistribution('uniform', (0.1,5.))
        delay = "0.1 + d/0.2"
        #delay = 0.1
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "d < 0.1"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()
        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression,
                delays=delay,
                weights=w,
                space=sp,
                safe=safe,
                verbose=verbose,
                allow_self_connections=autapse)
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(0.05,
                                             weights=w,
                                             delays=delay,
                                             space=sp,
                                             safe=safe,
                                             verbose=verbose,
                                             allow_self_connections=autapse)
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            conn = AllToAllConnector(delays=delay,
                                     weights=w,
                                     space=sp,
                                     safe=safe,
                                     verbose=verbose,
                                     allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50,
                                            weights=w,
                                            delays=delay,
                                            space=sp,
                                            safe=safe,
                                            verbose=verbose,
                                            allow_self_connections=autapse)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50,
                                           weights=w,
                                           delays=delay,
                                           space=sp,
                                           safe=safe,
                                           verbose=verbose,
                                           allow_self_connections=autapse)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe,
                                     weights=w,
                                     delays=delay,
                                     verbose=verbose)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector('connections.dat',
                                     safe=safe,
                                     verbose=verbose)
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(degree=0.1,
                                       rewiring=0.,
                                       weights=w,
                                       delays=delay,
                                       safe=safe,
                                       verbose=verbose,
                                       allow_self_connections=autapse,
                                       space=sp)
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print "Generating data for %s" % fig_name
        rng = NumpyRNG(23434, num_processes=np, parallel_safe=parallel_safe)
        prj = Projection(x, x, conn, rng=rng)

        simulation_time = timer.elapsedTime()
        print "Building time", simulation_time
        print "Nb synapses built", len(prj)

        if render:
            if not (os.path.isdir('Results')):
                os.mkdir('Results')

            print "Saving Positions...."
            x.savePositions('Results/positions.dat')

            print "Saving Connections...."
            prj.saveConnections('Results/connections.dat',
                                compatible_output=False)

        if node_id == 0 and render:
            figure()
            print "Generating and saving %s" % fig_name
            positions = numpy.loadtxt('Results/positions.dat')
            connections = numpy.loadtxt('Results/connections.dat')
            positions = positions[numpy.argsort(positions[:, 0])]
            idx_pre = (connections[:, 0] - x.first_id).astype(int)
            idx_post = (connections[:, 1] - x.first_id).astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:, 1], positions[:, 2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:, 2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:, 3], 50)
            subplot(234)
            ids = numpy.random.permutation(numpy.unique(positions[:, 0]))[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y']
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], '.')

            subplot(236)
            plot(d, connections[:, 3], '.')
            savefig("Results/" + fig_name)
            os.remove('Results/connections.dat')
            os.remove('Results/positions.dat')
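
A hypothetical call (assuming the surrounding script has already created the population x and defined node_id and simulator_name, which this snippet relies on):

test(cases=[1, 2])   # build and plot the DistanceDependent and FixedProbability cases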
Example #14
from connector_functions import create_cortical_to_cortical_connection
from connector_functions import normalize_connection_list
from connector_functions import create_cortical_to_cortical_connection_corr
from connector_functions import create_thalamocortical_connection
from analysis_functions import calculate_tuning, visualize_conductances, visualize_conductances_and_voltage
from analysis_functions import conductance_analysis
from plot_functions import plot_spiketrains

#############################

simulator = get_script_args(1)[0]
exec("import pyNN.%s as simulator" % simulator)
#import pyNN.nest as simulator
#import pyNN.neuron as simulator

timer = Timer()

#############################
##  Parameters
#############################

# ============== Network and simulation parameters =================

contrast = 0.50  # Contrast used (possible range available in ./data)

Nside_lgn = 30  # N_lgn x N_lgn is the size of the LGN
Nside_exc = 40  # N_exc x N_exc is the  size of the cortical excitatory layer
Nside_inh = 20  # N_inh x N_inh is the size of the cortical inhibitory layer

factor = 1  # Reduction factor
Example #15
    def run(self, params, verbose=True):
        """
        params are the parameters to use

        """
        tmpdir = tempfile.mkdtemp()
        myTimer = Timer()
        # === Build the network ========================================================
        if verbose:
            print "Setting up simulation"
        myTimer.start()  # start timer on construction
        sim.setup(timestep=params["dt"], max_delay=params["syn_delay"])
        N = params["N"]
        # dc_generator
        phr_ON = sim.Population((N,), "dc_generator")
        phr_OFF = sim.Population((N,), "dc_generator")

        for factor, phr in [(-params["snr"], phr_OFF), (params["snr"], phr_ON)]:
            phr.tset("amplitude", params["amplitude"] * factor)
            phr.set({"start": params["simtime"] / 4, "stop": params["simtime"] / 4 * 3})

        # internal noise model (see benchmark_noise)
        noise_ON = sim.Population((N,), "noise_generator", {"mean": 0.0, "std": params["noise_std"]})
        noise_OFF = sim.Population((N,), "noise_generator", {"mean": 0.0, "std": params["noise_std"]})

        # target ON and OFF populations (what about a tridimensional Population?)
        out_ON = sim.Population(
            (N,), sim.IF_curr_alpha
        )  #'IF_cond_alpha) #iaf_sfa_neuron')# EIF_cond_alpha_isfa_ista, IF_cond_exp_gsfa_grr,sim.IF_cond_alpha)#'iaf_sfa_neuron',params['parameters_gc'])#'iaf_cond_neuron')# IF_cond_alpha) #
        out_OFF = sim.Population(
            (N,), sim.IF_curr_alpha
        )  #'IF_cond_alpha) #IF_curr_alpha)#'iaf_sfa_neuron')#sim.IF_curr_alpha)#,params['parameters_gc'])

        # initialize membrane potential TODO: and conductances?
        from pyNN.random import RandomDistribution, NumpyRNG

        rng = NumpyRNG(seed=params["kernelseed"])
        vinit_distr = RandomDistribution(distribution="uniform", parameters=[-70, -55], rng=rng)
        for out_ in [out_ON, out_OFF]:
            out_.randomInit(vinit_distr)

        retina_proj_ON = sim.Projection(phr_ON, out_ON, sim.OneToOneConnector())
        retina_proj_ON.setWeights(params["weight"])
        # TODO fix setWeight, add setDelays to 10 ms (relative to stimulus onset)
        retina_proj_OFF = sim.Projection(phr_OFF, out_OFF, sim.OneToOneConnector())
        retina_proj_OFF.setWeights(params["weight"])

        noise_proj_ON = sim.Projection(noise_ON, out_ON, sim.OneToOneConnector())
        noise_proj_ON.setWeights(params["weight"])
        noise_proj_OFF = sim.Projection(
            noise_OFF, out_OFF, sim.OneToOneConnector()
        )  # implication if ON and OFF have the same noise input?
        noise_proj_OFF.setWeights(params["weight"])

        out_ON.record()
        out_OFF.record()

        # reads out time used for building
        buildCPUTime = myTimer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose:
            print "Running simulation"

        myTimer.reset()  # reset timer to measure the simulation run
        sim.run(params["simtime"])
        simCPUTime = myTimer.elapsedTime()

        myTimer.reset()  # reset timer to measure data writing
        # TODO LUP use something like "for pop in [phr, out]" ?
        out_ON_filename = os.path.join(tmpdir, "out_on.gdf")
        out_OFF_filename = os.path.join(tmpdir, "out_off.gdf")
        out_ON.printSpikes(out_ON_filename)  #
        out_OFF.printSpikes(out_OFF_filename)  #

        # TODO LUP  get out_ON_DATA on a 2D grid independently of out_ON.cell.astype(int)
        out_ON_DATA = load_spikelist(out_ON_filename, range(N), t_start=0.0, t_stop=params["simtime"])
        out_OFF_DATA = load_spikelist(out_OFF_filename, range(N), t_start=0.0, t_stop=params["simtime"])

        out = {"out_ON_DATA": out_ON_DATA, "out_OFF_DATA": out_OFF_DATA}  # ,'out_ON_pos':out_ON}
        # cleans up
        os.remove(out_ON_filename)
        os.remove(out_OFF_filename)
        os.rmdir(tmpdir)
        writeCPUTime = myTimer.elapsedTime()

        if verbose:
            print "\nRetina Network Simulation:"
            print (params["description"])
            print "Number of Neurons  : ", N
            print "Output rate  (ON) : ", out_ON_DATA.mean_rate(), "Hz/neuron in ", params["simtime"], "ms"
            print "Output rate (OFF)   : ", out_OFF_DATA.mean_rate(), "Hz/neuron in ", params["simtime"], "ms"
            print ("Build time             : %g s" % buildCPUTime)
            print ("Simulation time        : %g s" % simCPUTime)
            print ("Writing time           : %g s" % writeCPUTime)

        return out
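
The run() method above reads only a handful of keys from params; a hypothetical dictionary with illustrative values (the key names come from the code, the values are invented here) would be:

params = {"dt": 0.1, "syn_delay": 1.0, "N": 100, "snr": 2.0, "amplitude": 1.0,
          "simtime": 1000.0, "noise_std": 1.0, "kernelseed": 4321,
          "weight": 0.1, "description": "retina ON/OFF benchmark"}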
Example #16
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "spike_train_statistics.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """

    import json
    from pyNN.utility import Timer

    print("Running")

    timer = Timer()

    g = open("spike_train_statistics.json", 'r')
    d = json.load(g)

    N = d['param']['N']
    max_rate = d['param']['max_rate']
    tstop = d['param']['tstop']
    d['SpikeSourcePoisson'] = {
        "duration": tstop
    }

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]
        d['SpikeSourcePoisson']['random'] = True
        place = mapper.place()

    timer.start()
    sim.setup(**d['setup'])

    spike_sources = sim.Population(N, sim.SpikeSourcePoisson, d['SpikeSourcePoisson'])
    delta_rate = max_rate/N
    rates = numpy.linspace(delta_rate, max_rate, N)
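    # e.g. N = 10 and max_rate = 50.0 would give rates = [5, 10, 15, ..., 50] Hz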
    print("Firing rates: %s" % rates)
    if PYNN07:
        spike_sources.tset("rate", rates)
    else:
        spike_sources.set(rate=rates)

    if options['simulator'] == "hardware.brainscales":
        for i, spike_source in enumerate(spike_sources):
            place.to(spike_source, hicann=i//8, neuron=i%64)
        place.commit()

    if PYNN07:
        spike_sources.record()
    else:
        spike_sources.record('spikes')

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = spike_sources.getSpikes()
        data = spike_array_to_neo(spike_array, spike_sources, tstop)
    else:
        data = spike_sources.get_data()

    sim.end()

    closing_time = timer.diff()
    times = {'setup_time': setup_time, 'run_time': run_time, 'closing_time': closing_time}

    return data, times
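
A hypothetical invocation, assuming pyNN.nest is installed, the module-level globals this function relies on (numpy, PYNN07) are defined, and spike_train_statistics.json is in the working directory:

import pyNN.nest as sim
data, times = run_model(sim, simulator="nest")
print(times)   # {'setup_time': ..., 'run_time': ..., 'closing_time': ...}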
Example #17
class NetworkModel(object):

    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print "USE_MPI: yes", '\tpc_id, n_proc:', self.pc_id, self.n_proc
        else:
            self.pc_id, self.n_proc = 0, 1
            print "MPI not used"

        np.random.seed(params['np_random_seed'] + self.pc_id)

        if self.params['with_short_term_depression']:
            self.short_term_depression = SynapseDynamics(fast=TsodyksMarkramMechanism(U=0.95, tau_rec=10.0, tau_facil=0.0))

    def import_pynn(self):
        """
        This function needs only be called when this class is used in another script as imported module
        """
        import pyNN

        exec("from pyNN.%s import *" % self.params['simulator'])
        print 'import pyNN\npyNN.version: ', pyNN.__version__



    def setup(self, load_tuning_prop=False, times={}):

        self.projections = {}
        self.projections['ee'] = []
        self.projections['ei'] = []
        self.projections['ie'] = []
        self.projections['ii'] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='exc')        # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='inh')        # set the tuning properties of inh cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
            self.tuning_prop_inh = np.loadtxt(self.params['tuning_prop_inh_fn'])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print "Saving tuning_prop to file:", self.params['tuning_prop_inh_fn']
            np.savetxt(self.params['tuning_prop_inh_fn'], self.tuning_prop_inh)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times['t_all'] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(axes='xy', periodic_boundaries=((0., self.params['torus_width']), (0., self.params['torus_height'])))

    def create_neurons_with_limited_tuning_properties(self):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'IF_cond_alpha':
            self.exc_pop = Population(n_exc, IF_cond_alpha, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_alpha, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']

        # set cell positions, required for isotropic connections
        cell_pos_exc = np.zeros((3, self.params['n_exc']))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params['n_inh']))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]
        print 'Debug, pc_id %d has local %d exc indices:' % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc
        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize('v', self.v_init_dist)
        self.times['t_create'] = self.timer.diff()


    def create(self, input_created=False):
        """
            # # # # # # # # # # # #
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'IF_cond_alpha':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_alpha, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_alpha, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(self.params['n_exc'], EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        print 'Debug, pc_id %d has local %d exc indices:' % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc

        cell_pos_exc = np.zeros((3, self.params['n_exc']))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params['n_inh']))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh


        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize('v', self.v_init_dist)

        self.times['t_create'] = self.timer.diff()


    def connect(self):
        if self.params['n_exc'] > 5000:
            save_output = False
        else:
            save_output = True

        self.connect_input_to_exc()
        self.connect_populations('ee')
        self.connect_populations('ei')
        self.connect_populations('ie')
        self.connect_populations('ii')
        self.connect_noise()
        self.times['t_calc_conns'] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def get_motion_params_from_protocol(self, time):
        """

        """

        predictor_interval = int(time / self.params['predictor_interval_duration'])
        # based on the motion_protocol calculate the stimulus position and direction etc --> predictor_params
        if self.params['motion_protocol'] == 'congruent':
            x0, y0, u0, v0, theta = self.params['motion_params'][0], self.params['motion_params'][1],  self.params['motion_params'][2],  self.params['motion_params'][3], self.params['motion_params'][4]
            x, y = (x0 + u0 * time) % self.params['torus_width'], (y0 + v0 * time) % self.params['torus_height'] # current position of the blob at time t assuming a perfect translation
            predictor_params = (x, y, u0, v0, theta)

        elif self.params['motion_protocol'] == 'incongruent':
        # the incongruent protocol uses an oriented bar as stimulus whose orientation is flipped inside the CRF
            predictor_params = self.params['motion_params']
#            if (t_check < time < t_stop_check):
#                orientation = sp.params['motion_params'][:,4] + np.pi/2.0

        # Missing CRF protocol includes a moving oriented bar which approaches the CRF and disappears inside the CRF
        # --> we give noise as input
        # --> we shuffle the stimulus among all cells to get an incoherent input (the output of the CRF will be very small)
        elif self.params['motion_protocol'] == 'Missing CRF':
            predictor_params = self.params['motion_params']
#            if (t_check < t < t_stop_check):
#                L = np.random.permutation(stimulus)

        # CRF only protocol includes an oriented bar which moves for a short period only inside CRF        
        elif self.params['motion_protocol'] == 'CRF only':
            predictor_params = self.params['motion_params']
#            if (t_check < t < t_stop_check):
#                L = stimulus
#            else:
#                L = np.random.permutation(stimulus)
#                 L = 0

        elif self.params['motion_protocol'] == 'random predictor':
            predictor_params = self.params['motion_params']
            # we create a random sequence of orientations and segment the trajectory
#            orientation = np.random.rand(self.params['n_random_predictor_orientations']) * np.pi

        return predictor_params
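    # Worked example for the 'congruent' branch above (values are hypothetical, not
    # taken from simulation_parameters.py): with x0 = 0.1, u0 = 0.5, torus_width = 1.0
    # and time = 0.4 (in units of t_stimulus), the blob position wraps on the torus:
    #     x = (0.1 + 0.5 * 0.4) % 1.0 = 0.3
    # so predictor_params = (0.3, y, u0, v0, theta).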


    def create_input(self, load_files=False, save_output=False):


        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params['input_st_fn_base'] + str(tgt) + '.npy'
                    spike_times = np.load(fn)
                except: # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params['input_spikes_seed'])
            dt = self.params['dt_rate'] # [ms] time step for the non-homogeneous Poisson process
            time = np.arange(0, self.params['t_sim'], dt)
            blank_idx = np.arange(1./dt * self.params['t_before_blank'], 1. / dt * (self.params['t_before_blank'] + self.params['t_blank']))
            before_stim_idx = np.arange(0, self.params['t_start'] * 1./dt)
            blank_idx = np.concatenate((blank_idx, before_stim_idx))

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))

            # get the input signal
            print 'Calculating input signal'
            for i_time, time_ in enumerate(time):
                predictor_params = self.get_motion_params_from_protocol(time_ / self.params['t_stimulus'])
                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, predictor_params, motion = self.params['motion_type'])
                L_input[:, i_time] *= self.params['f_max_stim']
                if (i_time % 500 == 0):
                    print "t:", time_

            # blanking
            for i_time in blank_idx:
                L_input[:, i_time] = np.random.permutation(L_input[:, i_time])
#                L_input[:, i_time] = 0.

            # create the spike trains
            print 'Creating input spiketrains ...'
            for i_, unit in enumerate(my_units):
                rate_of_t = np.array(L_input[i_, :])
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt)
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params['input_rate_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params['input_st_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, np.array(spike_times))

        self.times['create_input'] = self.timer.diff()
        return self.spike_times_container
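    # A minimal vectorized sketch (an assumption, not part of the original repository)
    # of the Bernoulli-thinning rule used in the loop above: in each bin of width
    # dt [ms], a spike is emitted with probability rate[i] / 1000. * dt.
    #
    #     def poisson_spike_times(rate_of_t, dt):
    #         r = nprnd.rand(rate_of_t.size)
    #         spike_bins = np.nonzero(r <= rate_of_t / 1000. * dt)[0]
    #         return spike_bins * dt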



    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting input spiketrains..."
        
#        self.stimulus = Population(len(self.local_idx_exc), SpikeSourceArray)
#            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
#                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
#            self.projections[conn_type].append(prj)

#        self.projections['stim'] = []
#        self.stimuli = []
#        self.pop_views = [] 
#        conn = OneToOneConnector(weights=self.params['w_input_exc'])
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
#            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            ssa = Population(1, SpikeSourceArray, {'spike_times': spike_times})
#            ssa.set({'spike_times' : spike_times})
#            self.stimuli.append(ssa)

#            if self.params['with_short_term_depression']:

#                connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory', synapse_dynamics=self.short_term_depression)
#                selector = np.zeros(self.params['n_exc'], dtype=np.bool)
#                selector[unit] = True
#                print 'debug unit', unit, type(unit)
#                w[i_] = 1.#self.params['w_input_exc']
#                tgt = PopulationView(self.exc_pop, np.array([unit]))
#                self.pop_views.append(tgt)
#                prj = Projection(ssa, tgt, conn, target='excitatory', synapse_dynamics=self.short_term_depression)
#                prj = Projection(self.stimuli[-1], self.pop_views[-1], conn, target='excitatory', synapse_dynamics=self.short_term_depression)
#                self.projections['stim'].append(prj)
#            else:
            connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory')
        self.times['connect_input'] = self.timer.diff()


    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == 'ee':
            n_src, n_tgt = self.params['n_exc'], self.params['n_exc']
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = 'excitatory'

        elif conn_type == 'ei':
            n_src, n_tgt = self.params['n_exc'], self.params['n_inh']
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = 'excitatory'

        elif conn_type == 'ie':
            n_src, n_tgt = self.params['n_inh'], self.params['n_exc']
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = 'inhibitory'

        elif conn_type == 'ii':
            n_src, n_tgt = self.params['n_inh'], self.params['n_inh']
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = 'inhibitory'

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)
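    # Usage sketch (illustrative only): the connect_* methods below unpack this tuple, e.g.
    #
    #     (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt,
    #      tgt_cells, syn_type) = self.resolve_src_tgt('ei')
    #
    # which yields the exc -> inh pathway with syn_type = 'excitatory'.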


    def connect_anisotropic(self, conn_type):
        """
        conn_type = ['ee', 'ei', 'ie', 'ii']
        """
        if self.pc_id == 0:
            print 'Connect anisotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)

        n_src_cells_per_neuron = int(round(self.params['p_%s' % conn_type] * n_src))
        (delay_min, delay_max) = self.params['delay_range']
        local_connlist = np.zeros((n_src_cells_per_neuron * len(tgt_cells), 4))
        for i_, tgt in enumerate(tgt_cells):
            if self.params['conn_conf'] == 'direction-based':
                p, latency = CC.get_p_conn_direction_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['connectivity_radius'])
            elif self.params['conn_conf'] == 'motion-based':
                p, latency = CC.get_p_conn_motion_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['connectivity_radius'])
            elif self.params['conn_conf'] == 'orientation-direction':
                p, latency = CC.get_p_conn_direction_and_orientation_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['w_sigma_theta'], self.params['connectivity_radius'])
            else:
                print '\n\nERROR! Wrong connection configuration conn_conf parameter provided\nShould be direction-based, motion-based or orientation-direction\n'
                exit(1)

            if conn_type[0] == conn_type[1]:
                p[tgt], latency[tgt] = 0., 0.
            # random delays? --> np.permutate(latency) or latency[sources] * self.params['delay_scale'] * np.rand

            sorted_indices = np.argsort(p)
            if conn_type[0] == 'e':
                sources = sorted_indices[-n_src_cells_per_neuron:]
            else: # source = inhibitory
                if conn_type[0] == conn_type[1]:
                    sources = sorted_indices[1:n_src_cells_per_neuron+1]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron]

            eta = 1e-12
            w = (self.params['w_tgt_in_per_cell_%s' % conn_type] / (p[sources].sum() + eta)) * p[sources]
            w_ = np.minimum(np.maximum(w, self.params['w_thresh_min']), self.params['w_thresh_max'])

            delays = np.minimum(np.maximum(latency[sources] * self.params['delay_scale'], delay_min), delay_max)  # map the delay into the valid range
            conn_list = np.array((sources, tgt * np.ones(n_src_cells_per_neuron), w_, delays))
            local_connlist[i_ * n_src_cells_per_neuron : (i_ + 1) * n_src_cells_per_neuron, :] = conn_list.transpose()
            connector = FromListConnector(conn_list.transpose())
            if self.params['with_short_term_depression']:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
            else:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
            self.projections[conn_type].append(prj)

        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            np.savetxt(conn_list_fn, local_connlist, fmt='%d\t%d\t%.4e\t%.4e')
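    # Sketch of the weight normalisation used above (illustrative numbers, not taken
    # from the parameter file): with connection probabilities p[sources] = [0.1, 0.3, 0.6]
    # and w_tgt_in_per_cell_ee = 0.01, each target receives
    #     w = 0.01 / (0.1 + 0.3 + 0.6) * [0.1, 0.3, 0.6] = [0.001, 0.003, 0.006]
    # i.e. the incoming weights sum to w_tgt_in_per_cell_ee before being clipped to
    # [w_thresh_min, w_thresh_max].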


    def connect_ee_random(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    E X C - E X C    R A N D O M   #
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
        """

        if self.pc_id == 0:
            print 'Drawing random connections'
        sigma_x, sigma_v = self.params['w_sigma_x'], self.params['w_sigma_v']
        (delay_min, delay_max) = self.params['delay_range']
        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_ee_fn_base'] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''
        for tgt in self.local_idx_exc:
            p = np.zeros(self.params['n_exc'], dtype='float32')
            latency = np.zeros(self.params['n_exc'], dtype='float32')
            for src in xrange(self.params['n_exc']):
                if (src != tgt):
                    p[src], latency[src] = CC.get_p_conn(self.tuning_prop_exc[src, :], self.tuning_prop_exc[tgt, :], sigma_x, sigma_v, params['connectivity_radius']) #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
            sources = random.sample(xrange(self.params['n_exc']), int(self.params['n_src_cells_per_neuron']))
            idx = p[sources] > 0
            non_zero_idx = np.nonzero(idx)[0]
            p_ = p[sources][non_zero_idx]
            l_ = latency[sources][non_zero_idx] * self.params['delay_scale']

            w = utils.linear_transformation(p_, self.params['w_min'], self.params['w_max'])
            for i in xrange(len(p_)):
#                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                delay = min(max(l_[i], delay_min), delay_max)  # map the delay into the valid range
                connect(self.exc_pop[sources[non_zero_idx[i]]], self.exc_pop[tgt], w[i], delay=delay, synapse_type='excitatory')
                if self.debug_connectivity:
                    output += '%d\t%d\t%.2e\t%.2e\n' % (sources[non_zero_idx[i]], tgt, w[i], delay) #                    output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])

        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()

    def connect_isotropic(self, conn_type='ee'):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = exp(- d_ij / (2 * w_sigma_x**2))

        This will give a 'convergence constrained' connectivity, i.e. each cell will have the same sum of incoming weights
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print 'Connect isotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_ = self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == 'ei':
            w_ = self.params['w_ei_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ie':
            w_ = self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ii':
            w_ = self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
#            conn_file = open(conn_list_fn, 'w')
#            output = ''
#            output_dist = ''

        w_mean = w_tgt_in / (self.params['p_%s' % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params['w_sigma_distribution'] * w_mean

        w_dist = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))
        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        p_max = utils.get_pmax(self.params['p_%s' % conn_type], self.params['w_sigma_isotropic'], conn_type)
        connector = DistanceDependentProbabilityConnector('%f * exp(-d/(2*%f**2))' % (p_max, params['w_sigma_isotropic']), allow_self_connections=False, \
                weights=w_dist, delays=delay_dist, space=self.torus)#, n_connections=n_conn_ee)
        if self.params['with_short_term_depression']:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)#, synapse_dynamics=self.STD)
        self.projections[conn_type].append(prj)
        if self.debug_connectivity:
#                if self.pc_id == 0:
#                    print 'DEBUG writing to file:', conn_list_fn
            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + '.dat', gather=True)
#            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + 'gid%d.dat' % tgt, gather=False)
#                conn_file.close()


#            w = np.zeros(n_src, dtype='float32')
#            delays = np.zeros(n_src, dtype='float32')
#            for src in xrange(n_src):
#                if conn_type[0] == conn_type[1]:
#                    if (src != tgt): # no self-connections / autapses
#                        d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
#                        p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
#                        if np.random.rand() <= p_ij:
#                            w[src] = w_
#                            delays[src] = d_ij * params['delay_scale']
#                else:
#                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
#                    p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
#                    if np.random.rand() <= p_ij:
#                        w[src] = w_
#                        delays[src] = d_ij * params['delay_scale']
#            w *= w_tgt_in / w.sum()
#            srcs = w.nonzero()[0]
#            weights = w[srcs]
#            for src in srcs:
#                if w[src] > self.params['w_thresh_connection']:
#                delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
#                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
#                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay)

#        if self.debug_connectivity:
#            if self.pc_id == 0:
#                print 'DEBUG writing to file:', conn_list_fn
#            conn_file.write(output)
#            conn_file.close()
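    # The DistanceDependentProbabilityConnector expression used above encodes the rule
    # p(d) = p_max * exp(-d / (2 * w_sigma_isotropic**2)) on the torus. A minimal sketch
    # (an assumption, for illustration only) of evaluating it for a single source-target
    # distance d:
    #
    #     def p_connect(d, p_max, sigma):
    #         return p_max * np.exp(-d / (2. * sigma ** 2))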


    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters to the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print 'Connect random connections %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_ = self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == 'ei':
            w_ = self.params['w_ei_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ie':
            w_ = self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ii':
            w_ = self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
#            conn_file = open(conn_list_fn, 'w')
#            output = ''
#            output_dist = ''

        w_mean = w_tgt_in / (self.params['p_%s' % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params['w_sigma_distribution'] * w_mean

        weight_distr = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))

        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        connector= FastFixedProbabilityConnector(self.params['p_%s' % conn_type], weights=weight_distr, delays=delay_dist)
        if self.params['with_short_term_depression']:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
        print 'Saving random %s connections to %s' % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)
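    # How w_mean above is derived (a reading of the code, with hypothetical numbers):
    # the expected number of incoming connections per target cell is roughly
    # p_ee * n_max_conn / n_tgt, so dividing the desired total input weight
    # w_tgt_in_per_cell_ee by that count gives the mean single-connection weight, e.g.
    # with w_tgt_in_per_cell_ee = 0.01, p_ee = 0.02, n_src = n_tgt = 1000:
    #     n_in   = 0.02 * (1000 * 1000 - 1000) / 1000 = 19.98
    #     w_mean = 0.01 / 19.98 ~ 5.0e-4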



    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # #
            #     C O N N E C T   #
            # # # # # # # # # # # #
            Calls the right connection routine according to the flag set in simulation_parameters.py
        """
        if self.params['connectivity_%s' % conn_type] == 'anisotropic':
            self.connect_anisotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'isotropic':
            self.connect_isotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'random':
            self.connect_random(conn_type)
        else: # populations do not get connected
            pass


    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # #
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
        for tgt in self.local_idx_exc:
            #new
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.exc_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.exc_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)

        if self.pc_id == 0:
            print "Connecting noise - inh ... "
        for tgt in self.local_idx_inh:
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.inh_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.inh_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)
        self.times['connect_noise'] = self.timer.diff()




    def run_sim(self, sim_cnt, record_v=False):
        # # # # # # # # # # # # # # # # # # # #
        #     P R I N T    W E I G H T S      #
        # # # # # # # # # # # # # # # # # # # #
    #    print 'Printing weights to :\n  %s\n  %s\n  %s' % (self.params['conn_list_ei_fn'], self.params['conn_list_ie_fn'], self.params['conn_list_ii_fn'])
    #    exc_inh_prj.saveConnections(self.params['conn_list_ei_fn'])
    #    inh_exc_prj.saveConnections(self.params['conn_list_ie_fn'])
    #    inh_inh_prj.saveConnections(self.params['conn_list_ii_fn'])
    #    self.times['t_save_conns'] = self.timer.diff()

        # # # # # # # # # # # #
        #     R E C O R D     #
        # # # # # # # # # # # #
    #    print "Recording spikes to file: %s" % (self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
    #    for cell in xrange(self.params['n_exc']):
    #        record(self.exc_pop[cell], self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)

        record_exc = True
        if os.path.exists(self.params['gids_to_record_fn']):
            gids_to_record = np.loadtxt(self.params['gids_to_record_fn'], dtype='int')[:self.params['n_gids_to_record']]
            record_exc = True
            n_rnd_cells_to_record = 2
        
        else:
            n_cells_to_record = 5# self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params['n_exc'], n_cells_to_record)
        
        

        if ps.params['anticipatory_mode']:
            record_gids, pops = utils.select_well_tuned_cells(self.tuning_prop_exc, self.params, self.params['n_gids_to_record'], 1)
            np.savetxt(self.params['gids_to_record_fn'], record_gids)
            self.exc_pop_view_anticipation = PopulationView(self.exc_pop, record_gids, label='anticipation')
            self.exc_pop_view_anticipation.record_v()
            self.exc_pop_view_anticipation.record_gsyn()
            self.anticipatory_record = True
        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label='good_exc_neurons')
            self.exc_pop_view.record_v()
            self.inh_pop_view = PopulationView(self.inh_pop, np.random.randint(0, self.params['n_inh'], self.params['n_gids_to_record']), label='random_inh_neurons')
            self.inh_pop_view.record_v()


        self.inh_pop.record()
        self.exc_pop.record()
        self.times['t_record'] = self.timer.diff()

        # # # # # # # # # # # # # #
        #     R U N N N I N G     #
        # # # # # # # # # # # # # #
        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params['t_sim'])
        self.times['t_sim'] = self.timer.diff()

    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #   P R I N T    R E S U L T S  #
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print 'print_v to file: %s.v' % (self.params['exc_volt_fn_base'])
            self.exc_pop_view.print_v("%s.v" % (self.params['exc_volt_fn_base']), compatible_output=False)
            if self.pc_id == 0:
                print "Printing inhibitory membrane potentials"
            self.inh_pop_view.print_v("%s.v" % (self.params['inh_volt_fn_base']), compatible_output=False)

        self.anticipatory_record = getattr(self, 'anticipatory_record', False)
        print 'DEBUG printing anticipatory cells', self.anticipatory_record
        if self.anticipatory_record:
            print 'print_v to file: %s' % (self.params['exc_volt_anticipation'])
            self.exc_pop_view_anticipation.print_v("%s" % (self.params['exc_volt_anticipation']), compatible_output=False)
            print 'print_gsyn to file: %s' % (self.params['exc_gsyn_anticipation'])
            self.exc_pop_view_anticipation.print_gsyn("%s" % (self.params['exc_gsyn_anticipation']), compatible_output=False)


        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params['exc_spiketimes_fn_merged'] + '.ras')
        if self.pc_id == 0:
            print "Printing inhibitory spikes"
        self.inh_pop.printSpikes(self.params['inh_spiketimes_fn_merged'] + '.ras')

        self.times['t_print'] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times['t_end'] = self.timer.diff()

        if self.pc_id == 0:
            self.times['t_all'] = 0.
            for k in self.times.keys():
                self.times['t_all'] += self.times[k]

            self.n_cells = {}
            self.n_cells['n_exc'] = self.params['n_exc']
            self.n_cells['n_inh'] = self.params['n_inh']
            self.n_cells['n_cells'] = self.params['n_cells']
            self.n_cells['n_proc'] = self.n_proc
            output = {'times' : self.times, 'n_cells_proc' : self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_sim'], (self.times['t_sim'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_all'], (self.times['t_all'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            fn = utils.convert_to_url(params['folder_name'] + 'times_dict_np%d.py' % self.n_proc)
Example #18
0
import pacman

import struct
import sys
import numpy
import numpy.random as r
numpy.set_printoptions(precision=4)

from pyNN.utility import Timer

#import pickle
import cPickle as pickle
import sqlite3

timer = Timer()
timer.start()

DEBUG = pacman.pacman_configuration.getboolean('synapse_writer', 'debug')

INFO = True

PARALLEL = pacman.pacman_configuration.getboolean(
    'synapse_writer',
    'parallel')  # run the synapse_writer in parallel processes
num_processes = pacman.pacman_configuration.getint(
    'synapse_writer', 'processes')  # number of processes

DELTA_T = pacman.pacman_configuration.getfloat(
    'synapse_writer', 'parallel_delta_t')  # delay between 2 queries
N_QUERY = 500
Example #19
0
class NetworkModel(object):

    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print "USE_MPI: yes", '\tpc_id, n_proc:', self.pc_id, self.n_proc
        else:
            self.pc_id, self.n_proc = 0, 1
            print "MPI not used"


    def import_pynn(self):
        """
        This function only needs to be called when this class is used as an imported module in another script
        """
        import pyNN
        exec("from pyNN.%s import *" % self.params['simulator'])
        print 'import pyNN\npyNN.version: ', pyNN.__version__



    def setup(self, load_tuning_prop=False):

        if load_tuning_prop:
            print 'Loading tuning properties from', self.params['tuning_prop_means_fn']
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
        else:
            print 'Preparing tuning properties with limited range....'
            x_range = (0, 1.)
            y_range = (0.2, .5)
            u_range = (.05, 1.0)
            v_range = (-.2, .2)
            tp_exc_good, tp_exc_out_of_range = utils.set_limited_tuning_properties(params, y_range, x_range, u_range, v_range, cell_type='exc')
            self.tuning_prop_exc = tp_exc_good
            print 'n_exc within range: ', tp_exc_good[:, 0].size
            print "Saving tuning_prop to file:", params['tuning_prop_means_fn']
            np.savetxt(params['tuning_prop_means_fn'], tp_exc_good)

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params['motion_params'], self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = {}
        # # # # # # # # # # # # 
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()

    def create_neurons_with_limited_tuning_properties(self, input_created):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']


        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        self.exc_pop.initialize('v', self.v_init_dist)
        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

#        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
#        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
#        self.inh_pop.initialize('v', self.v_init_dist)
        self.times['t_create'] = self.timer.diff()


    def create(self, input_created=False):
        """
            # # # # # # # # # # # # 
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
#        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
        elif self.params['neuron_model'] == 'EIF_cond_alpha_isfa_ista':
            self.exc_pop = Population(self.params['n_exc'], EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        self.inh_pop.initialize('v', self.v_init_dist)

        self.times['t_create'] = self.timer.diff()


    def connect(self):
        self.connect_input_to_exc()
        self.connect_populations('ee')
#        self.connect_populations('ei')
#        self.connect_populations('ie')
#        self.connect_populations('ii')
        self.connect_noise()
        self.times['t_connect'] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def create_input(self, load_files=False, save_output=False):


        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params['input_st_fn_base'] + str(tgt) + '.npy'
                    spike_times = np.load(fn)
                except: # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params['input_spikes_seed'])
            dt = self.params['dt_rate'] # [ms] time step for the non-homogeneous Poisson process
            time = np.arange(0, self.params['t_sim'], dt)
            blank_idx = np.arange(1./dt * self.params['t_before_blank'], 1. / dt * (self.params['t_before_blank'] + self.params['t_blank']))

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))
            # get the input signal
            for i_time, time_ in enumerate(time):
                if (i_time % 500 == 0):
                    print "t:", time_
                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/1000.)
#                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/self.params['t_stimulus'])
                L_input[:, i_time] *= self.params['f_max_stim']
            # blanking 
            for i_time in blank_idx:
                L_input[:, i_time] = 0.

            # create the spike trains
            for i_, unit in enumerate(my_units):
                rate_of_t = np.array(L_input[i_, :]) 
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt) 
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params['input_rate_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params['input_st_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, np.array(spike_times))
        self.times['create_input'] = self.timer.diff()
        return self.spike_times_container

    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # 
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # # 
        """
        if self.pc_id == 0:
            print "Connecting input spiketrains..."
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory')
        self.times['connect_input'] = self.timer.diff()


    def connect_anisotropic(self, conn_type):
        """
        """
        if self.pc_id == 0:
            print 'Connect anisotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''

        n_src_cells_per_neuron = int(round(self.params['p_%s' % conn_type] * n_src))
        (delay_min, delay_max) = self.params['delay_range']
        for tgt in tgt_cells:
            p = np.zeros(n_src)
            latency = np.zeros(n_src)
            for src in xrange(n_src):
                if conn_type[0] == conn_type[1]: # no self-connection
                    if (src != tgt):
                        p[src], latency[src] = CC.get_p_conn(tp_src[src, :], tp_tgt[tgt, :], params['w_sigma_x'], params['w_sigma_v']) #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
                else: # different populations --> same indices mean different cells, no check for src != tgt
                    p[src], latency[src] = CC.get_p_conn(tp_src[src, :], tp_tgt[tgt, :], params['w_sigma_x'], params['w_sigma_v']) #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']

            sorted_indices = np.argsort(p)
            if conn_type[0] == 'e':
                sources = sorted_indices[-n_src_cells_per_neuron:] 
            else:
                if conn_type == 'ii':
                    sources = sorted_indices[1:n_src_cells_per_neuron+1]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron] 
            w = (self.params['w_tgt_in_per_cell_%s' % conn_type] / p[sources].sum()) * p[sources]
            for i in xrange(len(sources)):
                if w[i] > self.params['w_thresh_connection']:
#                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                    delay = min(max(latency[sources[i]] * self.params['t_stimulus'], delay_min), delay_max)  # map the delay into the valid range
    #                print 'debug ', delay , ' latency', latency[sources[i]]
    #                delay = min(max(latency[sources[i]] * self.params['delay_scale'], delay_min), delay_max)  # map the delay into the valid range
                    connect(src_pop[sources[i]], tgt_pop[tgt], w[i], delay=delay, synapse_type=syn_type)
                    if self.debug_connectivity:
                        output += '%d\t%d\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], delay) #                    output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])


        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()



    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == 'ee':
            n_src, n_tgt = self.tuning_prop_exc[:, 0].size, self.tuning_prop_exc[:, 0].size
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = 'excitatory'

        elif conn_type == 'ei':
            n_src, n_tgt = self.tuning_prop_exc[:, 0].size, self.tuning_prop_inh[:, 0].size
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = 'excitatory'

        elif conn_type == 'ie':
            n_src, n_tgt = self.tuning_prop_inh[:, 0].size, self.tuning_prop_exc[:, 0].size
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = 'inhibitory'

        elif conn_type == 'ii':
            n_src, n_tgt = self.tuning_prop_inh[:, 0].size, self.tuning_prop_inh[:, 0].size
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = 'inhibitory'

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)





    def connect_isotropic(self, conn_type='ee'):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = exp(- d_ij / (2 * w_sigma_x**2))

        This will give a 'convergence constrained' connectivity, i.e. each cell will have the same sum of incoming weights 
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print 'Connect isotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_= self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ei':
            w_= self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ie':
            w_= self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ii':
            w_= self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''

        p_max = utils.get_pmax(self.params['p_%s' % conn_type])
        for tgt in tgt_cells:
            w = np.zeros(n_src, dtype='float32') 
            delays = np.zeros(n_src, dtype='float32')
            for src in xrange(n_src):
                if (src != tgt):
#                    d_ij = np.sqrt((tp_src[src, 0] - tp_tgt[tgt, 0])**2 + (tp_src[src, 1] - tp_tgt[tgt, 1])**2)
                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
                    p_ij = p_max * np.exp(-d_ij / (2 * params['w_sigma_x']**2))
                    if np.random.rand() <= p_ij:
                        w[src] = w_
                        delays[src] = d_ij * self.params['delay_scale']
            w *= w_tgt_in / w.sum()
            srcs = w.nonzero()[0]
            for src in srcs:
                if w[src] > self.params['w_thresh_connection']:
                    delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
                    connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
                    output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay) 
    #                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=params['standard_delay'], synapse_type=syn_type)
    #                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], params['standard_delay']) 
                    
        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()
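    # Note on the renormalisation above (an illustrative reading of the code): after the
    # Bernoulli draws, w *= w_tgt_in / w.sum() rescales the realised incoming weights so
    # that each target cell again receives a total input of w_tgt_in, e.g. three drawn
    # weights [0.5, 0.5, 0.5] with w_tgt_in = 0.03 become [0.01, 0.01, 0.01].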

#   isotropic nearest neighbour code:
#        for tgt in tgt_cells:
#            n_src_to_choose = int(round(p_max * n_src)) # guarantee that all cells have same number of connections
#            dist = np.zeros(n_src, dtype='float32')
#            for src in xrange(n_src):
#                if (src != tgt):
#                    dist[src] = np.sqrt((tp_src[src, 0] - tp_tgt[tgt, 0])**2 + (tp_src[src, 1] - tp_tgt[tgt, 1])**2)
#            src_idx = dist.argsort()[:n_src_to_choose] # choose cells closest to the target
#            for src in src_idx:
#                connect(src_pop[int(src)], tgt_pop[int(tgt)], w_, delay=params['standard_delay'], synapse_type='excitatory')
#                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w_, params['standard_delay']) 



    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters to the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print 'Connect random connections %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        w_mean = self.params['w_tgt_in_per_cell_%s' % conn_type] / (n_src * self.params['p_%s' % conn_type])
        w_sigma = w_mean * .5 * (self.params['w_sigma_x'] + self.params['w_sigma_v'])

        weight_distr = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))

        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        connector= FastFixedProbabilityConnector(self.params['p_%s' % conn_type], weights=weight_distr, delays=delay_dist)
        prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
        print 'Saving random %s connections to %s' % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)



    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # # 
            #     C O N N E C T   #
            # # # # # # # # # # # # 
            Calls the right connection routine according to the flag set in simulation_parameters.py
        """
        if self.params['connectivity_%s' % conn_type] == 'anisotropic':
            self.connect_anisotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'isotropic':
            self.connect_isotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'random':
            self.connect_random(conn_type)
        else: # populations do not get connected
            pass
        self.times['t_calc_conns'] += self.timer.diff()


    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # # 
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # # 
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
        for tgt in self.local_idx_exc:
            #new
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.exc_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.exc_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)

#        if self.pc_id == 0:
#            print "Connecting noise - inh ... "
#        for tgt in self.local_idx_inh:
#            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
#                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
#                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
#            else:
#                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
#                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
#            connect(noise_exc, self.inh_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
#            connect(noise_inh, self.inh_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)





    def run_sim(self, sim_cnt, record_v=True):
        # # # # # # # # # # # # # # # # # # # #
        #     R E C O R D   &   R U N        #
        # # # # # # # # # # # # # # # # # # # #
        record_exc = True
        if os.path.exists(self.params['gids_to_record_fn']):
            gids_to_record = np.loadtxt(self.params['gids_to_record_fn'], dtype='int')[:self.params['n_gids_to_record']]
            record_exc = True
            n_rnd_cells_to_record = 2
        else:
            n_cells_to_record = 5  # self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params['n_exc'], n_cells_to_record)


        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label='good_exc_neurons')
            self.exc_pop_view.record_v()

        self.exc_pop.record()
        self.times['t_record'] = self.timer.diff()

        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params['t_sim'])
        self.times['t_sim'] = self.timer.diff()


    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #     P R I N T    R E S U L T S 
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print 'print_v to file: %s.v' % (self.params['exc_volt_fn_base'])
            self.exc_pop_view.print_v("%s.v" % (self.params['exc_volt_fn_base']), compatible_output=False)

        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params['exc_spiketimes_fn_merged'] + '.ras')
        # print a dummy file for inhibitory
        np.savetxt(self.params['inh_spiketimes_fn_merged'] + '.ras', np.array([]))

        self.times['t_print'] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times['t_end'] = self.timer.diff()

        if self.pc_id == 0:
            self.times['t_all'] = 0.
            for k in self.times.keys():
                self.times['t_all'] += self.times[k]

            self.n_cells = {}
            self.n_cells['n_exc'] = self.params['n_exc']
            self.n_cells['n_inh'] = self.params['n_inh']
            self.n_cells['n_cells'] = self.params['n_cells']
            self.n_cells['n_proc'] = self.n_proc
            output = {'times' : self.times, 'n_cells_proc' : self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_sim'], (self.times['t_sim'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_all'], (self.times['t_all'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            fn = utils.convert_to_url(self.params['folder_name'] + 'times_dict_np%d.py' % self.n_proc)
            output = ntp.ParameterSet(output)
            output.save(fn)
Example #20
    def run(self, params, verbose=True):
        """
        Build and run the retina network using the given parameter dict.
        """
        tmpdir = tempfile.mkdtemp()
        myTimer = Timer()
        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        myTimer.start()  # start timer on construction
        sim.setup(timestep=params['dt'], max_delay=params['syn_delay'])
        N = params['N']
        #dc_generator
        phr_ON = sim.Population((N, ), 'dc_generator')
        phr_OFF = sim.Population((N, ), 'dc_generator')

        for factor, phr in [(-params['snr'], phr_OFF),
                            (params['snr'], phr_ON)]:
            phr.tset('amplitude', params['amplitude'] * factor)
            phr.set({
                'start': params['simtime'] / 4,
                'stop': params['simtime'] / 4 * 3
            })
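        # In other words, the DC stimulus is switched on during the middle half of the
        # simulation (from simtime/4 to 3*simtime/4), with opposite sign for the ON and
        # OFF pathways.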

        # internal noise model (see benchmark_noise)
        noise_ON = sim.Population((N, ), 'noise_generator', {
            'mean': 0.,
            'std': params['noise_std']
        })
        noise_OFF = sim.Population((N, ), 'noise_generator', {
            'mean': 0.,
            'std': params['noise_std']
        })

        # target ON and OFF populations (what about a tridimensional Population?)
        out_ON = sim.Population(
            (N, ), sim.IF_curr_alpha
        )  # alternative cell types tried here: IF_cond_alpha, iaf_sfa_neuron, EIF_cond_alpha_isfa_ista, IF_cond_exp_gsfa_grr
        out_OFF = sim.Population(
            (N, ), sim.IF_curr_alpha
        )  # alternative cell types tried here: IF_cond_alpha, IF_curr_alpha, iaf_sfa_neuron

        # initialize membrane potential TODO: and conductances?
        from pyNN.random import RandomDistribution, NumpyRNG
        rng = NumpyRNG(seed=params['kernelseed'])
        vinit_distr = RandomDistribution(distribution='uniform',
                                         parameters=[-70, -55],
                                         rng=rng)
        for out_ in [out_ON, out_OFF]:
            out_.randomInit(vinit_distr)

        retina_proj_ON = sim.Projection(phr_ON, out_ON,
                                        sim.OneToOneConnector())
        retina_proj_ON.setWeights(params['weight'])
        # TODO fix setWeight, add setDelays to 10 ms (relative to stimulus onset)
        retina_proj_OFF = sim.Projection(phr_OFF, out_OFF,
                                         sim.OneToOneConnector())
        retina_proj_OFF.setWeights(params['weight'])

        noise_proj_ON = sim.Projection(noise_ON, out_ON,
                                       sim.OneToOneConnector())
        noise_proj_ON.setWeights(params['weight'])
        noise_proj_OFF = sim.Projection(
            noise_OFF, out_OFF, sim.OneToOneConnector(
            ))  # implication if ON and OFF have the same noise input?
        noise_proj_OFF.setWeights(params['weight'])

        out_ON.record()
        out_OFF.record()

        # reads out time used for building
        buildCPUTime = myTimer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        myTimer.reset()  # restart timer for the simulation run
        sim.run(params['simtime'])
        simCPUTime = myTimer.elapsedTime()

        myTimer.reset()  # restart timer for writing the output files
        # TODO LUP use something like "for pop in [phr, out]" ?
        out_ON_filename = os.path.join(tmpdir, 'out_on.gdf')
        out_OFF_filename = os.path.join(tmpdir, 'out_off.gdf')
        out_ON.printSpikes(out_ON_filename)  #
        out_OFF.printSpikes(out_OFF_filename)  #

        # TODO LUP  get out_ON_DATA on a 2D grid independently of out_ON.cell.astype(int)
        out_ON_DATA = load_spikelist(out_ON_filename,
                                     range(N),
                                     t_start=0.0,
                                     t_stop=params['simtime'])
        out_OFF_DATA = load_spikelist(out_OFF_filename,
                                      range(N),
                                      t_start=0.0,
                                      t_stop=params['simtime'])

        out = {
            'out_ON_DATA': out_ON_DATA,
            'out_OFF_DATA': out_OFF_DATA
        }  #,'out_ON_pos':out_ON}
        # cleans up
        os.remove(out_ON_filename)
        os.remove(out_OFF_filename)
        os.rmdir(tmpdir)
        writeCPUTime = myTimer.elapsedTime()

        if verbose:
            print "\nRetina Network Simulation:"
            print(params['description'])
            print "Number of Neurons  : ", N
            print "Output rate  (ON) : ", out_ON_DATA.mean_rate(
            ), "Hz/neuron in ", params['simtime'], "ms"
            print "Output rate (OFF)   : ", out_OFF_DATA.mean_rate(
            ), "Hz/neuron in ", params['simtime'], "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        return out
Example #21
def test(cases=[1]):    
    
    sp            = Space(periodic_boundaries=((0,1), (0,1), None))
    safe          = False
    verbose       = True
    autapse       = False
    parallel_safe = True    
    render        = True
        
    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2) 
        
        #delay = RandomDistribution('uniform', (0.1,5.))
        delay = "0.1 + d/0.2"
        #delay = 0.1    
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances 
    
        d_expression = "d < 0.1"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"
    
        timer   = Timer()
        np      = num_processes()
        timer.start()    
        if case == 1:
            conn  = DistanceDependentProbabilityConnector(d_expression, delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "DistanceDependent_%s_np_%d.png" %(simulator_name, np)
        elif case == 2:
            conn  = FixedProbabilityConnector(0.05, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedProbability_%s_np_%d.png" %(simulator_name, np)
        elif case == 3:
            conn  = AllToAllConnector(delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" %(simulator_name, np)
        elif case == 4:
            conn  = FixedNumberPostConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedNumberPost_%s_np_%d.png" %(simulator_name, np)
        elif case == 5:
            conn  = FixedNumberPreConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedNumberPre_%s_np_%d.png" %(simulator_name, np)
        elif case == 6:
            conn  = OneToOneConnector(safe=safe, weights=w, delays=delay, verbose=verbose)
            fig_name = "OneToOne_%s_np_%d.png" %(simulator_name, np)
        elif case == 7:
            conn  = FromFileConnector('connections.dat', safe=safe, verbose=verbose)
            fig_name = "FromFile_%s_np_%d.png" %(simulator_name, np)
        elif case == 8:
            conn  = SmallWorldConnector(degree=0.1, rewiring=0., weights=w, delays=delay, safe=safe, verbose=verbose, allow_self_connections=autapse, space=sp)
            fig_name = "SmallWorld_%s_np_%d.png" %(simulator_name, np)
        
        
        print "Generating data for %s" %fig_name
        rng   = NumpyRNG(23434, num_processes=np, parallel_safe=parallel_safe)
        prj   = Projection(x, x, conn, rng=rng)

        build_time = timer.elapsedTime()
        print "Building time", build_time
        print "Nb synapses built", len(prj)

        if render : 
            if not(os.path.isdir('Results')):
                os.mkdir('Results')

            print "Saving Positions...."
            x.savePositions('Results/positions.dat')

            print "Saving Connections...."
            prj.saveConnections('Results/connections.dat', compatible_output=False)
            
        if node_id == 0 and render:
            figure()
            print "Generating and saving %s" %fig_name
            positions   = numpy.loadtxt('Results/positions.dat')
            connections = numpy.loadtxt('Results/connections.dat')
            positions   = positions[numpy.argsort(positions[:,0])]
            idx_pre     = (connections[:,0] - x.first_id).astype(int)
            idx_post    = (connections[:,1] - x.first_id).astype(int)
            d           = distances(positions[idx_pre,1:3], positions[idx_post,1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:,1], positions[:,2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:,2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:,3], 50)
            subplot(234)
            ids   = numpy.random.permutation(numpy.unique(positions[:,0]))[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y'] 
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:,2], '.')

            subplot(236)
            plot(d, connections[:,3], '.')
            savefig("Results/" + fig_name)            
            os.remove('Results/connections.dat')
            os.remove('Results/positions.dat')
Example #22
from connector_functions import create_cortical_to_cortical_connection
from connector_functions import normalize_connection_list
from connector_functions import create_cortical_to_cortical_connection_corr
from connector_functions import create_thalamocortical_connection
from analysis_functions import calculate_tuning, visualize_conductances, visualize_conductances_and_voltage
from analysis_functions import conductance_analysis
from plot_functions import plot_spiketrains

#############################

simulator = get_script_args(1)[0]
exec("import pyNN.%s as simulator" % simulator)
#import pyNN.nest as simulator
#import pyNN.neuron as simulator

timer = Timer()

#############################
##  Parameters
#############################

# ============== Network and simulation parameters =================

contrast = 0.50  # Contrast used (possible range available in ./data)

Nside_lgn = 30  # N_lgn x N_lgn is the size of the LGN
Nside_exc = 40  # N_exc x N_exc is the  size of the cortical excitatory layer
Nside_inh = 20  # N_inh x N_inh is the size of the cortical inhibitory layer

factor = 1  # Reduction factor
def runNetwork(Be, 
               Bi, 
               nn_stim, 
               show_gui=True,
               dt = defaultParams.dt, 
               N_rec_v = 5, 
               save=False, 
               simtime = defaultParams.Tpost+defaultParams.Tstim+defaultParams.Tblank+defaultParams.Ttrans, 
               extra = {},
               kernelseed = 123):
    
    exec("from pyNN.%s import *" % simulator_name) in globals()
    
    timer = Timer()

    rec_conn={'EtoE':1, 'EtoI':1, 'ItoE':1, 'ItoI':1}

    print('####################')
    print('### (Be, Bi, nn_stim): ', Be, Bi, nn_stim)
    print('####################')

    Bee, Bei = Be, Be
    Bie, Bii = Bi, Bi

    N = defaultParams.N
    NE = defaultParams.NE
    NI = defaultParams.NI

    print('\n # -----> Num cells: %s, size of pert. inh: %s; base rate %s; pert rate %s'% (N, nn_stim, defaultParams.r_bkg, defaultParams.r_stim))

    r_extra = np.zeros(N)
    r_extra[NE:NE+nn_stim] = defaultParams.r_stim

    rr1 = defaultParams.r_bkg*np.random.uniform(.75,1.25, N)
    rr2 = rr1 + r_extra
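    # rr1 is the per-cell background rate, jittered uniformly by +/-25%; rr2 adds the
    # perturbation rate r_stim to the nn_stim inhibitory cells whose indices directly
    # follow the excitatory block (NE .. NE+nn_stim).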
    
    rank = setup(timestep=dt, max_delay=defaultParams.delay_default, reference='ISN', save_format='hdf5', **extra)
    
    print("rank =", rank)
    nump = num_processes()
    print("num_processes =", nump)
    import socket
    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank+1, host_name))

    if 'threads' in extra:
        print("%d Initialising the simulator with %d threads..." % (rank, extra['threads']))
    else:
        print("%d Initialising the simulator with single thread..." % rank)
        
        
    timer.start()  # start timer on construction
    
    print("%d Setting up random number generator using seed %s" % (rank, kernelseed))
    
    ks = open('kernelseed','w')
    ks.write('%i'%kernelseed)
    ks.close()
    
    rng = NumpyRNG(kernelseed, parallel_safe=True)
    
    
    nesp = defaultParams.neuron_params_default
    cell_parameters = {
        'cm':         nesp['C_m']/1000,   # Capacitance of the membrane in nF
        'tau_refrac': nesp['t_ref'],     # Duration of refractory period in ms.
        'v_spike':    0.0 ,     # Spike detection threshold in mV.   https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'v_reset':    nesp['V_reset'],     # Reset value for V_m after a spike. In mV.
        'v_rest':     nesp['E_L'],     # Resting membrane potential (Leak reversal potential) in mV.
        'tau_m':      nesp['C_m']/nesp['g_L'],  # Membrane time constant in ms (C_m/g_L)
        'i_offset':   nesp['I_e']/1000,     # Offset current in nA
        'a':          0,     # Subthreshold adaptation conductance in nS.
        'b':          0,  # Spike-triggered adaptation in nA
        'delta_T':    2 ,     # Slope factor in mV. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'tau_w':      144.0,     # Adaptation time constant in ms. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'v_thresh':   nesp['V_th'],     # Spike initiation threshold in mV
        'e_rev_E':    nesp['E_ex'],     # Excitatory reversal potential in mV.
        'tau_syn_E':  nesp['tau_syn_ex'],     # Rise time of excitatory synaptic conductance in ms (alpha function).
        'e_rev_I':    nesp['E_in'],     # Inhibitory reversal potential in mV.
        'tau_syn_I':  nesp['tau_syn_in'],     # Rise time of the inhibitory synaptic conductance in ms (alpha function).
    }
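    # Worked example of the unit conversions above, with hypothetical NEST values:
    # C_m = 200 pF gives cm = 0.2 nF, and with g_L = 10 nS, tau_m = 200/10 = 20 ms.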

    print("%d Creating population with %d neurons." % (rank, N))
    celltype = EIF_cond_alpha_isfa_ista(**cell_parameters)
    celltype.default_initial_values['v'] = cell_parameters['v_rest'] # Setting default init v, useful for NML2 export
    
    layer_volume = Cuboid(1000,100,1000)
    layer_structure = RandomStructure(layer_volume, origin=(0,0,0))
    
    layer_structure_input = RandomStructure(layer_volume, origin=(0,-150,0))
             
    default_cell_radius = 15
    stim_cell_radius = 10
    
    #EI_pop = Population(N, celltype, structure=layer_structure, label="EI")
    E_pop = Population(NE, celltype, structure=layer_structure, label='E_pop')
    E_pop.annotate(color='1 0 0')
    E_pop.annotate(radius=default_cell_radius)
    E_pop.annotate(type='E') # temp indicator to use for connection arrowhead
    #print("%d Creating pop %s." % (rank, E_pop))
    I_pop = Population(NI, celltype, structure=layer_structure, label='I_pop')
    I_pop.annotate(color='0 0 .9')
    I_pop.annotate(radius=default_cell_radius)
    I_pop.annotate(type='I') # temp indicator to use for connection arrowhead
    #print("%d Creating pop %s." % (rank, I_pop))
    
    I_pert_pop = PopulationView(I_pop, np.array(range(0,nn_stim)),label='I_pert_pop')
    I_nonpert_pop = PopulationView(I_pop, np.array(range(nn_stim,NI)),label='I_nonpert_pop')
    
    p_rate = defaultParams.r_bkg
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeA_E = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim+defaultParams.Tpost)
    expoissonA_E = Population(NE, source_typeA_E, structure=layer_structure_input, label="stim_E")
    
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeA_I = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank)
    expoissonA_I = Population(NI, source_typeA_I, structure=layer_structure_input, label="pre_pert_stim_I")
    
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeB = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank,duration=defaultParams.Tstim+defaultParams.Tpost)
    #expoissonB_E = Population(NE, source_typeB, label="non_pert_stim_E")
    expoissonB_I = Population(len(I_nonpert_pop), source_typeB, structure=layer_structure_input, label="non_pert_stim_I")
    
    p_rate = defaultParams.r_bkg+defaultParams.r_stim
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeC = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank, duration=defaultParams.Tstim)
    expoissonC = Population(nn_stim, source_typeC, structure=layer_structure_input, label="pert_stim")

    p_rate = defaultParams.r_bkg
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeD = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim, duration=defaultParams.Tpost)
    expoissonD = Population(nn_stim, source_typeD, structure=layer_structure_input, label="pert_poststim")
    
    for p in [expoissonA_E,expoissonA_I,expoissonB_I,expoissonC,expoissonD]:
        p.annotate(color='0.8 0.8 0.8')
        p.annotate(radius=stim_cell_radius)

    progress_bar = ProgressBar(width=20)
    connector_E = FixedProbabilityConnector(0.15, rng=rng, callback=progress_bar)
    connector_I = FixedProbabilityConnector(1, rng=rng, callback=progress_bar)
    
    EE_syn = StaticSynapse(weight=0.001*Bee, delay=defaultParams.delay_default)
    EI_syn = StaticSynapse(weight=0.001*Bei, delay=defaultParams.delay_default)
    II_syn = StaticSynapse(weight=0.001*Bii, delay=defaultParams.delay_default)
    IE_syn = StaticSynapse(weight=0.001*Bie, delay=defaultParams.delay_default)
    
    #I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn_bkg = StaticSynapse(weight=0.001*defaultParams.Be_bkg, delay=defaultParams.delay_default)
    ext_syn_stim = StaticSynapse(weight=0.001*defaultParams.Be_stim, delay=defaultParams.delay_default)
    
    
    E_to_E = Projection(E_pop, E_pop, connector_E, EE_syn, receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    E_to_I = Projection(E_pop, I_pop, connector_E, EI_syn, receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_pop, I_pop, connector_I, II_syn, receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    I_to_E = Projection(I_pop, E_pop, connector_I, IE_syn, receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    
    
    input_A_E = Projection(expoissonA_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(E_pop), len(input_A_E), "connections")
    input_A_I = Projection(expoissonA_I, I_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pop), len(input_A_I), "connections")
    
    ##input_B_E = Projection(expoissonB_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    ##print("input --> %s cells post pert\t"%len(E_pop), len(input_B_E), "connections")
    
    input_B_I = Projection(expoissonB_I, I_nonpert_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells post pert\t"%len(I_nonpert_pop), len(input_B_I), "connections")
    
    
    input_C = Projection(expoissonC, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_C), "connections")
    
    input_D = Projection(expoissonD, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_D), "connections")
    
    # Can't be used for connections etc. as NeuroML export not (yet) supported
    EI_pop = Assembly(E_pop, I_pop, label='EI')
    
    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    EI_pop.record('spikes')
    if N_rec_v>0:
        EI_pop[0:min(N,N_rec_v)].record('v')
    
    
    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # restart timer for the simulation run
    print("%d Running simulation in %s for %g ms (dt=%sms)." % (rank, simulator_name, simtime, dt))
    run(simtime)
    print("Done")
    simCPUTime = timer.elapsedTime()
    
    # write data to file
    if save and not simulator_name == 'neuroml':
        for pop in [EI_pop]:
            filename="ISN-%s-%s-%i.gdf"%(simulator_name, pop.label, rank)
            ff = open(filename, 'w')
            spikes =  pop.get_data('spikes', gather=False)
            spiketrains = spikes.segments[0].spiketrains
            print('Saving data recorded for %i spiketrains in pop %s, indices: %s, ids: %s to %s'% \
                (len(spiketrains),
                 pop.label, 
                 [s.annotations['source_index'] for s in spiketrains], 
                 [s.annotations['source_id'] for s in spiketrains], 
                 filename))
                 
            for spiketrain_i in range(len(spiketrains)):
                spiketrain = spiketrains[spiketrain_i]
                source_id = spiketrain.annotations['source_id']
                source_index = spiketrain.annotations['source_index']
                #print("Writing spike data for cell %s[%s] (gid: %i): %i spikes: [%s,...,%s] "%(pop.label,source_index, source_id, len(spiketrain),spiketrain[0],spiketrain[-1]))
                for t in spiketrain:
                    ff.write('%s\t%i\n'%(t.magnitude,spiketrain_i))
            ff.close()
                
            vs =  pop.get_data('v', gather=False)
            for segment in vs.segments:
                for i in range(len(segment.analogsignals[0].transpose())):
                    filename="ISN-%s-%s-cell%i.dat"%(simulator_name, pop.label, i)
                    print('Saving cell %i in %s to %s'%(i,pop.label,filename))
                    vm = segment.analogsignals[0].transpose()[i]
                    tt = np.array([t*dt/1000. for t in range(len(vm))])
                    times_vm = np.array([tt, vm/1000.]).transpose()
                    np.savetxt(filename, times_vm , delimiter = '\t', fmt='%s')
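                    # The /1000 factors above convert ms to s and mV to V, so the saved
                    # traces are (presumably intentionally) in SI units.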
            
    spike_data = {}
    spike_data['senders'] = []
    spike_data['times'] = []
    index_offset = 1
    for pop in [EI_pop]:
        if rank == 0:
            spikes =  pop.get_data('spikes', gather=False)
            #print(spikes.segments[0].all_data)
            num_rec = len(spikes.segments[0].spiketrains)
            print("Extracting spike info (%i) for %i cells in %s"%(num_rec,pop.size,pop.label))
            #assert(num_rec==len(spikes.segments[0].spiketrains))
            for i in range(num_rec):
                ss = spikes.segments[0].spiketrains[i]
                for s in ss:
                    index = i+index_offset
                    #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                    spike_data['senders'].append(index)
                    spike_data['times'].append(s)
            index_offset+=pop.size


    print("Build time         : %g s" % buildCPUTime)
    print("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()
Example #24
            os.makedirs(opts.data_folder+str(run))
        shutil.copy('./'+opts.param_file, opts.data_folder+ str(run)+'/'+opts.param_file+'_'+str(comb)+'.py')

        if not opts.analysis:
            already_computed = 0
            for pop in params['Populations'].keys():
                if os.path.exists(opts.data_folder + str(run) +'/'+pop+str(comb)+'.pkl'):
                    already_computed = already_computed + 1
            if already_computed > 0:
                print "already computed"
            else:
                Populations = h.build_network(sim,params)
                h.record_data(params, Populations)
                h.perform_injections(params, Populations)
                print "Running Network"
                timer = Timer()
                timer.reset()
                interval = 10
                sim.run(params['run_time'], callbacks = SetInput(Populations, interval, params['dt']))
                simCPUtime = timer.elapsedTime()
                print "Simulation Time: %s" % str(simCPUtime)
                h.save_data(Populations, opts.data_folder + str(run), str(comb))
                sim.end()
        else :
            if search:
                already_computed = 0
                for pop in params['Populations'].keys():
                    if os.path.exists(opts.data_folder + str(run) +'/'+pop+str(comb)+'.png'):
                        already_computed = already_computed + 1
                if already_computed > len(params['Populations']) - 1:
                    print "already analysed"
Example #25
class Mode(enum.Enum):
    train_asymmetrical = 1
    train_symmetrical = 2
    test_asymmetrical = 3
    test_symmetrical = 4


mode = Mode.train_asymmetrical

hcu_grid_size = 2
num_hcu = hcu_grid_size**2
num_mcu_neurons = 100

record_membrane = False

timer = Timer()
timer.start()

spinnaker_kwargs = {
    "spinnaker_hostname": "192.168.1.1",
    "stop_on_spinnaker": True
}

tau_p = 2000

folder = "sequence_%u_%u" % (num_hcu, tau_p)
if not os.path.exists(folder):
    os.makedirs(folder)

# Bind parameters to euclidean HCU delay model
delay_model = functools.partial(network.euclidean_hcu_delay,
    The IF network is based on the CUBA and COBA models of Vogels & Abbott
    (J. Neurosci, 2005).  The model consists of a network of excitatory and
    inhibitory neurons, connected via current-based "exponential"
    synapses (instantaneous rise, exponential decay).

    Andrew Davison, UNIC, CNRS
    August 2006

Author: Bernhard Kaplan, [email protected]
"""
import time
t0 = time.time()

# to store timing information
from pyNN.utility import Timer
timer = Timer()
timer.start()
times = {} 
times['t_startup'] = time.time() - t0

# check imports
import numpy as np
import os
import socket
from math import *
import json
from pyNN.utility import get_script_args
simulator_name = 'nest'
from pyNN.nest import *
#exec("from pyNN.%s import *" % simulator_name)
try:
Example #27
def test(cases=[1]):

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None), axes="xy")
    safe = False
    callback = progress_bar.set_level
    autapse = False
    parallel_safe = True
    render = True
    to_file = True

    for case in cases:
        # w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        # w = 0.1
        # w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        # delay = RandomDistribution('uniform', (0.1,5.))
        # delay = "0.1 + d/0.2"
        delay = 0.1
        # delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "exp(-d**2/(2*0.1**2))"
        # d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        # d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()

        synapse = StaticSynapse(weight=w, delay=delay)
        rng = NumpyRNG(23434, parallel_safe=parallel_safe)

        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng
            )
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(
                0.02, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng
            )
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            conn = AllToAllConnector(delays=delay, safe=safe, callback=callback, allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe, callback=callback)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector(
                files.NumpyBinaryFile("Results/connections.dat", mode="r"),
                safe=safe,
                callback=callback,
                distributed=True,
            )
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(
                degree=0.1, rewiring=0.0, safe=safe, callback=callback, allow_self_connections=autapse
            )
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print "Generating data for %s" % fig_name

        prj = Projection(x, x, conn, synapse, space=sp)

        mytime = timer.diff()
        print "Time to connect the cell population:", mytime, "s"
        print "Nb synapses built", prj.size()

        if to_file:
            if not (os.path.isdir("Results")):
                os.mkdir("Results")
            print "Saving Connections...."
            prj.save("all", files.NumpyBinaryFile("Results/connections.dat", mode="w"), gather=True)

        mytime = timer.diff()
        print "Time to save the projection:", mytime, "s"

        if render and to_file:
            print "Saving Positions...."
            x.save_positions("Results/positions.dat")
        end()

        if node_id == 0 and render and to_file:
            figure()
            print "Generating and saving %s" % fig_name
            positions = numpy.loadtxt("Results/positions.dat")

            positions[:, 0] -= positions[:, 0].min()
            connections = files.NumpyBinaryFile("Results/connections.dat", mode="r").read()
            print positions.shape, connections.shape
            connections[:, 0] -= connections[:, 0].min()
            connections[:, 1] -= connections[:, 1].min()
            idx_pre = connections[:, 0].astype(int)
            idx_post = connections[:, 1].astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title("Cells positions")
            plot(positions[:, 1], positions[:, 2], ".")
            subplot(232)
            title("Weights distribution")
            hist(connections[:, 2], 50)
            subplot(233)
            title("Delay distribution")
            hist(connections[:, 3], 50)
            subplot(234)
            numpy.random.seed(74562)
            ids = numpy.random.permutation(positions[:, 0])[0:6]
            colors = ["k", "r", "b", "g", "c", "y"]
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], ".")

            subplot(236)
            plot(d, connections[:, 3], ".")
            savefig("Results/" + fig_name)
            # os.remove('Results/connections.dat')
            # os.remove('Results/positions.dat')
            show()
Example #28
def run_retina(params):
    """Run the retina using the specified parameters."""

    print "Setting up simulation"
    timer = Timer()
    timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'],
               max_delay=params['syn_delay'],
               threads=params['threads'],
               rng_seeds=params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    phr_OFF = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    noise_ON = pyNN.Population(
        (N, N),
        pyNN.native_cell_type('noise_generator')(mean=0.0,
                                                 std=params['noise_std']))
    noise_OFF = pyNN.Population(
        (N, N),
        pyNN.native_cell_type('noise_generator')(mean=0.0,
                                                 std=params['noise_std']))

    phr_ON.set(start=params['simtime'] / 4,
               stop=params['simtime'] / 4 * 3,
               amplitude=params['amplitude'] * params['snr'])
    phr_OFF.set(start=params['simtime'] / 4,
                stop=params['simtime'] / 4 * 3,
                amplitude=-params['amplitude'] * params['snr'])

    # target ON and OFF populations
    v_init = params['parameters_gc'].pop('Vinit')
    out_ON = pyNN.Population((N, N),
                             pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(
                                 **params['parameters_gc']))
    out_OFF = pyNN.Population((N, N),
                              pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(
                                  **params['parameters_gc']))
    out_ON.initialize(v=v_init)
    out_OFF.initialize(v=v_init)

    #print "Connecting the network"

    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, pyNN.OneToOneConnector())
    retina_proj_ON.set(weight=params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF,
                                      pyNN.OneToOneConnector())
    retina_proj_OFF.set(weight=params['weight'])

    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, pyNN.OneToOneConnector())
    noise_proj_ON.set(weight=params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF,
                                     pyNN.OneToOneConnector())
    noise_proj_OFF.set(weight=params['weight'])

    out_ON.record('spikes')
    out_OFF.record('spikes')

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    print "Running simulation"

    timer.start()  # restart timer for the simulation run
    pyNN.run(params['simtime'])
    simCPUTime = timer.elapsedTime()

    out_ON_DATA = out_ON.get_data().segments[0]
    out_OFF_DATA = out_OFF.get_data().segments[0]

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate  (ON) : ", out_ON.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
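# A minimal usage sketch for run_retina() above (hypothetical values, not taken from the
# original parameter files): the function only needs a flat dict with the keys it reads,
# and returns the ON/OFF spike data segments.
example_retina_params = {
    'dt': 0.1, 'syn_delay': 1.0, 'threads': 1, 'kernelseeds': [43210987],
    'N': 8, 'noise_std': 2.0, 'simtime': 200.0, 'amplitude': 1.0, 'snr': 2.0,
    'weight': 1.0, 'parameters_gc': {'Vinit': -65.0},
    'description': 'toy retina run',
}
# on_data, off_data = run_retina(example_retina_params)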
Example #29
                         weights=g_e,
                         delays=None),
                     target='excitatory',
                     rng=rng)

# Recording
#pyB.record_v(10)
#inhB.record_v(10)
py.record()
inh.record()
pyB.record()
inhB.record()
#

print "Running Network"
timer = Timer()
timer.reset()
run(run_time)
simCPUtime = timer.elapsedTime()

print "Simulation Time: %s" % str(simCPUtime)

#os.chdir('Insert Data Directory Here')
#pyB.print_v('pyB_v.dat')
#inhB.print_v('inhB_v.dat')
py.printSpikes('py.dat')
inh.printSpikes('inh.dat')
pyB.printSpikes('pyB.dat')
inhB.printSpikes('inhB.dat')

#py_py.saveConnections('py_py.conn')
#############################################


from pyNN.random import RandomDistribution, NumpyRNG
from pyNN.utility import get_script_args, Timer, ProgressBar, init_logging, normalized_filename
import matplotlib.pyplot as plt
from auxRoutines import *

simulator_name = get_script_args(1)[0]  

exec("from pyNN.%s import *" % simulator_name)

print("\n")
print("Starting PyNN with simulator: %s" % simulator_name)

timer = Timer()

# Total of 8000 excitatory neurons and 2000 inhibitory neurons. Note that the population sizes specified below are disjoint (no neuron belongs to two of them).
# For instance, the pattern1 population has a total of 720 neurons, 180 of which comprise pattern1_stim.

numOfNeuronsExcPopulation = 5712 	# Excitatory neurons not including pattern1, pattern2 and patternIntersection populations
numOfNeuronsPattern1 = 524		# Neurons of pattern1 not including those of patternIntersection and from neuronsPattern1_stim
numOfNeuronsPattern1_stim = 196		# Sub-set of pattern1 which may receive external stimulation
numOfNeuronsPattern2 = 524		# Neurons of pattern2 not including those of patternIntersection
numOfNeuronsPattern2_stim = 196		# Sub-set of pattern2 which may receive external stimulation
numOfNeuronsPatternIntersection = 64
numOfNeuronsControl = 784

numOfNeuronsInhibPopulation = 2000

stdp_model = STDPMechanism(
    timing_dependence = SpikePairRule(tau_plus = 30.0, tau_minus = 30.0),
    weight_dependence = AdditiveWeightDependence(w_min = 0, w_max = 20, A_plus=0.005, A_minus = 0.006) # _A_plus=0.5, _A_minus=0.6
)
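# Hedged usage note (an assumption, not shown in this fragment): with the PyNN 0.7-style
# API used in this snippet, such a mechanism is typically attached to a projection via
# SynapseDynamics, e.g.
#   prj = Projection(pre_pop, post_pop, FixedProbabilityConnector(0.1),
#                    synapse_dynamics=SynapseDynamics(slow=stdp_model),
#                    target='excitatory')
# where pre_pop/post_pop are placeholders for populations defined elsewhere in the script.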

runtime = 100
#runtime = 500
#runtime = 120000
#runtime =  1000000
metric_window = 100
metric_start_offset = 0
metric_t_start = 0
metric_t_stop = runtime


timer = Timer()
timer.start()


# cell_params will be passed to the constructor of the Population Object

cell_params = {
    'tau_m'      : tau_m,    'cm'         : cm,    
    'v_rest'     : v_rest,   'v_reset'    : v_reset,  'v_thresh'   : v_thresh,
    'tau_syn_E'       : tau_syn_exc,        'tau_syn_I'       : tau_syn_inh, 'tau_refrac'       : t_refrac, 'i_offset' : i_offset
    }




print "%g - Creating input population: %d x %d" % (timer.elapsedTime(), scale.input_size[0], scale.input_size[1])
Example #32
def callback(data_input):

    #====================================================================
    # Unpacking the Joint Angle Message
    #====================================================================
    global message
    message = data_input.degree
    rospy.loginfo('=====> received joint angle in degree %r', message)
    print message

    if type(message) != int:
        input_rates = list(message)
        n_input_neurons = len(input_rates)
    else:
        input_rates = message
        n_input_neurons = 1


    #msg_list= [int(msg.encode('hex'),16) for msg in message]
    

    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt) # 0.1ms


    #====================================================================
    # Defining the LSM
    #====================================================================

    n_res=2000
    w_exc_b=0.2
    w_inh_b=-0.8
    rout_w_exc=20
    rout_w_inh=-80

    n_readout_neurons   = 2
    n_reservoir_neurons = n_res
    n_res = n_reservoir_neurons
    exc_rate            = 0.8 # percentage of excitatory neurons in reservoir

    n_exc = int(round(n_reservoir_neurons*exc_rate))
    n_inh = n_reservoir_neurons-n_exc
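    # With n_res = 2000 and exc_rate = 0.8 this yields 1600 excitatory and
    # 400 inhibitory reservoir neurons.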
    izh_celltype = p.native_cell_type('izhikevich')
    if_celltype = p.IF_curr_exp
    celltype = if_celltype
    
    spike_source = p.native_cell_type('poisson_generator')
    inp_pop=p.Population(n_input_neurons*10,spike_source,{'rate':input_rates})
    
    exc_cells = p.Population(n_exc, celltype, label="Excitatory_Cells")
    inh_cells = p.Population(n_inh, celltype, label="Inhibitory_Cells")

    # initialize with a uniform random distribution
    # use seeding for reproducibility
    rngseed = 98766987
    parallel_safe = True
    rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)

    unifDistr = RandomDistribution('uniform', (-70,-65), rng=rng)
    inh_cells.initialize('V_m',unifDistr)
    exc_cells.initialize('V_m',unifDistr)
    
    readout_neurons = p.Population(2, celltype, label="readout_neuron")
    
    inp_weight=3.
    inp_delay =1

    inp_weight_distr = RandomDistribution('normal', [inp_weight, 1e-3], rng=rng)

    # connect each input neuron to 30% of the reservoir neurons
    inp_conn = p.FixedProbabilityConnector(p_connect=0.3,weights =inp_weight_distr, delays=inp_delay)

    connections = {}
    connections['inp2e'] = p.Projection(inp_pop, exc_cells, inp_conn)
    connections['inp2i'] = p.Projection(inp_pop, inh_cells, inp_conn)

    pconn = 0.01      # sparse connection probability

    # scale the weights w.r.t. the network to keep it stable
    w_exc = w_exc_b/np.sqrt(n_res)      # nA
    w_inh = w_inh_b/np.sqrt(n_res)      # nA
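    # With n_res = 2000, sqrt(n_res) ~= 44.7, so w_exc ~= 0.2/44.7 ~= 0.0045 nA and
    # w_inh ~= -0.8/44.7 ~= -0.018 nA; the 1/sqrt(N) scaling keeps the recurrent input
    # fluctuations roughly comparable across reservoir sizes.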
    
    delay_exc = 1      # defines how long (ms) the synapse takes for transmission
    delay_inh = 1

    weight_distr_exc = RandomDistribution('normal', [w_exc, 1.0/n_res], rng=rng)
    weight_distr_inh = RandomDistribution('normal', [w_inh, 1.0/n_res], rng=rng)
    exc_conn = p.FixedProbabilityConnector(pconn, weights=weight_distr_exc, delays=delay_exc)
    inh_conn = p.FixedProbabilityConnector(pconn, weights=weight_distr_inh, delays=delay_inh)

    connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn, target='excitatory')
    connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn, target='excitatory')
    connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn, target='inhibitory')
    connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn, target='inhibitory')
    
    
    
    rout_conn_exc = p.AllToAllConnector(weights=rout_w_exc, delays=delay_exc)
    rout_conn_inh = p.AllToAllConnector(weights=rout_w_inh, delays=delay_exc)

    
    

    connections['e2rout'] = p.Projection(exc_cells, readout_neurons, rout_conn_exc, target='excitatory')
    connections['i2rout'] = p.Projection(inh_cells, readout_neurons, rout_conn_inh, target='inhibitory')
    
    readout_neurons.record()
    exc_cells.record()
    inh_cells.record()
    inp_pop.record()
    
    
    p.run(20)

    r_spikes = readout_neurons.getSpikes()
    exc_spikes = exc_cells.getSpikes()
    inh_spikes = inh_cells.getSpikes()
    inp_spikes = inp_pop.getSpikes()

    rospy.loginfo('=====> shape of r_spikes %r', np.shape(r_spikes))

    #====================================================================
    # Compute Readout Spike Rates
    #====================================================================
    
  
    alpha_rates = alpha_decoding(r_spikes,dt)
    mean_rates  = mean_decoding(r_spikes,dt)

    #====================================================================
    # Publish Readout Rates
    #====================================================================

    # TODO: error handling if r_spikes is empty
    pub = rospy.Publisher('/alpha_readout_rates', Pop_List, queue_size=10)
    alpha_readout_rates = Pop_List
    alpha_readout_rates = alpha_rates
    pub.publish(alpha_readout_rates)

    pub = rospy.Publisher('/mean_readout_rates', Pop_List, queue_size=10)
    mean_readout_rates = Pop_List
    mean_readout_rates = mean_rates
    pub.publish(mean_readout_rates)
Example #33
"""

from pyNN.utility import get_script_args, Timer
import numpy as numpi
import matplotlib.pyplot as plt
from detect_oscillations_15_50 import replay, ripple, gamma
from pyNN.random import NumpyRNG, RandomDistribution

simulator_name = "neuron"
exec("from pyNN.%s import *" % simulator_name)

# seed for random generator(s) used during simulation
kernelseed  = 43210987      

timer = Timer()

run_name = "ca3net8_gamma62_lif"

dt      = 0.1    # the resolution in ms
simtime = 10000.0 # Simulation time in ms (ideally 10s)
maxdelay   = 4.0    # synaptic delay in ms

epsilon_Pyr = 0.1  # connection probability
epsilon_Bas = 0.25
epsilon_Sli = 0.1

N_Pyr_all = 3600  # number of cells
N_Bas = 200
N_Sli = 200
N_neurons = N_Pyr_all + N_Bas + N_Sli
Example #34
def main_pyNN(parameters):
    timer = Timer()
    sim = import_module(parameters.simulator)
    timer.mark("import")

    sim.setup(threads=parameters.threads)
    timer.mark("setup")

    populations = {}
    for name, P in parameters.populations.parameters():
        populations[name] = sim.Population(P.n,
                                           getattr(sim,
                                                   P.celltype)(**P.params),
                                           label=name)
    timer.mark("build")

    if parameters.projections:
        projections = {}
        for name, P in parameters.projections.parameters():
            connector = getattr(sim, P.connector.type)(**P.connector.params)
            synapse_type = getattr(
                sim, P.synapse_type.type)(**P.synapse_type.params)
            projections[name] = sim.Projection(populations[P.pre],
                                               populations[P.post],
                                               connector,
                                               synapse_type,
                                               receptor_type=P.receptor_type,
                                               label=name)
        timer.mark("connect")

    if parameters.recording:
        for pop_name, to_record in parameters.recording.parameters():
            for var_name, n_record in to_record.items():
                populations[pop_name].sample(n_record).record(var_name)
        timer.mark("record")

    sim.run(parameters.sim_time)
    timer.mark("run")

    spike_counts = {}
    if parameters.recording:
        for pop_name in parameters.recording.names():
            block = populations[pop_name].get_data(
            )  # perhaps include some summary statistics in the data returned?
            spike_counts["spikes_%s" %
                         pop_name] = populations[pop_name].mean_spike_count()
        timer.mark("get_data")

    mpi_rank = sim.rank()
    num_processes = sim.num_processes()
    sim.end()

    data = dict(timer.marks)
    data.update(num_processes=num_processes)
    data.update(spike_counts)
    return mpi_rank, data
Example #35
def runBrunelNetwork(g=5.,
                     eta=2.,
                     dt=0.1,
                     simtime=1000.0,
                     delay=1.5,
                     epsilon=0.1,
                     order=2500,
                     N_rec=50,
                     N_rec_v=2,
                     save=False,
                     simulator_name='nest',
                     jnml_simulator=None,
                     extra={}):

    exec("from pyNN.%s import *" % simulator_name) in globals()

    timer = Timer()

    # === Define parameters ========================================================

    downscale = 1  # scale number of neurons down by this factor
    # scale synaptic weights up by this factor to
    # obtain similar dynamics independent of size
    order = order  # determines size of network:
    # 4*order excitatory neurons
    # 1*order inhibitory neurons
    Nrec = N_rec  # number of neurons to record from, per population
    epsilon = epsilon  # connectivity: proportion of neurons each neuron projects to

    # Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
    # here: Case C, asynchronous irregular firing, ~35 Hz
    eta = eta  # rel rate of external input
    g = g  # rel strength of inhibitory synapses
    J = 0.1  # synaptic weight [mV]
    delay = delay  # synaptic delay, all connections [ms]

    # single neuron parameters
    tauMem = 20.0  # neuron membrane time constant [ms]
    tauSyn = 0.1  # synaptic time constant [ms]
    tauRef = 2.0  # refractory time [ms]
    U0 = 0.0  # resting potential [mV]
    theta = 20.0  # threshold

    # simulation-related parameters
    simtime = simtime  # simulation time [ms]
    dt = dt  # simulation step length [ms]

    # seed for random generator used when building connections
    connectseed = 12345789
    use_RandomArray = True  # use Python rng rather than NEST rng

    # seed for random generator(s) used during simulation
    kernelseed = 43210987

    # === Calculate derived parameters =============================================

    # scaling: compute effective order and synaptic strength
    order_eff = int(float(order) / downscale)
    J_eff = J * downscale

    # compute neuron numbers
    NE = int(4 * order_eff)  # number of excitatory neurons
    NI = int(1 * order_eff)  # number of inhibitory neurons
    N = NI + NE  # total number of neurons

    # compute synapse numbers
    CE = int(epsilon * NE)  # number of excitatory synapses on neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses on neuron
    C = CE + CI  # total number of internal synapses per n.
    Cext = CE  # number of external synapses on neuron

    # synaptic weights, scaled for alpha functions, such that
    # for constant membrane potential, charge J would be deposited
    fudge = 0.00041363506632638  # ensures dV = J at V=0

    # excitatory weight: JE = J_eff / tauSyn * fudge
    JE = (J_eff / tauSyn) * fudge

    # inhibitory weight: JI = - g * JE
    JI = -g * JE

    # threshold, external, and Poisson generator rates:
    nu_thresh = theta / (J_eff * CE * tauMem)
    nu_ext = eta * nu_thresh  # external rate per synapse
    p_rate = 1000 * nu_ext * Cext  # external input rate per neuron (Hz)
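    # Worked example with the default arguments (J=0.1 mV, tauSyn=0.1 ms, tauMem=20 ms,
    # order=2500 -> NE=10000, CE=1000, eta=2): JE = (0.1/0.1)*fudge ~= 4.14e-4 nA,
    # JI = -5*JE, nu_thresh = 20/(0.1*1000*20) = 0.01 spikes/ms, nu_ext = 0.02 spikes/ms,
    # and p_rate = 1000*0.02*1000 = 20000 Hz of external drive per neuron.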

    # number of synapses---just so we know
    Nsyn = (
        C + 1
    ) * N + 2 * Nrec  # number of neurons * (internal synapses + 1 synapse from PoissonGenerator) + 2 synapses to spike detectors

    # put cell parameters into a dict
    cell_params = {
        'tau_m': tauMem,
        'tau_syn_E': tauSyn,
        'tau_syn_I': tauSyn,
        'tau_refrac': tauRef,
        'v_rest': U0,
        'v_reset': U0,
        'v_thresh': theta,
        'cm': 0.001
    }  # (nF)

    # === Build the network ========================================================

    # clear all existing network elements and set resolution and limits on delays.
    # For NEST, limits must be set BEFORE connecting any elements

    #extra = {'threads' : 2}

    rank = setup(timestep=dt, max_delay=delay, **extra)
    print("rank =", rank)
    np = num_processes()
    print("np =", np)
    import socket
    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank + 1, host_name))

    if 'threads' in extra:
        print("%d Initialising the simulator with %d threads..." %
              (rank, extra['threads']))
    else:
        print("%d Initialising the simulator with single thread..." % rank)

    # Small function to display information only on node 1
    def nprint(s):
        if rank == 0:
            print(s)

    timer.start()  # start timer on construction

    print("%d Setting up random number generator" % rank)
    rng = NumpyRNG(kernelseed, parallel_safe=True)

    print("%d Creating excitatory population with %d neurons." % (rank, NE))
    celltype = IF_curr_alpha(**cell_params)
    celltype.default_initial_values[
        'v'] = U0  # Setting default init v, useful for NML2 export
    E_net = Population(NE, celltype, label="E_net")

    print("%d Creating inhibitory population with %d neurons." % (rank, NI))
    I_net = Population(NI, celltype, label="I_net")

    print(
        "%d Initialising membrane potential to random values between %g mV and %g mV."
        % (rank, U0, theta))
    uniformDistr = RandomDistribution('uniform', low=U0, high=theta, rng=rng)
    E_net.initialize(v=uniformDistr)
    I_net.initialize(v=uniformDistr)

    print("%d Creating excitatory Poisson generator with rate %g spikes/s." %
          (rank, p_rate))
    source_type = SpikeSourcePoisson(rate=p_rate)
    expoisson = Population(NE, source_type, label="expoisson")

    print("%d Creating inhibitory Poisson generator with the same rate." %
          rank)
    inpoisson = Population(NI, source_type, label="inpoisson")

    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    E_net.record('spikes')
    if N_rec_v > 0:
        E_net[0:min(NE, N_rec_v)].record('v')

    print("%d Setting up recording in inhibitory population." % rank)
    I_net.record('spikes')
    if N_rec_v > 0:
        I_net[0:min(NI, N_rec_v)].record('v')

    progress_bar = ProgressBar(width=20)
    connector = FixedProbabilityConnector(epsilon,
                                          rng=rng,
                                          callback=progress_bar)
    E_syn = StaticSynapse(weight=JE, delay=delay)
    I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn = StaticSynapse(weight=JE, delay=dt)

    print(
        "%d Connecting excitatory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JE, delay))
    E_to_E = Projection(E_net,
                        E_net,
                        connector,
                        E_syn,
                        receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    I_to_E = Projection(I_net,
                        E_net,
                        connector,
                        I_syn,
                        receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    input_to_E = Projection(expoisson,
                            E_net,
                            ext_Connector,
                            ext_syn,
                            receptor_type="excitatory")
    print("input --> E\t", len(input_to_E), "connections")

    print(
        "%d Connecting inhibitory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JI, delay))
    E_to_I = Projection(E_net,
                        I_net,
                        connector,
                        E_syn,
                        receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_net,
                        I_net,
                        connector,
                        I_syn,
                        receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    input_to_I = Projection(inpoisson,
                            I_net,
                            ext_Connector,
                            ext_syn,
                            receptor_type="excitatory")
    print("input --> I\t", len(input_to_I), "connections")

    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # start timer for the simulation run
    print("%d Running simulation for %g ms (dt=%sms)." % (rank, simtime, dt))
    run(simtime)
    print("Done")
    simCPUTime = timer.elapsedTime()

    # write data to file
    #print("%d Writing data to file." % rank)
    #(E_net + I_net).write_data("Results/brunel_np%d_%s.pkl" % (np, simulator_name))
    if save and not simulator_name == 'neuroml':
        for pop in [E_net, I_net]:
            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.gdf" %
                            (simulator_name, pop.label, rank))
            spikes = pop.get_data('spikes', gather=False)
            for segment in spikes.segments:
                io.write_segment(segment)

            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.dat" %
                            (simulator_name, pop.label, rank))
            vs = pop.get_data('v', gather=False)
            for segment in vs.segments:
                io.write_segment(segment)

    spike_data = {}
    spike_data['senders'] = []
    spike_data['times'] = []
    index_offset = 1
    for pop in [E_net, I_net]:
        if rank == 0:
            spikes = pop.get_data('spikes', gather=False)
            #print(spikes.segments[0].all_data)
            num_rec = len(spikes.segments[0].spiketrains)
            print("Extracting spike info (%i) for %i cells in %s" %
                  (num_rec, pop.size, pop.label))
            #assert(num_rec==len(spikes.segments[0].spiketrains))
            for i in range(num_rec):
                ss = spikes.segments[0].spiketrains[i]
                for s in ss:
                    index = i + index_offset
                    #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                    spike_data['senders'].append(index)
                    spike_data['times'].append(s)
            index_offset += pop.size

    #from IPython.core.debugger import Tracer
    #Tracer()()

    E_rate = E_net.mean_spike_count() * 1000.0 / simtime
    I_rate = I_net.mean_spike_count() * 1000.0 / simtime

    # write a short report
    nprint("\n--- Brunel Network Simulation ---")
    nprint("Nodes              : %d" % np)
    nprint("Number of Neurons  : %d" % N)
    nprint("Number of Synapses : %d" % Nsyn)
    nprint("Input firing rate  : %g" % p_rate)
    nprint("Excitatory weight  : %g" % JE)
    nprint("Inhibitory weight  : %g" % JI)
    nprint("Excitatory rate    : %g Hz" % E_rate)
    nprint("Inhibitory rate    : %g Hz" % I_rate)
    nprint("Build time         : %g s" % buildCPUTime)
    nprint("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()

    if simulator_name == 'neuroml' and jnml_simulator:
        from pyneuroml import pynml
        lems_file = 'LEMS_Sim_PyNN_NeuroML2_Export.xml'

        print('Going to run generated LEMS file: %s on simulator: %s' %
              (lems_file, jnml_simulator))

        if jnml_simulator == 'jNeuroML':
            results, events = pynml.run_lems_with_jneuroml(
                lems_file,
                nogui=True,
                load_saved_data=True,
                reload_events=True)

        elif jnml_simulator == 'jNeuroML_NEURON':
            results, events = pynml.run_lems_with_jneuroml_neuron(
                lems_file,
                nogui=True,
                load_saved_data=True,
                reload_events=True)

        spike_data['senders'] = []
        spike_data['times'] = []
        for k in events.keys():
            values = k.split('/')
            index = int(
                values[1]) if values[0] == 'E_net' else NE + int(values[1])
            n = len(events[k])
            print(
                "Loading spikes for %s (index %i): [%s, ..., %s (n=%s)] sec" %
                (k, index, events[k][0] if n > 0 else '-',
                 events[k][-1] if n > 0 else '-', n))
            for t in events[k]:
                spike_data['senders'].append(index)
                spike_data['times'].append(t * 1000)

    #print spike_data
    return spike_data
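
# Illustrative helper, not from the original example: a minimal sketch of how
# the spike_data dict returned above ('senders' and 'times' lists) could be
# turned into a raster plot, assuming matplotlib is available.
def plot_spike_raster(spike_data, title="Brunel network raster"):
    import matplotlib.pyplot as plt  # assumed dependency, not used by the script above
    plt.figure()
    plt.plot(spike_data['times'], spike_data['senders'], '.', markersize=2)
    plt.xlabel('time (ms)')
    plt.ylabel('cell index')
    plt.title(title)
    plt.show()
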
class NetworkModel(object):
    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print "USE_MPI: yes", "\tpc_id, n_proc:", self.pc_id, self.n_proc
        else:
            self.pc_id, self.n_proc = 0, 1
            print "MPI not used"

        np.random.seed(params["np_random_seed"] + self.pc_id)

        if self.params["with_short_term_depression"]:
            self.short_term_depression = SynapseDynamics(
                fast=TsodyksMarkramMechanism(U=0.95, tau_rec=10.0, tau_facil=0.0)
            )

    def import_pynn(self):
        """
        This function only needs to be called when this class is used as an imported module in another script.
        """
        import pyNN

        exec ("from pyNN.%s import *" % self.params["simulator"])
        print "import pyNN\npyNN.version: ", pyNN.__version__

    def setup(self, load_tuning_prop=False, times={}):

        self.projections = {}
        self.projections["ee"] = []
        self.projections["ei"] = []
        self.projections["ie"] = []
        self.projections["ii"] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="exc"
            )  # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="inh"
            )  # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params["tuning_prop_means_fn"])
            self.tuning_prop_inh = np.loadtxt(self.params["tuning_prop_inh_fn"])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(
            self.tuning_prop_exc, self.params["motion_params"], self.params
        )  # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params["tuning_prop_means_fn"]
            np.savetxt(self.params["tuning_prop_means_fn"], self.tuning_prop_exc)
            print "Saving tuning_prop to file:", self.params["tuning_prop_inh_fn"]
            np.savetxt(self.params["tuning_prop_inh_fn"], self.tuning_prop_inh)
            print "Saving gids to record to: ", self.params["gids_to_record_fn"]
            np.savetxt(self.params["gids_to_record_fn"], indices[: self.params["n_gids_to_record"]], fmt="%d")

        #        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer

        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times["t_all"] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params["delay_range"]
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params["seed"])
        rng_v = NumpyRNG(
            seed=sim_cnt * 3147 + self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(
            seed=self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution(
            "normal",
            (self.params["v_init"], self.params["v_init_sigma"]),
            rng=rng_v,
            constrain="redraw",
            boundaries=(-80, -60),
        )

        self.times["t_setup"] = self.timer.diff()
        self.times["t_calc_conns"] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(
            axes="xy", periodic_boundaries=((0.0, self.params["torus_width"]), (0.0, self.params["torus_height"]))
        )

    def create_neurons_with_limited_tuning_properties(self, input_created=False):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params["neuron_model"] == "IF_cond_exp":
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_exp, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "IF_cond_alpha":
            self.exc_pop = Population(n_exc, IF_cond_alpha, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_alpha, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "EIF_cond_exp_isfa_ista":
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], EIF_cond_exp_isfa_ista, self.params["cell_params_inh"], label="inh_pop"
            )
        else:
            print "\n\nUnknown neuron model:\n\t", self.params["neuron_model"]

        # set cell positions, required for isotropic connections
        cell_pos_exc = np.zeros((3, self.params["n_exc"]))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params["n_inh"]))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [[] for i in xrange(len(self.local_idx_exc))]
        print "Debug, pc_id %d has local %d exc indices:" % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc
        self.exc_pop.initialize("v", self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params["n_exc"])
        print "Debug, pc_id %d has local %d inh indices:" % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize("v", self.v_init_dist)
        self.times["t_create"] = self.timer.diff()

    def create(self, input_created=False):
        """
            # # # # # # # # # # # #
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params["neuron_model"] == "IF_cond_exp":
            self.exc_pop = Population(
                self.params["n_exc"], IF_cond_exp, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_exp, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "IF_cond_alpha":
            self.exc_pop = Population(
                self.params["n_exc"], IF_cond_alpha, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_alpha, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "EIF_cond_exp_isfa_ista":
            self.exc_pop = Population(
                self.params["n_exc"], EIF_cond_exp_isfa_ista, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], EIF_cond_exp_isfa_ista, self.params["cell_params_inh"], label="inh_pop"
            )
        else:
            print "\n\nUnknown neuron model:\n\t", self.params["neuron_model"]
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        print "Debug, pc_id %d has local %d exc indices:" % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc

        cell_pos_exc = np.zeros((3, self.params["n_exc"]))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params["n_inh"]))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        if not input_created:
            self.spike_times_container = [[] for i in xrange(len(self.local_idx_exc))]

        self.exc_pop.initialize("v", self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params["n_exc"])
        print "Debug, pc_id %d has local %d inh indices:" % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize("v", self.v_init_dist)

        self.times["t_create"] = self.timer.diff()

    def connect(self):
        if self.params["n_exc"] > 5000:
            save_output = False
        else:
            save_output = True

        self.connect_input_to_exc()
        self.connect_populations("ee")
        self.connect_populations("ei")
        self.connect_populations("ie")
        self.connect_populations("ii")
        self.connect_noise()
        self.times["t_calc_conns"] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def create_input(self, load_files=False, save_output=False):

        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params["input_st_fn_base"] + str(tgt) + ".npy"
                    spike_times = np.load(fn)
                except IOError:  # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params["input_spikes_seed"])
            dt = self.params["dt_rate"]  # [ms] time step for the non-homogenous Poisson process
            time = np.arange(0, self.params["t_sim"], dt)
            blank_idx = np.arange(
                1.0 / dt * self.params["t_before_blank"],
                1.0 / dt * (self.params["t_before_blank"] + self.params["t_blank"]),
            )
            before_stim_idx = np.arange(0, self.params["t_start"] * 1.0 / dt)
            blank_idx = np.concatenate((blank_idx, before_stim_idx))

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))

            # get the input signal
            print "Calculating input signal"
            for i_time, time_ in enumerate(time):
                L_input[:, i_time] = utils.get_input(
                    self.tuning_prop_exc[my_units, :], self.params, time_ / self.params["t_stimulus"]
                )
                L_input[:, i_time] *= self.params["f_max_stim"]
                if i_time % 500 == 0:
                    print "t:", time_
            #                    print 'L_input[:, %d].max()', L_input[:, i_time].max()
            # blanking
            for i_time in blank_idx:
                #                L_input[:, i_time] = 0.
                L_input[:, i_time] = np.random.permutation(L_input[:, i_time])

            # create the spike trains
            print "Creating input spiketrains for unit"
            for i_, unit in enumerate(my_units):
                print unit,
                rate_of_t = np.array(L_input[i_, :])
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if r <= ((rate_of_t[i] / 1000.0) * dt):  # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt)
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params["input_rate_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params["input_st_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, np.array(spike_times))

        self.times["create_input"] = self.timer.diff()
        return self.spike_times_container
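
    # Note on create_input(): the spike trains above are drawn from an
    # inhomogeneous Poisson process by thinning: at time step i a spike is
    # emitted with probability rate_of_t[i] / 1000.0 * dt, e.g. a rate of
    # 50 Hz with dt = 1 ms gives a spike probability of 0.05 per step.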

    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting input spiketrains..."

        #        self.stimulus = Population(len(self.local_idx_exc), SpikeSourceArray)
        #            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
        #                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
        #            self.projections[conn_type].append(prj)

        #        self.projections['stim'] = []
        #        self.stimuli = []
        #        self.pop_views = []
        #        conn = OneToOneConnector(weights=self.params['w_input_exc'])
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
            #            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            ssa = Population(1, SpikeSourceArray, {"spike_times": spike_times})
            #            ssa.set({'spike_times' : spike_times})
            #            self.stimuli.append(ssa)

            #            if self.params['with_short_term_depression']:

            #                connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory', synapse_dynamics=self.short_term_depression)
            #                selector = np.zeros(self.params['n_exc'], dtype=np.bool)
            #                selector[unit] = True
            #                print 'debug unit', unit, type(unit)
            #                w[i_] = 1.#self.params['w_input_exc']
            #                tgt = PopulationView(self.exc_pop, np.array([unit]))
            #                self.pop_views.append(tgt)
            #                prj = Projection(ssa, tgt, conn, target='excitatory', synapse_dynamics=self.short_term_depression)
            #                prj = Projection(self.stimuli[-1], self.pop_views[-1], conn, target='excitatory', synapse_dynamics=self.short_term_depression)
            #                self.projections['stim'].append(prj)
            #            else:
            connect(ssa, self.exc_pop[unit], self.params["w_input_exc"], synapse_type="excitatory")
        self.times["connect_input"] = self.timer.diff()

    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == "ee":
            n_src, n_tgt = self.params["n_exc"], self.params["n_exc"]
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = "excitatory"

        elif conn_type == "ei":
            n_src, n_tgt = self.params["n_exc"], self.params["n_inh"]
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = "excitatory"

        elif conn_type == "ie":
            n_src, n_tgt = self.params["n_inh"], self.params["n_exc"]
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = "inhibitory"

        elif conn_type == "ii":
            n_src, n_tgt = self.params["n_inh"], self.params["n_inh"]
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = "inhibitory"

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)

    def connect_anisotropic(self, conn_type):
        """
        conn_type = ['ee', 'ei', 'ie', 'ii']
        """
        if self.pc_id == 0:
            print "Connect anisotropic %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)

        n_src_cells_per_neuron = int(round(self.params["p_%s" % conn_type] * n_src))
        (delay_min, delay_max) = self.params["delay_range"]
        local_connlist = np.zeros((n_src_cells_per_neuron * len(tgt_cells), 4))
        for i_, tgt in enumerate(tgt_cells):
            if self.params["direction_based_conn"]:
                p, latency = CC.get_p_conn_vec_xpred(
                    tp_src,
                    tp_tgt[tgt, :],
                    self.params["w_sigma_x"],
                    self.params["w_sigma_v"],
                    self.params["connectivity_radius"],
                )
            else:
                p, latency = CC.get_p_conn_vec(
                    tp_src,
                    tp_tgt[tgt, :],
                    self.params["w_sigma_x"],
                    self.params["w_sigma_v"],
                    self.params["connectivity_radius"],
                    self.params["maximal_latency"],
                )
            if conn_type[0] == conn_type[1]:
                p[tgt], latency[tgt] = 0.0, 0.0
            # random delays? --> np.permutate(latency) or latency[sources] * self.params['delay_scale'] * np.rand

            sorted_indices = np.argsort(p)
            if conn_type[0] == "e":
                sources = sorted_indices[-n_src_cells_per_neuron:]
            else:  # source = inhibitory
                if conn_type[0] == conn_type[1]:
                    sources = sorted_indices[
                        1 : n_src_cells_per_neuron + 1
                    ]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron]

            #            eta = 1e-9
            eta = 0
            w = (self.params["w_tgt_in_per_cell_%s" % conn_type] / (p[sources].sum() + eta)) * p[sources]
            #            print 'debug p', i_, tgt, p[sources]
            #            print 'debug sources', i_, tgt, sources
            #            print 'debug w', i_, tgt, w

            delays = np.minimum(
                np.maximum(latency[sources] * self.params["delay_scale"], delay_min), delay_max
            )  # map the delay into the valid range
            conn_list = np.array((sources, tgt * np.ones(n_src_cells_per_neuron), w, delays))
            local_connlist[i_ * n_src_cells_per_neuron : (i_ + 1) * n_src_cells_per_neuron, :] = conn_list.transpose()
            connector = FromListConnector(conn_list.transpose())
            if self.params["with_short_term_depression"]:
                prj = Projection(
                    src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression
                )
            else:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
            self.projections[conn_type].append(prj)

        if self.debug_connectivity:
            if self.pc_id == 0:
                print "DEBUG writing to file:", conn_list_fn
            np.savetxt(conn_list_fn, local_connlist, fmt="%d\t%d\t%.4e\t%.4e")
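
        # Each row of the connection list saved above has the format
        #     source_gid    target_gid    weight    delay
        # matching the (sources, tgt, w, delays) columns assembled in the loop.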

    def connect_ee_random(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    E X C - E X C    R A N D O M   #
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
        """

        if self.pc_id == 0:
            print "Drawing random connections"
        sigma_x, sigma_v = self.params["w_sigma_x"], self.params["w_sigma_v"]
        (delay_min, delay_max) = self.params["delay_range"]
        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_ee_fn_base"] + "%d.dat" % (self.pc_id)
            conn_file = open(conn_list_fn, "w")
            output = ""
        for tgt in self.local_idx_exc:
            p = np.zeros(self.params["n_exc"], dtype="float32")
            latency = np.zeros(self.params["n_exc"], dtype="float32")
            for src in xrange(self.params["n_exc"]):
                if src != tgt:
                    p[src], latency[src] = CC.get_p_conn(
                        self.tuning_prop_exc[src, :],
                        self.tuning_prop_exc[tgt, :],
                        sigma_x,
                        sigma_v,
                        params["connectivity_radius"],
                    )  #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
            sources = random.sample(xrange(self.params["n_exc"]), int(self.params["n_src_cells_per_neuron"]))
            idx = p[sources] > 0
            non_zero_idx = np.nonzero(idx)[0]
            p_ = p[sources][non_zero_idx]
            l_ = latency[sources][non_zero_idx] * self.params["delay_scale"]

            w = utils.linear_transformation(p_, self.params["w_min"], self.params["w_max"])
            for i in xrange(len(p_)):
                #                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                delay = min(max(l_[i], delay_min), delay_max)  # map the delay into the valid range
                connect(self.exc_pop[non_zero_idx[i]], self.exc_pop[tgt], w[i], delay=delay, synapse_type="excitatory")
                if self.debug_connectivity:
                    output += "%d\t%d\t%.2e\t%.2e\n" % (
                        non_zero_idx[i],
                        tgt,
                        w[i],
                        delay,
                    )  #                    output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])

        if self.debug_connectivity:
            if self.pc_id == 0:
                print "DEBUG writing to file:", conn_list_fn
            conn_file.write(output)
            conn_file.close()

    def connect_isotropic(self, conn_type="ee"):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = p_max * exp(-d_ij / (2 * w_sigma_isotropic**2))

        This gives a 'convergence constrained' connectivity, i.e. each cell receives the same sum of incoming weights
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print "Connect isotropic %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == "ee":
            w_ = self.params["w_max"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == "ei":
            w_ = self.params["w_ei_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == "ie":
            w_ = self.params["w_ie_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == "ii":
            w_ = self.params["w_ii_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)
        #            conn_file = open(conn_list_fn, 'w')
        #            output = ''
        #            output_dist = ''

        w_mean = w_tgt_in / (self.params["p_%s" % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params["w_sigma_distribution"] * w_mean
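        # p_<conn_type> * n_max_conn / n_tgt is the expected number of incoming
        # connections per target cell, so w_tgt_in is divided across the
        # expected in-degree to obtain the mean weight per connection.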

        w_dist = RandomDistribution(
            "normal", (w_mean, w_sigma), rng=self.rng_conn, constrain="redraw", boundaries=(0, w_mean * 10.0)
        )
        delay_dist = RandomDistribution(
            "normal",
            (self.params["standard_delay"], self.params["standard_delay_sigma"]),
            rng=self.rng_conn,
            constrain="redraw",
            boundaries=(self.params["delay_range"][0], self.params["delay_range"][1]),
        )

        p_max = utils.get_pmax(self.params["p_%s" % conn_type], self.params["w_sigma_isotropic"], conn_type)
        connector = DistanceDependentProbabilityConnector(
            "%f * exp(-d/(2*%f**2))" % (p_max, params["w_sigma_isotropic"]),
            allow_self_connections=False,
            weights=w_dist,
            delays=delay_dist,
            space=self.torus,
        )  # , n_connections=n_conn_ee)
        print "p_max for %s" % conn_type, p_max
        if self.params["with_short_term_depression"]:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)  # , synapse_dynamics=self.STD)
        self.projections[conn_type].append(prj)
        if self.debug_connectivity:
            #                if self.pc_id == 0:
            #                    print 'DEBUG writing to file:', conn_list_fn
            prj.saveConnections(self.params["conn_list_%s_fn_base" % conn_type] + ".dat", gather=True)

    #            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + 'gid%d.dat' % tgt, gather=False)
    #                conn_file.close()

    #            w = np.zeros(n_src, dtype='float32')
    #            delays = np.zeros(n_src, dtype='float32')
    #            for src in xrange(n_src):
    #                if conn_type[0] == conn_type[1]:
    #                    if (src != tgt): # no self-connections / autapses
    #                        d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
    #                        p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
    #                        if np.random.rand() <= p_ij:
    #                            w[src] = w_
    #                            delays[src] = d_ij * params['delay_scale']
    #                else:
    #                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
    #                    p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
    #                    if np.random.rand() <= p_ij:
    #                        w[src] = w_
    #                        delays[src] = d_ij * params['delay_scale']
    #            w *= w_tgt_in / w.sum()
    #            srcs = w.nonzero()[0]
    #            weights = w[srcs]
    #            for src in srcs:
    #                if w[src] > self.params['w_thresh_connection']:
    #                delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
    #                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
    #                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay)

    #        if self.debug_connectivity:
    #            if self.pc_id == 0:
    #                print 'DEBUG writing to file:', conn_list_fn
    #            conn_file.write(output)
    #            conn_file.close()

    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters as the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print "Connect random connections %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        w_mean = self.params["w_tgt_in_per_cell_%s" % conn_type] / (n_src * self.params["p_%s" % conn_type])
        w_sigma = self.params["w_sigma_distribution"] * w_mean

        weight_distr = RandomDistribution(
            "normal", (w_mean, w_sigma), rng=self.rng_conn, constrain="redraw", boundaries=(0, w_mean * 10.0)
        )

        delay_dist = RandomDistribution(
            "normal",
            (self.params["standard_delay"], self.params["standard_delay_sigma"]),
            rng=self.rng_conn,
            constrain="redraw",
            boundaries=(self.params["delay_range"][0], self.params["delay_range"][1]),
        )

        connector = FastFixedProbabilityConnector(
            self.params["p_%s" % conn_type], weights=weight_distr, delays=delay_dist
        )
        if self.params["with_short_term_depression"]:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)
        print "Saving random %s connections to %s" % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)

    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # #
            #     C O N N E C T   #
            # # # # # # # # # # # #
            Calls the appropriate connection method according to the flag set in simulation_parameters.py
        """
        if self.params["connectivity_%s" % conn_type] == "anisotropic":
            self.connect_anisotropic(conn_type)
        elif self.params["connectivity_%s" % conn_type] == "isotropic":
            self.connect_isotropic(conn_type)
        elif self.params["connectivity_%s" % conn_type] == "random":
            self.connect_random(conn_type)
        else:  # populations do not get connected
            pass

    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # #
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
        for tgt in self.local_idx_exc:
            # new
            if self.params["simulator"] == "nest":  # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type("poisson_generator"), {"rate": self.params["f_exc_noise"]})
                noise_inh = create(native_cell_type("poisson_generator"), {"rate": self.params["f_inh_noise"]})
            else:
                noise_exc = create(SpikeSourcePoisson, {"rate": self.params["f_exc_noise"]})
                noise_inh = create(SpikeSourcePoisson, {"rate": self.params["f_inh_noise"]})
            connect(
                noise_exc, self.exc_pop[tgt], weight=self.params["w_exc_noise"], synapse_type="excitatory", delay=1.0
            )
            connect(
                noise_inh, self.exc_pop[tgt], weight=self.params["w_inh_noise"], synapse_type="inhibitory", delay=1.0
            )

        if self.pc_id == 0:
            print "Connecting noise - inh ... "
        for tgt in self.local_idx_inh:
            if self.params["simulator"] == "nest":  # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type("poisson_generator"), {"rate": self.params["f_exc_noise"]})
                noise_inh = create(native_cell_type("poisson_generator"), {"rate": self.params["f_inh_noise"]})
            else:
                noise_exc = create(SpikeSourcePoisson, {"rate": self.params["f_exc_noise"]})
                noise_inh = create(SpikeSourcePoisson, {"rate": self.params["f_inh_noise"]})
            connect(
                noise_exc, self.inh_pop[tgt], weight=self.params["w_exc_noise"], synapse_type="excitatory", delay=1.0
            )
            connect(
                noise_inh, self.inh_pop[tgt], weight=self.params["w_inh_noise"], synapse_type="inhibitory", delay=1.0
            )
        self.times["connect_noise"] = self.timer.diff()

    def run_sim(self, sim_cnt, record_v=True):
        # # # # # # # # # # # # # # # # # # # #
        #     P R I N T    W E I G H T S      #
        # # # # # # # # # # # # # # # # # # # #
        #    print 'Printing weights to :\n  %s\n  %s\n  %s' % (self.params['conn_list_ei_fn'], self.params['conn_list_ie_fn'], self.params['conn_list_ii_fn'])
        #    exc_inh_prj.saveConnections(self.params['conn_list_ei_fn'])
        #    inh_exc_prj.saveConnections(self.params['conn_list_ie_fn'])
        #    inh_inh_prj.saveConnections(self.params['conn_list_ii_fn'])
        #    self.times['t_save_conns'] = self.timer.diff()

        # # # # # # # # # # # #
        #     R E C O R D     #
        # # # # # # # # # # # #
        #    print "Recording spikes to file: %s" % (self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
        #    for cell in xrange(self.params['n_exc']):
        #        record(self.exc_pop[cell], self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
        record_exc = True
        if os.path.exists(self.params["gids_to_record_fn"]):
            gids_to_record = np.loadtxt(self.params["gids_to_record_fn"], dtype="int")[
                : self.params["n_gids_to_record"]
            ]
            record_exc = True
            n_rnd_cells_to_record = 2
        else:
            n_cells_to_record = 5  # self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params["n_exc"], n_cells_to_record)

        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label="good_exc_neurons")
            self.exc_pop_view.record_v()
            self.inh_pop_view = PopulationView(
                self.inh_pop,
                np.random.randint(0, self.params["n_inh"], self.params["n_gids_to_record"]),
                label="random_inh_neurons",
            )
            self.inh_pop_view.record_v()

        self.inh_pop.record()
        self.exc_pop.record()
        self.times["t_record"] = self.timer.diff()

        # # # # # # # # # # # # # #
        #     R U N N I N G     #
        # # # # # # # # # # # # # #
        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params["t_sim"])
        self.times["t_sim"] = self.timer.diff()

    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #   P R I N T    R E S U L T S  #
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print "print_v to file: %s.v" % (self.params["exc_volt_fn_base"])
            self.exc_pop_view.print_v("%s.v" % (self.params["exc_volt_fn_base"]), compatible_output=False)
            if self.pc_id == 0:
                print "Printing inhibitory membrane potentials"
            self.inh_pop_view.print_v("%s.v" % (self.params["inh_volt_fn_base"]), compatible_output=False)

        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params["exc_spiketimes_fn_merged"] + ".ras")
        if self.pc_id == 0:
            print "Printing inhibitory spikes"
        self.inh_pop.printSpikes(self.params["inh_spiketimes_fn_merged"] + ".ras")

        self.times["t_print"] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times["t_end"] = self.timer.diff()

        if self.pc_id == 0:
            self.times["t_all"] = 0.0
            for k in self.times.keys():
                self.times["t_all"] += self.times[k]

            self.n_cells = {}
            self.n_cells["n_exc"] = self.params["n_exc"]
            self.n_cells["n_inh"] = self.params["n_inh"]
            self.n_cells["n_cells"] = self.params["n_cells"]
            self.n_cells["n_proc"] = self.n_proc
            output = {"times": self.times, "n_cells_proc": self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (
                self.pc_id,
                self.times["t_sim"],
                (self.times["t_sim"]) / 60.0,
                self.params["n_cells"],
                self.params["n_exc"],
                self.params["n_inh"],
            )
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (
                self.pc_id,
                self.times["t_all"],
                (self.times["t_all"]) / 60.0,
                self.params["n_cells"],
                self.params["n_exc"],
                self.params["n_inh"],
            )
            fn = utils.convert_to_url(params["folder_name"] + "times_dict_np%d.py" % self.n_proc)
            output = ntp.ParameterSet(output)
            output.save(fn)
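
# Illustrative usage sketch, not from the original example. 'params' and 'comm'
# stand for the parameter dictionary and the optional MPI communicator that the
# surrounding project sets up elsewhere.
#
#     model = NetworkModel(params, comm)
#     model.import_pynn()
#     model.setup(load_tuning_prop=False)
#     model.create()           # or create_neurons_with_limited_tuning_properties()
#     model.create_input(load_files=False, save_output=True)
#     model.connect()
#     model.run_sim(sim_cnt=0)
#     model.print_results()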
예제 #37
0
Andrew Davison, UNIC, CNRS
August 2006, November 2009

"""

import socket, os
import csa
import numpy
from pyNN.utility import get_script_args, Timer

simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)

from pyNN.random import NumpyRNG

timer = Timer()
seed = 764756387
tstop = 1000.0 # ms
input_rate = 100.0 # Hz
cell_params = {'tau_refrac': 2.0,  # ms
               'v_thresh':  -50.0, # mV
               'tau_syn_E':  2.0,  # ms
               'tau_syn_I':  2.0}  # ms
n_record = 5

node = setup(timestep=0.025, min_delay=1.0, max_delay=10.0, debug=True, quit_on_end=False)
print "Process with rank %d running on %s" % (node, socket.gethostname())


rng = NumpyRNG(seed=seed, parallel_safe=True)
Credits to Stephan Reith for developing the ROS-SpiNNaker interface
"""
#
import spynnaker.pyNN as pynn

from pyNN.random import NumpyRNG, RandomDistribution
from pyNN.utility import Timer

from ros_spinnaker_interface import ROS_Spinnaker_Interface
# import transfer_functions as tf
from ros_spinnaker_interface import SpikeSourcePoisson
from ros_spinnaker_interface import SpikeSinkSmoothing, SpikeSinkMultipleReadoutsConvolution

import numpy as np

timer = Timer()
# === Define parameters ===
threads = 1
rngseed = 98766987
parallel_safe = True

ts = 0.1  # simulation timestep in ms
simulation_time = 2000  # ms

n_input_neurons = 4
n_readout_neurons = 2  #
n_reservoir_neurons = 59
exc_rate = 0.8  # 80% of reservoir neurons are excitatory

n_reservoir_exc = int(np.ceil(n_reservoir_neurons * exc_rate))
n_reservoir_inh = n_reservoir_neurons - n_reservoir_exc
    inhibitory neurons, connected via current-based "exponential"
    synapses (instantaneous rise, exponential decay).

    Andrew Davison, UNIC, CNRS
    August 2006

    $Id:VAbenchmarks.py 5 2007-04-16 15:01:24Z davison $

Author: Bernhard Kaplan, [email protected]
"""
import time
t0 = time.time()

# to store timing information
from pyNN.utility import Timer
timer = Timer()
timer.start()
times = {}
times['t_startup'] = time.time() - t0

# check imports
import numpy as np
import os
import socket
from math import *
import json
from pyNN.utility import get_script_args
simulator_name = 'nest'
from pyNN.nest import *
#exec("from pyNN.%s import *" % simulator_name)
try:
예제 #40
0
v_thresh = -45.     # (mV)
v_reset  = -90.     # (mV)
t_refrac = 3.       # (ms) (clamped at v_reset)
tau_syn_exc = 3
tau_syn_inh = tau_syn_exc*3

i_bias_pref = 1.0  #3.0 #4.0          
i_bias_avert = 0.6 # 0.8 #0.0         
i_bias_neut = 0.5 #0.0               

runtime = 100
#runtime = 500
#runtime = 120000
#runtime =  1000000

timer = Timer()
timer.start()


# cell_params will be passed to the constructor of the Population Object

cell_params = {
    'tau_m'      : tau_m,    'cm'         : cm,    
    'v_rest'     : -65,   'v_reset'    : -65,  'v_thresh'   : -45,
    'tau_syn_E'       : tau_syn_exc,        'tau_syn_I'       : tau_syn_inh, 'tau_refrac'       : t_refrac, 'i_offset' : 0
    }


# population and projection containers
v4_pop = []
pfc = []
def run_retina(params):
    """Run the retina using the specified parameters."""

    print "Setting up simulation"
    timer = Timer()
    timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'], max_delay=params['syn_delay'], threads=params['threads'], rng_seeds=params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    phr_OFF = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    noise_ON = pyNN.Population((N, N), pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))
    noise_OFF = pyNN.Population((N, N), pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))

    phr_ON.set(start=params['simtime']/4, stop=params['simtime']/4*3,
               amplitude=params['amplitude'] * params['snr'])
    phr_OFF.set(start=params['simtime']/4, stop=params['simtime']/4*3,
                amplitude=-params['amplitude'] * params['snr'])

    # target ON and OFF populations
    v_init = params['parameters_gc'].pop('Vinit')
    out_ON = pyNN.Population((N, N), pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_OFF = pyNN.Population((N, N), pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_ON.initialize(v=v_init)
    out_OFF.initialize(v=v_init)

    #print "Connecting the network"

    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, pyNN.OneToOneConnector())
    retina_proj_ON.set(weight=params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF, pyNN.OneToOneConnector())
    retina_proj_OFF.set(weight=params['weight'])

    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, pyNN.OneToOneConnector())
    noise_proj_ON.set(weight=params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF, pyNN.OneToOneConnector())
    noise_proj_OFF.set(weight=params['weight'])

    out_ON.record('spikes')
    out_OFF.record('spikes')

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    print "Running simulation"

    timer.start()  # start timer for the simulation run
    pyNN.run(params['simtime'])
    simCPUTime = timer.elapsedTime()

    out_ON_DATA = out_ON.get_data().segments[0]
    out_OFF_DATA = out_OFF.get_data().segments[0]

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate  (ON) : ", out_ON.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
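
# Illustrative parameter set, not from the original example. run_retina() above
# reads the keys shown here; the values are placeholders chosen only to show the
# expected structure.
example_retina_params = {
    'description': 'toy retina run',
    'dt': 0.1,                  # ms, simulation timestep
    'syn_delay': 1.0,           # ms, maximum synaptic delay
    'threads': 1,
    'kernelseeds': [123456],
    'N': 8,                     # the retina is an N x N grid
    'simtime': 400.0,           # ms
    'amplitude': 1.0,           # dc_generator amplitude
    'snr': 2.0,
    'noise_std': 1.0,           # std of the noise_generator
    'weight': 1.0,              # weight of the one-to-one projections
    'parameters_gc': {'Vinit': -70.0},  # plus any further iaf_cond_exp_sfa_rr parameters
}
# out_ON_data, out_OFF_data = run_retina(example_retina_params)  # needs pyNN.nest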
예제 #42
0
    def test_va_benchmark(self):

        simulator_name = 'spiNNaker'

        timer = Timer()

        # === Define parameters ========================================================

        rngseed = 98766987
        parallel_safe = True

        n = 1500  # number of cells
        r_ei = 4.0   # ratio of excitatory cells to inhibitory cells
        pconn = 0.02  # connection probability

        dt = 0.1        # (ms) simulation timestep
        tstop = 200    # (ms) simulation duration
        delay = 1

        # Cell parameters
        area = 20000.  # (µm²)
        tau_m = 20.    # (ms)
        cm = 1.     # (µF/cm²)
        g_leak = 5e-5   # (S/cm²)
        e_leak = -49.  # (mV)
        v_thresh = -50.   # (mV)
        v_reset = -60.   # (mV)
        t_refrac = 5.     # (ms) (clamped at v_reset)
        v_mean = -60.   # (mV) 'mean' membrane potential, for calculating CUBA weights
        tau_exc = 5.     # (ms)
        tau_inh = 10.    # (ms)

        g_exc = 0.27   # (nS) these weights should be similar to the COBA weights,
        g_inh = 4.5    # (nS) but the depolarising drift should be taken into account
        e_rev_exc = 0.     # (mV)
        e_rev_inh = -80.   # (mV)

        # === Calculate derived parameters =============================================

        area *= 1e-8                     # convert to cm²
        cm *= area * 1000                  # convert to nF
        r_m = 1e-6 / (g_leak * area)            # membrane resistance in MΩ
        assert tau_m == cm * r_m                 # just to check

        n_exc = int(round((n * r_ei / (1 + r_ei))))  # number of excitatory cells
        n_inh = n - n_exc                     # number of inhibitory cells

        print n_exc, n_inh

        celltype = IF_curr_exp
        w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)  # (nA) weight of excitatory synapses
        w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
        assert w_exc > 0
        assert w_inh < 0
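
        # With the values above: w_exc = 1e-3 * 0.27 * (0 - (-60)) = 0.0162 nA
        # and w_inh = 1e-3 * 4.5 * (-80 - (-60)) = -0.09 nA, i.e. the CUBA
        # weights approximate the currents the COBA conductances would inject
        # at the mean membrane potential v_mean.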

        # === Build the network ========================================================

        setup(timestep=dt, min_delay=delay, max_delay=delay)

        if simulator_name == 'spiNNaker':
            set_number_of_neurons_per_core('IF_curr_exp', 100)      # this will set 100 neurons per core
            set_number_of_neurons_per_core('IF_cond_exp', 50)      # this will set 50 neurons per core

        node_id = 1
        np = 1

        host_name = socket.gethostname()
        print "Host #%d is on %s" % (np, host_name)

        cell_params = {
            'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
            'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
            'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0}

        print cell_params

        timer.start()

        print "%s Creating cell populations..." % node_id
        exc_cells = Population(n_exc, celltype, cell_params,
                               label="Excitatory_Cells")
        inh_cells = Population(n_inh, celltype, cell_params,
                               label="Inhibitory_Cells")
        NativeRNG(12345)

        print "%s Initialising membrane potential to random values..." % node_id
        rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
        uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh],
                                           rng=rng)
        exc_cells.initialize('v', uniform_distr)
        inh_cells.initialize('v', uniform_distr)

        print "%s Connecting populations..." % node_id
        exc_conn = FixedProbabilityConnector(pconn, weights=w_exc, delays=delay)
        inh_conn = FixedProbabilityConnector(pconn, weights=w_inh, delays=delay)

        connections = dict()
        connections['e2e'] = Projection(exc_cells, exc_cells, exc_conn,
                                        target='excitatory', rng=rng)
        connections['e2i'] = Projection(exc_cells, inh_cells, exc_conn,
                                        target='excitatory', rng=rng)
        connections['i2e'] = Projection(inh_cells, exc_cells, inh_conn,
                                        target='inhibitory', rng=rng)
        connections['i2i'] = Projection(inh_cells, inh_cells, inh_conn,
                                        target='inhibitory', rng=rng)

        # === Setup recording ==============================
        print "%s Setting up recording..." % node_id
        exc_cells.record()

        # === Run simulation ================================
        print "%d Running simulation..." % node_id

        print "timings: number of neurons:", n
        print "timings: number of synapses:", n * n * pconn

        run(tstop)

        # === Print results to file ===============================

        print "%d Writing data to file..." % node_id

        if not(os.path.isdir('Results')):
            os.mkdir('Results')

        exc_spikes = exc_cells.getSpikes()

        current_file_path = os.path.dirname(os.path.abspath(__file__))
        current_file_path = os.path.join(current_file_path, "spikes.data")
        #  exc_cells.printSpikes(current_file_path)
        pre_recorded_spikes = utility_calls.read_spikes_from_file(
            current_file_path, 0, n_exc, 0, tstop)

        end()

        for spike_element, read_element in zip(exc_spikes, pre_recorded_spikes):
                self.assertEqual(round(spike_element[0], 1),
                                 round(read_element[0], 1))
                self.assertEqual(round(spike_element[1], 1),
                                 round(read_element[1], 1))
    def setup(self, load_tuning_prop=False, times={}):

        self.projections = {}
        self.projections["ee"] = []
        self.projections["ei"] = []
        self.projections["ie"] = []
        self.projections["ii"] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="exc"
            )  # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="inh"
            )  # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params["tuning_prop_means_fn"])
            self.tuning_prop_inh = np.loadtxt(self.params["tuning_prop_inh_fn"])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(
            self.tuning_prop_exc, self.params["motion_params"], self.params
        )  # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params["tuning_prop_means_fn"]
            np.savetxt(self.params["tuning_prop_means_fn"], self.tuning_prop_exc)
            print "Saving tuning_prop to file:", self.params["tuning_prop_inh_fn"]
            np.savetxt(self.params["tuning_prop_inh_fn"], self.tuning_prop_inh)
            print "Saving gids to record to: ", self.params["gids_to_record_fn"]
            np.savetxt(self.params["gids_to_record_fn"], indices[: self.params["n_gids_to_record"]], fmt="%d")

        #        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer

        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times["t_all"] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params["delay_range"]
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params["seed"])
        rng_v = NumpyRNG(
            seed=sim_cnt * 3147 + self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(
            seed=self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution(
            "normal",
            (self.params["v_init"], self.params["v_init_sigma"]),
            rng=rng_v,
            constrain="redraw",
            boundaries=(-80, -60),
        )

        self.times["t_setup"] = self.timer.diff()
        self.times["t_calc_conns"] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(
            axes="xy", periodic_boundaries=((0.0, self.params["torus_width"]), (0.0, self.params["torus_height"]))
        )
#from pyNN.brian import *
#simulator_name = 'brian'
import simulation_parameters
ps = simulation_parameters.parameter_storage()
params = ps.params


# ===================================
#    G E T   P A R A M E T E R S 
# ===================================
x0, y0 = params['motion_params'][0:2]
sim_cnt = int(sys.argv[1])
mp = float(sys.argv[2]), float(sys.argv[3]), float(sys.argv[4]), float(sys.argv[5])

from pyNN.utility import Timer
timer = Timer()
timer.start()
times = {} # stores time stamps
tuning_prop = utils.set_tuning_prop(params, mode='hexgrid')
time = np.arange(0, params['t_stimulus'], params['dt_rate'])

#print 'Prepare spike trains'
#L_input = np.zeros((params['n_exc'], time.shape[0]))
#for i_time, time_ in enumerate(time):
#    if (i_time % 100 == 0):
#        print "t:", time_
#    L_input[:, i_time] = utils.get_input(tuning_prop, params, time_/params['t_sim'])
#    L_input[:, i_time] *= params['f_max_stim']

# ===============
#    S E T U P 
Example #45
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "I_f_curve.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """
    
    import json
    import numpy as np  # used below to build the i_offset vector
    from pyNN.utility import Timer

    timer = Timer()

    with open("I_f_curve.json", 'r') as g:
        d = json.load(g)
    
    N = d['param']['N']
    max_current = d['param']['max_current']
    tstop = d['param']['tstop']

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]

    timer.start()
    sim.setup(**d['setup'])

    popcell = sim.Population(N, sim.IF_cond_exp, d['IF_cond_exp'])

    #current_source = []
    #for i in xrange(N):
    #    current_source.append(sim.DCSource(amplitude=(max_current*(i+1)/N)))
    #    popcell[i:(i+1)].inject(current_source[i])
    i_offset = max_current * (1 + np.arange(N))/N
    popcell.tset("i_offset", i_offset)

    if PYNN07:
        popcell.record()
    else:
        popcell.record('spikes')
        #popcell[0, 1, N-2, N-1].record('v')  # debug

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = popcell.getSpikes()
        data = spike_array_to_neo(spike_array, popcell, tstop)
    else:
        data = popcell.get_data()

    sim.end()

    closing_time = timer.diff()
    times = {'setup_time': setup_time, 'run_time': run_time, 'closing_time': closing_time}

    return data, times
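For context, run_model() above only assumes that "I_f_curve.json" contains a 'param' block (with 'N', 'max_current' and 'tstop'), a 'setup' block forwarded verbatim to sim.setup(), and an 'IF_cond_exp' block of cell parameters. Below is a minimal sketch of such a file and one way to invoke the function with the NEST backend; every concrete value is an illustrative assumption, not taken from the original project.

import json
import pyNN.nest as sim

# Illustrative configuration matching the keys read by run_model() above.
example_config = {
    "param": {"N": 10, "max_current": 1.0, "tstop": 1000.0},   # assumed units: nA, ms
    "setup": {"timestep": 0.1, "min_delay": 0.1},              # forwarded to sim.setup()
    "IF_cond_exp": {"tau_m": 20.0, "v_thresh": -50.0},         # cell parameters
}
with open("I_f_curve.json", "w") as f:
    json.dump(example_config, f, indent=2)

PYNN07 = False   # assumption: module-level flag expected by run_model(); False selects the modern recording API
data, times = run_model(sim, simulator="nest")
print(times)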
Example #46
#############################################


from pyNN.random import RandomDistribution, NumpyRNG
from pyNN.utility import get_script_args, Timer, ProgressBar, init_logging, normalized_filename
import matplotlib.pyplot as plt
from auxRoutines import *

simulator_name = get_script_args(1)[0]  

exec("from pyNN.%s import *" % simulator_name)

print("\n")
print "Starting PyNN with simulator: %s" % simulator_name

timer = Timer()

# Total of 8000 excitatory neurons and 2000 inhibitory neurons. Note that the excitatory sub-populations specified below are disjoint.
# For instance, pattern1 comprises 720 neurons in total, 196 of which form pattern1_stim.

numOfNeuronsExcPopulation = 5712 	# Excitatory neurons not including pattern1, pattern2 and patternIntersection populations
numOfNeuronsPattern1 = 524		# Neurons of pattern1 not including those of patternIntersection and from neuronsPattern1_stim
numOfNeuronsPattern1_stim = 196		# Sub-set of pattern1 which may receive external stimulation
numOfNeuronsPattern2 = 524		# Neurons of pattern2 not including those of patternIntersection
numOfNeuronsPattern2_stim = 196		# Sub-set of pattern2 which may receive external stimulation
numOfNeuronsPatternIntersection = 64
numOfNeuronsControl = 784

numOfNeuronsInhibPopulation = 2000
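As a quick sanity check (not part of the original script), the disjoint sub-population sizes above can be verified to add up to the stated totals of 8000 excitatory and 2000 inhibitory neurons:

# Illustrative sanity check: the disjoint excitatory sub-populations sum to 8000.
exc_total = (numOfNeuronsExcPopulation + numOfNeuronsPattern1 + numOfNeuronsPattern1_stim
             + numOfNeuronsPattern2 + numOfNeuronsPattern2_stim
             + numOfNeuronsPatternIntersection + numOfNeuronsControl)
assert exc_total == 8000, exc_total
assert numOfNeuronsInhibPopulation == 2000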

Example #47
    def run(self, params, verbose=True):
        tmpdir = tempfile.mkdtemp()
        timer = Timer()
        timer.start()  # start timer on construction

        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        sim.setup(timestep=params.simulation.dt,
                  max_delay=params.simulation.syn_delay,
                  debug=False)

        N = params.N
        #dc_generator
        current_source = sim.DCSource(amplitude=params.snr,
                                      start=params.simulation.simtime / 4,
                                      stop=params.simulation.simtime / 4 * 3)

        # internal noise model (NEST specific)
        noise = sim.Population(N, 'noise_generator', {
            'mean': 0.,
            'std': params.noise_std
        })
        # target population
        output = sim.Population(N, sim.IF_cond_exp)

        # initialize membrane potential
        numpy.random.seed(params.simulation.kernelseed)
        V_rest, V_spike = -70., -53.
        output.tset('v_init',
                    V_rest + numpy.random.rand(N, ) * (V_spike - V_rest))

        #  Connecting the network
        conn = sim.OneToOneConnector(weights=params.weight)
        sim.Projection(noise, output, conn)

        for cell in output:
            cell.inject(current_source)

        output.record()

        # reads out time used for building
        buildCPUTime = timer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        timer.reset()  # reset timer before running the simulation
        sim.run(params.simulation.simtime)
        simCPUTime = timer.elapsedTime()

        timer.reset()  # reset timer before writing the results

        output_filename = os.path.join(tmpdir, 'output.gdf')
        #print output_filename
        output.printSpikes(output_filename)  #
        output_DATA = load_spikelist(output_filename,
                                     N,
                                     t_start=0.0,
                                     t_stop=params.simulation.simtime)
        writeCPUTime = timer.elapsedTime()

        if verbose:
            print "\nFiber Network Simulation:"
            print "Number of Neurons  : ", N
            print "Mean Output rate    : ", output_DATA.mean_rate(
            ), "Hz during ", params.simulation.simtime, "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        os.remove(output_filename)
        os.rmdir(tmpdir)

        return output_DATA
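The run() method above expects a nested parameter object accessed by attribute (params.N, params.simulation.dt, and so on). A minimal stand-in built with types.SimpleNamespace, purely for illustration (the original project presumably supplies a NeuroTools-style ParameterSet, and every value below is an assumption), could look like this:

from types import SimpleNamespace

# Illustrative parameter object exposing exactly the attributes read in run() above.
params = SimpleNamespace(
    N=100,                      # number of cells
    snr=1.0,                    # DC source amplitude (nA)
    noise_std=2.0,              # std of the NEST noise_generator
    weight=0.01,                # noise -> output connection weight
    simulation=SimpleNamespace(dt=0.1, syn_delay=1.0, simtime=1000.0, kernelseed=42),
)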
class Mode(enum.Enum):
    train_asymmetrical = 1
    train_symmetrical  = 2
    test_asymmetrical  = 3
    test_symmetrical   = 4

mode = Mode.train_asymmetrical

hcu_grid_size = 2
num_hcu = hcu_grid_size ** 2
num_mcu_neurons = 100

record_membrane = False

timer = Timer()
timer.start()

spinnaker_kwargs = {"spinnaker_hostname": "192.168.1.1",
                    "stop_on_spinnaker": True}

tau_p = 2000

folder = "sequence_%u_%u" % (num_hcu, tau_p)
if not os.path.exists(folder):
    os.makedirs(folder)

# Bind parameters to euclidean HCU delay model
delay_model = functools.partial(network.euclidean_hcu_delay,
                                grid_size=hcu_grid_size, distance_scale=0.75, velocity=0.2)
#delay_model = functools.partial(network.euclidean_hcu_delay,
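network.euclidean_hcu_delay is project-specific and not shown in this excerpt. Purely as an illustration of the general idea, a delay model that turns the Euclidean distance between two hypercolumn units (HCUs) on a square grid into a conduction delay could be sketched as follows; the function name, signature and formula are assumptions, not the project's actual implementation:

import math

def example_hcu_delay(pre_hcu, post_hcu, grid_size, distance_scale, velocity):
    # Illustrative only: delay proportional to Euclidean grid distance divided by velocity.
    pre_x, pre_y = pre_hcu % grid_size, pre_hcu // grid_size
    post_x, post_y = post_hcu % grid_size, post_hcu // grid_size
    distance = distance_scale * math.hypot(post_x - pre_x, post_y - pre_y)
    return max(0.1, distance / velocity)   # enforce a small minimum delay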
Example #49
def main_pyNN(parameters):
    timer = Timer()
    sim = import_module(parameters.simulator)
    timer.mark("import")

    sim.setup(threads=parameters.threads)
    timer.mark("setup")

    populations = {}
    for name, P in parameters.populations.parameters():
        populations[name] = sim.Population(P.n, getattr(sim, P.celltype)(**P.params), label=name)
    timer.mark("build")

    if parameters.projections:
        projections = {}
        for name, P in parameters.projections.parameters():
            connector = getattr(sim, P.connector.type)(**P.connector.params)
            synapse_type = getattr(sim, P.synapse_type.type)(**P.synapse_type.params)
            projections[name] = sim.Projection(populations[P.pre],
                                               populations[P.post],
                                               connector,
                                               synapse_type,
                                               receptor_type=P.receptor_type,
                                               label=name)
        timer.mark("connect")

    if parameters.recording:
        for pop_name, to_record in parameters.recording.parameters():
            for var_name, n_record in to_record.items():
                populations[pop_name].sample(n_record).record(var_name)
        timer.mark("record")

    sim.run(parameters.sim_time)
    timer.mark("run")

    spike_counts = {}
    if parameters.recording:
        for pop_name in parameters.recording.names():
            block = populations[pop_name].get_data()  # perhaps include some summary statistics in the data returned?
            spike_counts["spikes_%s" % pop_name] = populations[pop_name].mean_spike_count()
        timer.mark("get_data")

    mpi_rank = sim.rank()
    num_processes = sim.num_processes()
    sim.end()
    
    data = dict(timer.marks)
    data.update(num_processes=num_processes)
    data.update(spike_counts)
    return mpi_rank, data
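main_pyNN() above assumes a hierarchical parameter object whose sections expose parameters() and names() iterators. The structure implied by the attribute accesses in the function (with illustrative names and values, not the original benchmark's configuration) is roughly the following; the real object would wrap this dict so that entries are also reachable as attributes:

# Rough shape of the configuration consumed by main_pyNN() (illustrative only).
example_parameters = {
    "simulator": "pyNN.nest",
    "threads": 1,
    "sim_time": 1000.0,          # ms
    "populations": {
        "exc": {"n": 400, "celltype": "IF_cond_exp", "params": {}},
    },
    "projections": {
        "exc_exc": {
            "pre": "exc", "post": "exc",
            "connector": {"type": "FixedProbabilityConnector", "params": {"p_connect": 0.02}},
            "synapse_type": {"type": "StaticSynapse", "params": {"weight": 0.004, "delay": 0.5}},
            "receptor_type": "excitatory",
        },
    },
    "recording": {
        "exc": {"spikes": 100},  # variable name -> number of cells to record
    },
}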
    def run(self,params, verbose =True):
        tmpdir = tempfile.mkdtemp()
        timer = Timer()
        timer.start() # start timer on construction

        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        sim.setup(timestep=params.simulation.dt,max_delay=params.simulation.syn_delay, debug=False)

        N = params.N
        #dc_generator
        current_source = sim.DCSource(  amplitude= params.snr,
                                        start=params.simulation.simtime/4,
                                        stop=params.simulation.simtime/4*3)
        
        # internal noise model (NEST specific)
        noise = sim.Population(N,'noise_generator',{'mean':0.,'std':params.noise_std}) 
        # target population
        output = sim.Population(N , sim.IF_cond_exp)

        # initialize membrane potential
        numpy.random.seed(params.simulation.kernelseed)
        V_rest, V_spike = -70., -53.
        output.tset('v_init',V_rest + numpy.random.rand(N,)* (V_spike -V_rest))

        #  Connecting the network
        conn = sim.OneToOneConnector(weights = params.weight)
        sim.Projection(noise, output, conn)

        for cell in output:
            cell.inject(current_source)

        output.record()

        # reads out time used for building
        buildCPUTime= timer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        timer.reset() # reset timer before running the simulation
        sim.run(params.simulation.simtime)
        simCPUTime = timer.elapsedTime()

        timer.reset()  # reset timer before writing the results

        output_filename = os.path.join(tmpdir,'output.gdf')
        #print output_filename
        output.printSpikes(output_filename)#
        output_DATA = load_spikelist(output_filename,N,
                                        t_start=0.0, t_stop=params.simulation.simtime)
        writeCPUTime = timer.elapsedTime()

        if verbose:
            print "\nFiber Network Simulation:"
            print "Number of Neurons  : ", N
            print "Mean Output rate    : ", output_DATA.mean_rate(), "Hz during ",params.simulation.simtime, "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        os.remove(output_filename)
        os.rmdir(tmpdir)

        return output_DATA
Example #51
Andrew Davison, UNIC, CNRS
August 2006

"""

import os
import socket
from math import *

from pyNN.utility import get_simulator, Timer, ProgressBar, init_logging, normalized_filename
sim, options = get_simulator(("benchmark", "Either CUBA or COBA"))

from pyNN.random import NumpyRNG, RandomDistribution

init_logging(None, debug=True)
timer = Timer()

# === Define parameters ========================================================

threads = 1
rngseed = 98765
parallel_safe = True

n = 4000  # number of cells
r_ei = 4.0  # number of excitatory cells:number of inhibitory cells
pconn = 0.02  # connection probability
stim_dur = 50.  # (ms) duration of random stimulation
rate = 100.  # (Hz) frequency of the random stimulation

dt = 0.1  # (ms) simulation timestep
tstop = 200  #1000  # (ms) simulation duration
def test_callback(data_input):
    global message
    message = data_input.actual.positions
    msg_list = list(message)

    #msg_list[0] = int(message[0].encode('hex'),16)
    #for i in
    #msg_list = int(message.encode('hex'),16)

    #print('============= Received image data.',message)
    rospy.loginfo('=====received data %r', msg_list[0])
    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt)  # 0.1ms

    pop_1 = p.Population(1, p.IF_curr_exp, {}, label="pop_1")
    #input = p.Population(1, p.SpikeSourceArray, {'spike_times': [[0,3,6]]}, label='input')
    input = p.Population(1, p.SpikeSourcePoisson,
                         {'rate': (msg_list[0] + 1.6) * 100})
    stat_syn = p.StaticSynapse(weight=50.0, delay=1)
    input_proj = p.Projection(input,
                              pop_1,
                              p.OneToOneConnector(),
                              synapse_type=stat_syn,
                              receptor_type='excitatory')

    pop_1.record(['v', 'spikes'])
    p.run(10)
    pop_1_data = pop_1.get_data()

    spikes = pop_1_data.segments[0].spiketrains[0]
    mean_rate = int(gaussian_convolution(spikes, dt))
    rospy.loginfo('=====mean_rate %r', mean_rate)  # mean_rate = 64
    rate_command = mean_rate
    # rate coding of the spike train
    '''
    pub = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist, queue_size=10)
    # construct the output command
    command = Twist()
    command.linear.x = rate_command*0.02
    command.angular.z = rate_command/50000.
    pub.publish(command)
    '''
    pub = rospy.Publisher('/arm_controller/follow_joint_trajectory/goal',
                          FollowJointTrajectoryActionGoal,
                          queue_size=10)
    command = FollowJointTrajectoryActionGoal()
    command.header.stamp = rospy.Time.now()
    command.goal.trajectory.joint_names = ['elbow']
    point = JointTrajectoryPoint()
    point.positions = [rate_command / 10]
    point.time_from_start = rospy.Duration(1)
    command.goal.trajectory.points.append(point)
    pub.publish(command)
    rospy.loginfo('=====send command %r', command.goal.trajectory.points[0])

    fig_settings = {
        'lines.linewidth': 0.5,
        'axes.linewidth': 0.5,
        'axes.labelsize': 'small',
        'legend.fontsize': 'small',
        'font.size': 8
    }
    plt.rcParams.update(fig_settings)
    fig1 = plt.figure(1, figsize=(6, 8))

    def plot_spiketrains(segment):
        for spiketrain in segment.spiketrains:
            y = np.ones_like(spiketrain) * spiketrain.annotations['source_id']
            plt.plot(spiketrain, y, '.')
            plt.ylabel(segment.name)
            plt.setp(plt.gca().get_xticklabels(), visible=False)

    def plot_signal(signal, index, colour='b'):
        label = "Neuron %d" % signal.annotations['source_ids'][index]
        plt.plot(signal.times, signal[:, index], colour, label=label)
        plt.ylabel("%s (%s)" %
                   (signal.name, signal.units._dimensionality.string))
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.legend()

    print("now plotting the network---------------")
    rospy.loginfo('--------now plotting---------------')
    n_panels = sum(a.shape[1]
                   for a in pop_1_data.segments[0].analogsignalarrays) + 2
    plt.subplot(n_panels, 1, 1)
    plot_spiketrains(pop_1_data.segments[0])
    panel = 3
    for array in pop_1_data.segments[0].analogsignalarrays:
        for i in range(array.shape[1]):
            plt.subplot(n_panels, 1, panel)
            plot_signal(array, i, colour='bg'[panel % 2])
            panel += 1
    plt.xlabel("time (%s)" % array.times.units._dimensionality.string)
    plt.setp(plt.gca().get_xticklabels(), visible=True)  #
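gaussian_convolution() used above is defined elsewhere in the original ROS node. A minimal stand-in that bins the spike train, smooths it with a Gaussian kernel and returns the mean smoothed rate in Hz might look like the following; the kernel width and all defaults are assumptions:

import numpy as np

def gaussian_convolution(spiketrain, dt, sigma=10.0, t_stop=10.0):
    # Illustrative stand-in: histogram the spike times (ms), convolve with a
    # normalised Gaussian kernel of width sigma (ms), return the mean rate (Hz).
    times = np.asarray(spiketrain, dtype=float)
    edges = np.arange(0.0, t_stop + dt, dt)
    counts, _ = np.histogram(times, bins=edges)
    kernel_t = np.arange(-3 * sigma, 3 * sigma + dt, dt)
    kernel = np.exp(-kernel_t ** 2 / (2 * sigma ** 2))
    kernel /= kernel.sum()
    rate = np.convolve(counts / (dt * 1e-3), kernel, mode="same")
    return rate.mean()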
Example #53
from pylab import *
from pyNN.utility import Timer, init_logging, ProgressBar
import os

simulator_name = sys.argv[1]
exec ("from pyNN.%s import *" % simulator_name)
test_cases = [int(x) for x in sys.argv[2:]]

from pyNN.recording import files
from pyNN.space import *

timer = Timer()
progress_bar = ProgressBar(mode="fixed", width=20)
init_logging("connectors_benchmark_%s.log" % simulator_name, debug=True)


def draw_rf(cell, positions, connections, color="k"):
    idx = numpy.where(connections[:, 1] == cell)[0]
    sources = connections[idx, 0]
    for src in sources:
        plot([positions[cell, 1], positions[src, 1]], [positions[cell, 2], positions[src, 2]], c=color)


def distances(pos_1, pos_2, N):
    dx = abs(pos_1[:, 0] - pos_2[:, 0])
    dy = abs(pos_1[:, 1] - pos_2[:, 1])
    dx = numpy.minimum(dx, N - dx)
    dy = numpy.minimum(dy, N - dy)
    return sqrt(dx * dx + dy * dy)
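distances() measures Euclidean separation on a periodic (toroidal) domain of side N, so points near opposite edges are treated as close. An illustrative check (assuming numpy is available under that name, as in the functions above):

p1 = numpy.array([[0.05, 0.5]])
p2 = numpy.array([[0.95, 0.5]])
print distances(p1, p2, 1.0)   # -> [0.1] thanks to the wrap-around, not 0.9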

Example #54
from pylab import *
from pyNN.utility import Timer, init_logging, ProgressBar
import os

simulator_name = sys.argv[1]
exec("from pyNN.%s import *" % simulator_name)
test_cases = [int(x) for x in sys.argv[2:]]

from pyNN.recording import files
from pyNN.space import *

timer = Timer()
progress_bar = ProgressBar(mode='fixed', width=20)
init_logging("connectors_benchmark_%s.log" % simulator_name, debug=True)


def draw_rf(cell, positions, connections, color='k'):
    idx = numpy.where(connections[:, 1] == cell)[0]
    sources = connections[idx, 0]
    for src in sources:
        plot([positions[cell, 1], positions[src, 1]],
             [positions[cell, 2], positions[src, 2]],
             c=color)


def distances(pos_1, pos_2, N):
    dx = abs(pos_1[:, 0] - pos_2[:, 0])
    dy = abs(pos_1[:, 1] - pos_2[:, 1])
    dx = numpy.minimum(dx, N - dx)
    dy = numpy.minimum(dy, N - dy)
    return sqrt(dx * dx + dy * dy)
Example #55
Andrew Davison, UNIC, CNRS
August 2006, November 2009

"""

import socket, os
import csa
import numpy
from pyNN.utility import get_script_args, Timer

simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)

from pyNN.random import NumpyRNG

timer = Timer()
seed = 764756387
tstop = 1000.0  # ms
input_rate = 100.0  # Hz
cell_params = {'tau_refrac': 2.0,  # ms
               'v_thresh':  -50.0, # mV
               'tau_syn_E':  2.0,  # ms
               'tau_syn_I':  2.0}  # ms
n_record = 5

node = setup(timestep=0.025, min_delay=1.0, max_delay=10.0, debug=True, quit_on_end=False)
print("Process with rank %d running on %s" % (node, socket.gethostname()))


rng = NumpyRNG(seed=seed, parallel_safe=True)
Example #56
def test(cases=[1]):

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None), axes='xy')
    safe = False
    callback = progress_bar.set_level
    autapse = False
    parallel_safe = True
    render = True
    to_file = True

    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        #delay = RandomDistribution('uniform', (0.1,5.))
        #delay = "0.1 + d/0.2"
        delay = 0.1
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "exp(-d**2/(2*0.1**2))"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()

        synapse = StaticSynapse(weight=w, delay=delay)
        rng = NumpyRNG(23434, parallel_safe=parallel_safe)

        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression,
                safe=safe,
                callback=callback,
                allow_self_connections=autapse,
                rng=rng)
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(0.02,
                                             safe=safe,
                                             callback=callback,
                                             allow_self_connections=autapse,
                                             rng=rng)
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            # delay is already carried by the StaticSynapse defined above
            conn = AllToAllConnector(safe=safe,
                                     callback=callback,
                                     allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50,
                                            safe=safe,
                                            callback=callback,
                                            allow_self_connections=autapse,
                                            rng=rng)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50,
                                           safe=safe,
                                           callback=callback,
                                           allow_self_connections=autapse,
                                           rng=rng)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe, callback=callback)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector(files.NumpyBinaryFile(
                'Results/connections.dat', mode='r'),
                                     safe=safe,
                                     callback=callback,
                                     distributed=True)
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(degree=0.1,
                                       rewiring=0.,
                                       safe=safe,
                                       callback=callback,
                                       allow_self_connections=autapse)
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print "Generating data for %s" % fig_name

        prj = Projection(x, x, conn, synapse, space=sp)

        mytime = timer.diff()
        print "Time to connect the cell population:", mytime, 's'
        print "Nb synapses built", prj.size()

        if to_file:
            if not (os.path.isdir('Results')):
                os.mkdir('Results')
            print "Saving Connections...."
            prj.save('all',
                     files.NumpyBinaryFile('Results/connections.dat',
                                           mode='w'),
                     gather=True)

        mytime = timer.diff()
        print "Time to save the projection:", mytime, 's'

        if render and to_file:
            print "Saving Positions...."
            x.save_positions('Results/positions.dat')
        end()

        if node_id == 0 and render and to_file:
            figure()
            print "Generating and saving %s" % fig_name
            positions = numpy.loadtxt('Results/positions.dat')

            positions[:, 0] -= positions[:, 0].min()
            connections = files.NumpyBinaryFile('Results/connections.dat',
                                                mode='r').read()
            print positions.shape, connections.shape
            connections[:, 0] -= connections[:, 0].min()
            connections[:, 1] -= connections[:, 1].min()
            idx_pre = connections[:, 0].astype(int)
            idx_post = connections[:, 1].astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:, 1], positions[:, 2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:, 2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:, 3], 50)
            subplot(234)
            numpy.random.seed(74562)
            ids = numpy.random.permutation(positions[:, 0])[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y']
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], '.')

            subplot(236)
            plot(d, connections[:, 3], '.')
            savefig("Results/" + fig_name)
            #os.remove('Results/connections.dat')
            #os.remove('Results/positions.dat')
            show()
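test() relies on a module-level population x laid out inside the unit square; the original benchmark builds it earlier in the script, outside this excerpt. A minimal stand-in, with every value below an assumption and test_cases taken from sys.argv as in the companion snippet above, could be:

# Illustrative setup of the population `x` used by test(); values are assumptions.
node_id = setup(timestep=0.1, min_delay=0.1, max_delay=5.0)
N = 900
grid = Grid2D(aspect_ratio=1.0, dx=1.0 / 30, dy=1.0 / 30)   # 30 x 30 grid in the unit square
x = Population(N, IF_curr_exp(), structure=grid, label="cells")
test(cases=test_cases)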
Example #57
Brunel N (2000) Dynamics of sparsely connected networks of excitatory and inhibitory spiking neurons. J Comput Neurosci 8:183-208

Andrew Davison, UNIC, CNRS
May 2006

"""

from pyNN.utility import get_script_args, Timer, ProgressBar

simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)

from pyNN.random import NumpyRNG, RandomDistribution

timer = Timer()

# === Define parameters ========================================================

downscale = 50  # scale number of neurons down by this factor
# scale synaptic weights up by this factor to
# obtain similar dynamics independent of size
order = 50000  # determines size of network:
# 4*order excitatory neurons
# 1*order inhibitory neurons
Nrec = 50  # number of neurons to record from, per population
epsilon = 0.1  # connectivity: proportion of neurons each neuron projects to

# Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
# here: Case C, asynchronous irregular firing, ~35 Hz
eta = 2.0  # rel rate of external input
Example #58
        timing_dependence=SpikePairRule(tau_plus=30.0, tau_minus=30.0),
        weight_dependence=AdditiveWeightDependence(
            w_min=0, w_max=20, A_plus=0.005,
            A_minus=0.006)  # _A_plus=0.5, _A_minus=0.6
    )

runtime = 100
#runtime = 500
#runtime = 120000
#runtime =  1000000
metric_window = 100
metric_start_offset = 0
metric_t_start = 0
metric_t_stop = runtime

timer = Timer()
timer.start()

# cell_params will be passed to the constructor of the Population Object

cell_params = {
    'tau_m': tau_m,
    'cm': cm,
    'v_rest': v_rest,
    'v_reset': v_reset,
    'v_thresh': v_thresh,
    'tau_syn_E': tau_syn_exc,
    'tau_syn_I': tau_syn_inh,
    'tau_refrac': t_refrac,
    'i_offset': i_offset
}