def do_run(seed=None):
    simulator_name = 'spiNNaker'

    timer = Timer()

    # === Define parameters =========================================

    parallel_safe = True

    n = 1500  # number of cells
    # ratio of excitatory cells to inhibitory cells
    r_ei = 4.0
    pconn = 0.02  # connection probability

    dt = 1  # (ms) simulation timestep
    tstop = 200  # (ms) simulation duration
    delay = 1

    # Cell parameters
    area = 20000.  # (µm²)
    tau_m = 20.  # (ms)
    cm = 1.  # (µF/cm²)
    g_leak = 5e-5  # (S/cm²)
    e_leak = -49.  # (mV)
    v_thresh = -50.  # (mV)
    v_reset = -60.  # (mV)
    t_refrac = 5.  # (ms) (clamped at v_reset)
    # (mV) 'mean' membrane potential,  for calculating CUBA weights
    v_mean = -60.
    tau_exc = 5.  # (ms)
    tau_inh = 10.  # (ms)
    # (nS) these weights should be similar to the COBA weights,
    g_exc = 0.27
    # (nS) but the depolarising drift should be taken into account
    g_inh = 4.5
    e_rev_exc = 0.  # (mV)
    e_rev_inh = -80.  # (mV)

    # === Calculate derived parameters ===============================

    area *= 1e-8  # convert to cm²
    cm *= area * 1000  # convert to nF
    r_m = 1e-6 / (g_leak * area)  # membrane resistance in MΩ
    assert tau_m == cm * r_m  # just to check

    # number of excitatory cells
    n_exc = int(round((n * r_ei / (1 + r_ei))))
    n_inh = n - n_exc  # number of inhibitory cells

    celltype = p.IF_curr_exp
    # (nA) weight of excitatory synapses
    w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)
    w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
    assert w_exc > 0
    assert w_inh < 0
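    # For reference, with the parameter values above these evaluate to
    #   w_exc = 1e-3 * 0.27 * (0 - (-60)) = 0.0162 nA
    #   w_inh = 1e-3 * 4.5 * (-80 - (-60)) = -0.09 nA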

    # === Build the network ==========================================

    p.setup(timestep=dt, min_delay=delay, max_delay=delay)

    if simulator_name == 'spiNNaker':
        # this will set 100 neurons per core
        p.set_number_of_neurons_per_core(p.IF_curr_exp, 100)
        # this will set 50 neurons per core
        p.set_number_of_neurons_per_core(p.IF_cond_exp, 50)

    # node_id = 1
    # np = 1

    # host_name = socket.gethostname()

    cell_params = {'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
                   'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
                   'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0}

    timer.start()

    exc_cells = p.Population(n_exc, celltype, cell_params,
                             label="Excitatory_Cells")
    inh_cells = p.Population(n_inh, celltype, cell_params,
                             label="Inhibitory_Cells")
    rng = NumpyRNG(seed=seed, parallel_safe=parallel_safe)
    uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng)
    exc_cells.initialize(v=uniform_distr)
    inh_cells.initialize(v=uniform_distr)

    exc_conn = p.FixedProbabilityConnector(pconn, rng=rng)
    synapse_exc = p.StaticSynapse(weight=w_exc, delay=delay)
    inh_conn = p.FixedProbabilityConnector(pconn, rng=rng)
    synapse_inh = p.StaticSynapse(weight=w_inh, delay=delay)

    connections = dict()
    connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn,
                                      synapse_type=synapse_exc,
                                      receptor_type='excitatory')
    connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn,
                                      synapse_type=synapse_exc,
                                      receptor_type='excitatory')
    connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn,
                                      synapse_type=synapse_inh,
                                      receptor_type='inhibitory')
    connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn,
                                      synapse_type=synapse_inh,
                                      receptor_type='inhibitory')

    # === Setup recording ==============================
    exc_cells.record("spikes")

    # === Run simulation ================================
    p.run(tstop)

    exc_spikes = exc_cells.get_data("spikes")

    exc_cells.write_data(neo_path, "spikes")

    p.end()

    return exc_spikes
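# Usage sketch (not part of the original script): do_run relies on module-level
# imports and names from the surrounding file, for example
#   import pyNN.spiNNaker as p
#   from pyNN.utility import Timer
#   from pyNN.random import NumpyRNG, RandomDistribution
# plus an output path `neo_path` for write_data().  With those in place:
#   spikes = do_run(seed=12345)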
Example #2
def runBrunelNetwork(g=5.,
                     eta=2.,
                     dt=0.1,
                     simtime=1000.0,
                     delay=1.5,
                     epsilon=0.1,
                     order=2500,
                     N_rec=50,
                     N_rec_v=2,
                     save=False,
                     simulator_name='nest',
                     jnml_simulator=None,
                     extra={}):

    exec("from pyNN.%s import *" % simulator_name, globals())

    timer = Timer()

    # === Define parameters ========================================================

    # scale the number of neurons down by this factor and scale synaptic weights
    # up by the same factor, to obtain similar dynamics independent of size
    downscale = 1
    order = order  # determines size of network:
    #   4*order excitatory neurons
    #   1*order inhibitory neurons
    Nrec = N_rec  # number of neurons to record from, per population
    epsilon = epsilon  # connectivity: proportion of neurons each neuron projects to

    # Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
    # here: Case C, asynchronous irregular firing, ~35 Hz
    eta = eta  # rel rate of external input
    g = g  # rel strength of inhibitory synapses
    J = 0.1  # synaptic weight [mV]
    delay = delay  # synaptic delay, all connections [ms]

    # single neuron parameters
    tauMem = 20.0  # neuron membrane time constant [ms]
    tauSyn = 0.1  # synaptic time constant [ms]
    tauRef = 2.0  # refractory time [ms]
    U0 = 0.0  # resting potential [mV]
    theta = 20.0  # threshold

    # simulation-related parameters
    simtime = simtime  # simulation time [ms]
    dt = dt  # simulation step length [ms]

    # seed for random generator used when building connections
    connectseed = 12345789
    use_RandomArray = True  # use Python rng rather than NEST rng

    # seed for random generator(s) used during simulation
    kernelseed = 43210987

    # === Calculate derived parameters =============================================

    # scaling: compute effective order and synaptic strength
    order_eff = int(float(order) / downscale)
    J_eff = J * downscale

    # compute neuron numbers
    NE = int(4 * order_eff)  # number of excitatory neurons
    NI = int(1 * order_eff)  # number of inhibitory neurons
    N = NI + NE  # total number of neurons

    # compute synapse numbers
    CE = int(epsilon * NE)  # number of excitatory synapses on neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses on neuron
    C = CE + CI  # total number of internal synapses per n.
    Cext = CE  # number of external synapses on neuron

    # synaptic weights, scaled for alpha functions, such that
    # for constant membrane potential, charge J would be deposited
    fudge = 0.00041363506632638  # ensures dV = J at V=0

    # excitatory weight: JE = J_eff / tauSyn * fudge
    JE = (J_eff / tauSyn) * fudge

    # inhibitory weight: JI = - g * JE
    JI = -g * JE

    # threshold, external, and Poisson generator rates:
    nu_thresh = theta / (J_eff * CE * tauMem)
    nu_ext = eta * nu_thresh  # external rate per synapse
    p_rate = 1000 * nu_ext * Cext  # external input rate per neuron (Hz)

    # number of synapses---just so we know
    # number of synapses: each neuron receives C internal synapses plus 1 synapse
    # from the Poisson generator, plus 2*Nrec synapses to the spike detectors
    Nsyn = (C + 1) * N + 2 * Nrec
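    # For reference, with the default arguments (order=2500, epsilon=0.1, g=5, eta=2)
    # this gives NE=10000, NI=2500, CE=1000, CI=250, JE ~= 4.136e-4 nA,
    # JI ~= -2.068e-3 nA, nu_thresh=0.01, nu_ext=0.02, p_rate=20000 Hz
    # and Nsyn=15637600.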

    # put cell parameters into a dict
    cell_params = {
        'tau_m': tauMem,
        'tau_syn_E': tauSyn,
        'tau_syn_I': tauSyn,
        'tau_refrac': tauRef,
        'v_rest': U0,
        'v_reset': U0,
        'v_thresh': theta,
        'cm': 0.001
    }  # (nF)

    # === Build the network ========================================================

    # clear all existing network elements and set resolution and limits on delays.
    # For NEST, limits must be set BEFORE connecting any elements

    #extra = {'threads' : 2}

    rank = setup(timestep=dt, max_delay=delay, **extra)
    print("rank =", rank)
    np = num_processes()
    print("np =", np)
    import socket
    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank + 1, host_name))

    if 'threads' in extra:
        print("%d Initialising the simulator with %d threads..." %
              (rank, extra['threads']))
    else:
        print("%d Initialising the simulator with single thread..." % rank)

    # Small function to display information only on node 0
    def nprint(s):
        if rank == 0:
            print(s)

    timer.start()  # start timer on construction

    print("%d Setting up random number generator" % rank)
    rng = NumpyRNG(kernelseed, parallel_safe=True)

    print("%d Creating excitatory population with %d neurons." % (rank, NE))
    celltype = IF_curr_alpha(**cell_params)
    celltype.default_initial_values['v'] = U0  # set default initial v, useful for NML2 export
    E_net = Population(NE, celltype, label="E_net")

    print("%d Creating inhibitory population with %d neurons." % (rank, NI))
    I_net = Population(NI, celltype, label="I_net")

    print(
        "%d Initialising membrane potential to random values between %g mV and %g mV."
        % (rank, U0, theta))
    uniformDistr = RandomDistribution('uniform', low=U0, high=theta, rng=rng)
    E_net.initialize(v=uniformDistr)
    I_net.initialize(v=uniformDistr)

    print("%d Creating excitatory Poisson generator with rate %g spikes/s." %
          (rank, p_rate))
    source_type = SpikeSourcePoisson(rate=p_rate)
    expoisson = Population(NE, source_type, label="expoisson")

    print("%d Creating inhibitory Poisson generator with the same rate." %
          rank)
    inpoisson = Population(NI, source_type, label="inpoisson")

    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    E_net.record('spikes')
    if N_rec_v > 0:
        E_net[0:min(NE, N_rec_v)].record('v')

    print("%d Setting up recording in inhibitory population." % rank)
    I_net.record('spikes')
    if N_rec_v > 0:
        I_net[0:min(NI, N_rec_v)].record('v')

    progress_bar = ProgressBar(width=20)
    connector = FixedProbabilityConnector(epsilon,
                                          rng=rng,
                                          callback=progress_bar)
    E_syn = StaticSynapse(weight=JE, delay=delay)
    I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn = StaticSynapse(weight=JE, delay=dt)

    print(
        "%d Connecting excitatory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JE, delay))
    E_to_E = Projection(E_net,
                        E_net,
                        connector,
                        E_syn,
                        receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    I_to_E = Projection(I_net,
                        E_net,
                        connector,
                        I_syn,
                        receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    input_to_E = Projection(expoisson,
                            E_net,
                            ext_Connector,
                            ext_syn,
                            receptor_type="excitatory")
    print("input --> E\t", len(input_to_E), "connections")

    print(
        "%d Connecting inhibitory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JI, delay))
    E_to_I = Projection(E_net,
                        I_net,
                        connector,
                        E_syn,
                        receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_net,
                        I_net,
                        connector,
                        I_syn,
                        receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    input_to_I = Projection(inpoisson,
                            I_net,
                            ext_Connector,
                            ext_syn,
                            receptor_type="excitatory")
    print("input --> I\t", len(input_to_I), "connections")

    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # restart timer to measure the simulation time
    print("%d Running simulation for %g ms (dt=%sms)." % (rank, simtime, dt))
    run(simtime)
    print("Done")
    simCPUTime = timer.elapsedTime()

    # write data to file
    #print("%d Writing data to file." % rank)
    #(E_net + I_net).write_data("Results/brunel_np%d_%s.pkl" % (np, simulator_name))
    if save and simulator_name != 'neuroml':
        for pop in [E_net, I_net]:
            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.gdf" %
                            (simulator_name, pop.label, rank))
            spikes = pop.get_data('spikes', gather=False)
            for segment in spikes.segments:
                io.write_segment(segment)

            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.dat" %
                            (simulator_name, pop.label, rank))
            vs = pop.get_data('v', gather=False)
            for segment in vs.segments:
                io.write_segment(segment)

    spike_data = {}
    spike_data['senders'] = []
    spike_data['times'] = []
    index_offset = 1
    for pop in [E_net, I_net]:
        if rank == 0:
            spikes = pop.get_data('spikes', gather=False)
            #print(spikes.segments[0].all_data)
            num_rec = len(spikes.segments[0].spiketrains)
            print("Extracting spike info (%i) for %i cells in %s" %
                  (num_rec, pop.size, pop.label))
            #assert(num_rec==len(spikes.segments[0].spiketrains))
            for i in range(num_rec):
                ss = spikes.segments[0].spiketrains[i]
                for s in ss:
                    index = i + index_offset
                    #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                    spike_data['senders'].append(index)
                    spike_data['times'].append(s)
            index_offset += pop.size

    #from IPython.core.debugger import Tracer
    #Tracer()()

    E_rate = E_net.mean_spike_count() * 1000.0 / simtime
    I_rate = I_net.mean_spike_count() * 1000.0 / simtime

    # write a short report
    nprint("\n--- Brunel Network Simulation ---")
    nprint("Nodes              : %d" % np)
    nprint("Number of Neurons  : %d" % N)
    nprint("Number of Synapses : %d" % Nsyn)
    nprint("Input firing rate  : %g" % p_rate)
    nprint("Excitatory weight  : %g" % JE)
    nprint("Inhibitory weight  : %g" % JI)
    nprint("Excitatory rate    : %g Hz" % E_rate)
    nprint("Inhibitory rate    : %g Hz" % I_rate)
    nprint("Build time         : %g s" % buildCPUTime)
    nprint("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()

    if simulator_name == 'neuroml' and jnml_simulator:
        from pyneuroml import pynml
        lems_file = 'LEMS_Sim_PyNN_NeuroML2_Export.xml'

        print('Going to run generated LEMS file: %s on simulator: %s' %
              (lems_file, jnml_simulator))

        if jnml_simulator == 'jNeuroML':
            results, events = pynml.run_lems_with_jneuroml(
                lems_file,
                nogui=True,
                load_saved_data=True,
                reload_events=True)

        elif jnml_simulator == 'jNeuroML_NEURON':
            results, events = pynml.run_lems_with_jneuroml_neuron(
                lems_file,
                nogui=True,
                load_saved_data=True,
                reload_events=True)

        spike_data['senders'] = []
        spike_data['times'] = []
        for k in events.keys():
            values = k.split('/')
            index = int(
                values[1]) if values[0] == 'E_net' else NE + int(values[1])
            n = len(events[k])
            print(
                "Loading spikes for %s (index %i): [%s, ..., %s (n=%s)] sec" %
                (k, index, events[k][0] if n > 0 else '-',
                 events[k][-1] if n > 0 else '-', n))
            for t in events[k]:
                spike_data['senders'].append(index)
                spike_data['times'].append(t * 1000)

    #print spike_data
    return spike_data
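# Usage sketch (not part of the original example): run the network and make a simple
# raster plot from the returned spike data; assumes matplotlib is available.
#
#     data = runBrunelNetwork(simulator_name='nest', simtime=500.0, order=250)
#     import matplotlib.pyplot as plt
#     plt.scatter(data['times'], data['senders'], s=1, color='k')
#     plt.xlabel('time (ms)')
#     plt.ylabel('cell index')
#     plt.savefig('brunel_raster.png')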
def test(cases=[1]):
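    # Note: this fragment comes from a larger connector benchmark script; it assumes
    # that names such as `x` (the Population being connected), `progress_bar`,
    # `node_id`, `simulator_name`, `files`, `distances` and `draw_rf` are defined
    # at module level in the original file.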

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None), axes="xy")
    safe = False
    callback = progress_bar.set_level
    autapse = False
    parallel_safe = True
    render = True
    to_file = True

    for case in cases:
        # w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        # w = 0.1
        # w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        # delay = RandomDistribution('uniform', (0.1,5.))
        # delay = "0.1 + d/0.2"
        delay = 0.1
        # delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "exp(-d**2/(2*0.1**2))"
        # d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        # d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()

        synapse = StaticSynapse(weight=w, delay=delay)
        rng = NumpyRNG(23434, parallel_safe=parallel_safe)

        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng
            )
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(
                0.02, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng
            )
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            conn = AllToAllConnector(safe=safe, callback=callback, allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe, callback=callback)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector(
                files.NumpyBinaryFile("Results/connections.dat", mode="r"),
                safe=safe,
                callback=callback,
                distributed=True,
            )
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(
                degree=0.1, rewiring=0.0, safe=safe, callback=callback, allow_self_connections=autapse
            )
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print("Generating data for %s" % fig_name)

        prj = Projection(x, x, conn, synapse, space=sp)

        mytime = timer.diff()
        print("Time to connect the cell population:", mytime, "s")
        print("Nb synapses built", prj.size())

        if to_file:
            if not (os.path.isdir("Results")):
                os.mkdir("Results")
            print("Saving Connections....")
            prj.save("all", files.NumpyBinaryFile("Results/connections.dat", mode="w"), gather=True)

        mytime = timer.diff()
        print("Time to save the projection:", mytime, "s")

        if render and to_file:
            print("Saving Positions....")
            x.save_positions("Results/positions.dat")
        end()

        if node_id == 0 and render and to_file:
            figure()
            print("Generating and saving %s" % fig_name)
            positions = numpy.loadtxt("Results/positions.dat")

            positions[:, 0] -= positions[:, 0].min()
            connections = files.NumpyBinaryFile("Results/connections.dat", mode="r").read()
            print(positions.shape, connections.shape)
            connections[:, 0] -= connections[:, 0].min()
            connections[:, 1] -= connections[:, 1].min()
            idx_pre = connections[:, 0].astype(int)
            idx_post = connections[:, 1].astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title("Cells positions")
            plot(positions[:, 1], positions[:, 2], ".")
            subplot(232)
            title("Weights distribution")
            hist(connections[:, 2], 50)
            subplot(233)
            title("Delay distribution")
            hist(connections[:, 3], 50)
            subplot(234)
            numpy.random.seed(74562)
            ids = numpy.random.permutation(positions[:, 0])[0:6]
            colors = ["k", "r", "b", "g", "c", "y"]
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], ".")

            subplot(236)
            plot(d, connections[:, 3], ".")
            savefig("Results/" + fig_name)
            # os.remove('Results/connections.dat')
            # os.remove('Results/positions.dat')
            show()
Example #5
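# Note: the following fragment is taken from the middle of a larger benchmark script;
# names such as socket, node_id, extra, timer, benchmark, celltype, the cell
# parameters (tau_m, tau_exc, tau_inh, E_leak, ...), rate, stim_dur, rngseed and
# parallel_safe are defined earlier in the original file, together with a star
# import from the chosen pyNN backend.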
host_name = socket.gethostname()
print("Host #%d is on %s" % (node_id+1, host_name))

print("%s Initialising the simulator with %d thread(s)..." % (node_id, extra['threads']))

cell_params = {
    'tau_m'      : tau_m,    'tau_syn_E'  : tau_exc,  'tau_syn_I'  : tau_inh,
    'v_rest'     : E_leak,   'v_reset'    : v_reset,  'v_thresh'   : v_thresh,
    'cm'         : cm,       'tau_refrac' : t_refrac}

if (benchmark == "COBA"):
    cell_params['e_rev_E'] = Erev_exc
    cell_params['e_rev_I'] = Erev_inh

timer.start()

print("%s Creating cell populations..." % node_id)
all_cells = Population(n_exc+n_inh, celltype(**cell_params), label="All Cells")
exc_cells = all_cells[:n_exc]
exc_cells.label = "Excitatory cells"
inh_cells = all_cells[n_exc:]
inh_cells.label = "Inhibitory cells"
if benchmark == "COBA":
    ext_stim = Population(20, SpikeSourcePoisson(rate=rate, duration=stim_dur), label="expoisson")
    rconn = 0.01
    ext_conn = FixedProbabilityConnector(rconn)
    ext_syn = StaticSynapse(weight=0.1)

print("%s Initialising membrane potential to random values..." % node_id)
rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
uniformDistr = RandomDistribution('uniform', [v_reset,v_thresh], rng=rng)
all_cells.initialize(v=uniformDistr)
class NetworkModel(object):
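    # Note: this class is excerpted from a larger simulation package; it relies on
    # helpers and globals defined elsewhere in that project (e.g. the `utils` and
    # `CC` modules, `get_local_indices`, `np`/`nprnd`/`random`, `sim_cnt`,
    # `input_created` and an older (pre-0.8) PyNN API brought in via import_pynn()).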
    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print("USE_MPI: yes", "\tpc_id, n_proc:", self.pc_id, self.n_proc)
        else:
            self.pc_id, self.n_proc = 0, 1
            print("MPI not used")

        np.random.seed(params["np_random_seed"] + self.pc_id)

        if self.params["with_short_term_depression"]:
            self.short_term_depression = SynapseDynamics(
                fast=TsodyksMarkramMechanism(U=0.95, tau_rec=10.0, tau_facil=0.0)
            )

    def import_pynn(self):
        """
        This function only needs to be called when this class is imported and used from another script.
        """
        import pyNN

        exec("from pyNN.%s import *" % self.params["simulator"], globals())
        print("import pyNN\npyNN.version: ", pyNN.__version__)

    def setup(self, load_tuning_prop=False, times={}):

        self.projections = {}
        self.projections["ee"] = []
        self.projections["ei"] = []
        self.projections["ie"] = []
        self.projections["ii"] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="exc"
            )  # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(
                self.params, mode="hexgrid", cell_type="inh"
            )  # set the tuning properties of inh cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params["tuning_prop_means_fn"])
            self.tuning_prop_inh = np.loadtxt(self.params["tuning_prop_inh_fn"])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(
            self.tuning_prop_exc, self.params["motion_params"], self.params
        )  # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print("Saving tuning_prop to file:", self.params["tuning_prop_means_fn"])
            np.savetxt(self.params["tuning_prop_means_fn"], self.tuning_prop_exc)
            print("Saving tuning_prop to file:", self.params["tuning_prop_inh_fn"])
            np.savetxt(self.params["tuning_prop_inh_fn"], self.tuning_prop_inh)
            print("Saving gids to record to: ", self.params["gids_to_record_fn"])
            np.savetxt(self.params["gids_to_record_fn"], indices[: self.params["n_gids_to_record"]], fmt="%d")

        #        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer

        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times["t_all"] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params["delay_range"]
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params["seed"])
        rng_v = NumpyRNG(
            seed=sim_cnt * 3147 + self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(
            seed=self.params["seed"], parallel_safe=True
        )  # if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution(
            "normal",
            (self.params["v_init"], self.params["v_init_sigma"]),
            rng=rng_v,
            constrain="redraw",
            boundaries=(-80, -60),
        )

        self.times["t_setup"] = self.timer.diff()
        self.times["t_calc_conns"] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(
            axes="xy", periodic_boundaries=((0.0, self.params["torus_width"]), (0.0, self.params["torus_height"]))
        )

    def create_neurons_with_limited_tuning_properties(self):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params["neuron_model"] == "IF_cond_exp":
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_exp, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "IF_cond_alpha":
            self.exc_pop = Population(n_exc, IF_cond_alpha, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_alpha, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "EIF_cond_exp_isfa_ista":
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params["cell_params_exc"], label="exc_cells")
            self.inh_pop = Population(
                self.params["n_inh"], EIF_cond_exp_isfa_ista, self.params["cell_params_inh"], label="inh_pop"
            )
        else:
            print("\n\nUnknown neuron model:\n\t", self.params["neuron_model"])

        # set cell positions, required for isotropic connections
        cell_pos_exc = np.zeros((3, self.params["n_exc"]))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params["n_inh"]))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [[] for i in range(len(self.local_idx_exc))]
        print("Debug, pc_id %d has local %d exc indices:" % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc)
        self.exc_pop.initialize("v", self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params["n_exc"])
        print("Debug, pc_id %d has local %d inh indices:" % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh)
        self.inh_pop.initialize("v", self.v_init_dist)
        self.times["t_create"] = self.timer.diff()

    def create(self, input_created=False):
        """
            # # # # # # # # # # # #
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params["neuron_model"] == "IF_cond_exp":
            self.exc_pop = Population(
                self.params["n_exc"], IF_cond_exp, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_exp, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "IF_cond_alpha":
            self.exc_pop = Population(
                self.params["n_exc"], IF_cond_alpha, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], IF_cond_alpha, self.params["cell_params_inh"], label="inh_pop"
            )
        elif self.params["neuron_model"] == "EIF_cond_exp_isfa_ista":
            self.exc_pop = Population(
                self.params["n_exc"], EIF_cond_exp_isfa_ista, self.params["cell_params_exc"], label="exc_cells"
            )
            self.inh_pop = Population(
                self.params["n_inh"], EIF_cond_exp_isfa_ista, self.params["cell_params_inh"], label="inh_pop"
            )
        else:
            print("\n\nUnknown neuron model:\n\t", self.params["neuron_model"])
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        print("Debug, pc_id %d has local %d exc indices:" % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc)

        cell_pos_exc = np.zeros((3, self.params["n_exc"]))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params["n_inh"]))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        if not input_created:
            self.spike_times_container = [[] for i in range(len(self.local_idx_exc))]

        self.exc_pop.initialize("v", self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params["n_exc"])
        print("Debug, pc_id %d has local %d inh indices:" % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh)
        self.inh_pop.initialize("v", self.v_init_dist)

        self.times["t_create"] = self.timer.diff()

    def connect(self):
        if self.params["n_exc"] > 5000:
            save_output = False
        else:
            save_output = True

        self.connect_input_to_exc()
        self.connect_populations("ee")
        self.connect_populations("ei")
        self.connect_populations("ie")
        self.connect_populations("ii")
        self.connect_noise()
        self.times["t_calc_conns"] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def create_input(self, load_files=False, save_output=False):

        if load_files:
            if self.pc_id == 0:
                print("Loading input spiketrains...")
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params["input_st_fn_base"] + str(tgt) + ".npy"
                    spike_times = np.load(fn)
                except:  # this cell does not get any input
                    print("Missing file: ", fn)
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print("Computing input spiketrains...")
            nprnd.seed(self.params["input_spikes_seed"])
            dt = self.params["dt_rate"]  # [ms] time step for the inhomogeneous Poisson process
            time = np.arange(0, self.params["t_sim"], dt)
            blank_idx = np.arange(
                1.0 / dt * self.params["t_before_blank"],
                1.0 / dt * (self.params["t_before_blank"] + self.params["t_blank"]),
            )
            before_stim_idx = np.arange(0, self.params["t_start"] * 1.0 / dt)
            blank_idx = np.concatenate((blank_idx, before_stim_idx)).astype(int)

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))

            # get the input signal
            print("Calculating input signal")
            for i_time, time_ in enumerate(time):
                L_input[:, i_time] = utils.get_input(
                    self.tuning_prop_exc[my_units, :], self.params, time_ / self.params["t_stimulus"]
                )
                L_input[:, i_time] *= self.params["f_max_stim"]
                if i_time % 500 == 0:
                    print("t:", time_)
            #                    print 'L_input[:, %d].max()', L_input[:, i_time].max()
            # blanking
            for i_time in blank_idx:
                #                L_input[:, i_time] = 0.
                L_input[:, i_time] = np.random.permutation(L_input[:, i_time])

            # create the spike trains
            print("Creating input spiketrains for unit")
            for i_, unit in enumerate(my_units):
                print(unit, end=" ")
                rate_of_t = np.array(L_input[i_, :])
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
                for i in range(n_steps):
                    r = nprnd.rand()
                    if r <= ((rate_of_t[i] / 1000.0) * dt):  # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt)
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params["input_rate_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params["input_st_fn_base"] + str(unit) + ".npy"
                    np.save(output_fn, np.array(spike_times))

        self.times["create_input"] = self.timer.diff()
        return self.spike_times_container

    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print("Connecting input spiketrains...")

        #        self.stimulus = Population(len(self.local_idx_exc), SpikeSourceArray)
        #            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
        #                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
        #            self.projections[conn_type].append(prj)

        #        self.projections['stim'] = []
        #        self.stimuli = []
        #        self.pop_views = []
        #        conn = OneToOneConnector(weights=self.params['w_input_exc'])
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
            #            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            ssa = Population(1, SpikeSourceArray, {"spike_times": spike_times})
            #            ssa.set({'spike_times' : spike_times})
            #            self.stimuli.append(ssa)

            #            if self.params['with_short_term_depression']:

            #                connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory', synapse_dynamics=self.short_term_depression)
            #                selector = np.zeros(self.params['n_exc'], dtype=np.bool)
            #                selector[unit] = True
            #                print 'debug unit', unit, type(unit)
            #                w[i_] = 1.#self.params['w_input_exc']
            #                tgt = PopulationView(self.exc_pop, np.array([unit]))
            #                self.pop_views.append(tgt)
            #                prj = Projection(ssa, tgt, conn, target='excitatory', synapse_dynamics=self.short_term_depression)
            #                prj = Projection(self.stimuli[-1], self.pop_views[-1], conn, target='excitatory', synapse_dynamics=self.short_term_depression)
            #                self.projections['stim'].append(prj)
            #            else:
            connect(ssa, self.exc_pop[unit], self.params["w_input_exc"], synapse_type="excitatory")
        self.times["connect_input"] = self.timer.diff()

    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == "ee":
            n_src, n_tgt = self.params["n_exc"], self.params["n_exc"]
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = "excitatory"

        elif conn_type == "ei":
            n_src, n_tgt = self.params["n_exc"], self.params["n_inh"]
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = "excitatory"

        elif conn_type == "ie":
            n_src, n_tgt = self.params["n_inh"], self.params["n_exc"]
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = "inhibitory"

        elif conn_type == "ii":
            n_src, n_tgt = self.params["n_inh"], self.params["n_inh"]
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = "inhibitory"

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)

    def connect_anisotropic(self, conn_type):
        """
        conn_type = ['ee', 'ei', 'ie', 'ii']
        """
        if self.pc_id == 0:
            print("Connect anisotropic %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize()))

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)

        n_src_cells_per_neuron = int(round(self.params["p_%s" % conn_type] * n_src))
        (delay_min, delay_max) = self.params["delay_range"]
        local_connlist = np.zeros((n_src_cells_per_neuron * len(tgt_cells), 4))
        for i_, tgt in enumerate(tgt_cells):
            if self.params["direction_based_conn"]:
                p, latency = CC.get_p_conn_vec_xpred(
                    tp_src,
                    tp_tgt[tgt, :],
                    self.params["w_sigma_x"],
                    self.params["w_sigma_v"],
                    self.params["connectivity_radius"],
                )
            else:
                p, latency = CC.get_p_conn_vec(
                    tp_src,
                    tp_tgt[tgt, :],
                    self.params["w_sigma_x"],
                    self.params["w_sigma_v"],
                    self.params["connectivity_radius"],
                    self.params["maximal_latency"],
                )
            if conn_type[0] == conn_type[1]:
                p[tgt], latency[tgt] = 0.0, 0.0
            # random delays? --> np.permutate(latency) or latency[sources] * self.params['delay_scale'] * np.rand

            sorted_indices = np.argsort(p)
            if conn_type[0] == "e":
                sources = sorted_indices[-n_src_cells_per_neuron:]
            else:  # source = inhibitory
                if conn_type[0] == conn_type[1]:
                    sources = sorted_indices[
                        1 : n_src_cells_per_neuron + 1
                    ]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron]

            #            eta = 1e-9
            eta = 0
            w = (self.params["w_tgt_in_per_cell_%s" % conn_type] / (p[sources].sum() + eta)) * p[sources]
            #            print 'debug p', i_, tgt, p[sources]
            #            print 'debug sources', i_, tgt, sources
            #            print 'debug w', i_, tgt, w

            delays = np.minimum(
                np.maximum(latency[sources] * self.params["delay_scale"], delay_min), delay_max
            )  # map the delay into the valid range
            conn_list = np.array((sources, tgt * np.ones(n_src_cells_per_neuron), w, delays))
            local_connlist[i_ * n_src_cells_per_neuron : (i_ + 1) * n_src_cells_per_neuron, :] = conn_list.transpose()
            connector = FromListConnector(conn_list.transpose())
            if self.params["with_short_term_depression"]:
                prj = Projection(
                    src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression
                )
            else:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
            self.projections[conn_type].append(prj)

        if self.debug_connectivity:
            if self.pc_id == 0:
                print("DEBUG writing to file:", conn_list_fn)
            np.savetxt(conn_list_fn, local_connlist, fmt="%d\t%d\t%.4e\t%.4e")

    def connect_ee_random(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    E X C - E X C    R A N D O M   #
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
        """

        if self.pc_id == 0:
            print("Drawing random connections")
        sigma_x, sigma_v = self.params["w_sigma_x"], self.params["w_sigma_v"]
        (delay_min, delay_max) = self.params["delay_range"]
        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_ee_fn_base"] + "%d.dat" % (self.pc_id)
            conn_file = open(conn_list_fn, "w")
            output = ""
        for tgt in self.local_idx_exc:
            p = np.zeros(self.params["n_exc"], dtype="float32")
            latency = np.zeros(self.params["n_exc"], dtype="float32")
            for src in xrange(self.params["n_exc"]):
                if src != tgt:
                    p[src], latency[src] = CC.get_p_conn(
                        self.tuning_prop_exc[src, :],
                        self.tuning_prop_exc[tgt, :],
                        sigma_x,
                        sigma_v,
                        params["connectivity_radius"],
                    )  #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
            sources = random.sample(xrange(self.params["n_exc"]), int(self.params["n_src_cells_per_neuron"]))
            idx = p[sources] > 0
            non_zero_idx = np.nonzero(idx)[0]
            p_ = p[sources][non_zero_idx]
            l_ = latency[sources][non_zero_idx] * self.params["delay_scale"]

            w = utils.linear_transformation(p_, self.params["w_min"], self.params["w_max"])
            for i in xrange(len(p_)):
                #                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                delay = min(max(l_[i], delay_min), delay_max)  # map the delay into the valid range
                connect(self.exc_pop[non_zero_idx[i]], self.exc_pop[tgt], w[i], delay=delay, synapse_type="excitatory")
                if self.debug_connectivity:
                    output += "%d\t%d\t%.2e\t%.2e\n" % (
                        non_zero_idx[i],
                        tgt,
                        w[i],
                        delay,
                    )  #                    output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])

        if self.debug_connectivity:
            if self.pc_id == 0:
                print "DEBUG writing to file:", conn_list_fn
            conn_file.write(output)
            conn_file.close()

    def connect_isotropic(self, conn_type="ee"):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = p_max * exp(- d_ij / (2 * w_sigma_isotropic**2))

        This will give a 'convergence constrained' connectivity, i.e. each cell will have the same sum of incoming weights
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print "Connect isotropic %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == "ee":
            w_ = self.params["w_max"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == "ei":
            w_ = self.params["w_ei_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == "ie":
            w_ = self.params["w_ie_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == "ii":
            w_ = self.params["w_ii_mean"]
            w_tgt_in = params["w_tgt_in_per_cell_%s" % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)
        #            conn_file = open(conn_list_fn, 'w')
        #            output = ''
        #            output_dist = ''

        w_mean = w_tgt_in / (self.params["p_%s" % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params["w_sigma_distribution"] * w_mean
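        # w_mean is the target input weight per cell divided by the expected number of
        # incoming connections (p_<conn_type> * n_max_conn / n_tgt); w_sigma sets the
        # relative spread of the weight distribution drawn below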

        w_dist = RandomDistribution(
            "normal", (w_mean, w_sigma), rng=self.rng_conn, constrain="redraw", boundaries=(0, w_mean * 10.0)
        )
        delay_dist = RandomDistribution(
            "normal",
            (self.params["standard_delay"], self.params["standard_delay_sigma"]),
            rng=self.rng_conn,
            constrain="redraw",
            boundaries=(self.params["delay_range"][0], self.params["delay_range"][1]),
        )

        p_max = utils.get_pmax(self.params["p_%s" % conn_type], self.params["w_sigma_isotropic"], conn_type)
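        # p_max (computed in utils.get_pmax) is presumably the peak of the Gaussian
        # connection profile, chosen so that the overall connection density matches
        # p_<conn_type>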
        connector = DistanceDependentProbabilityConnector(
            "%f * exp(-d/(2*%f**2))" % (p_max, params["w_sigma_isotropic"]),
            allow_self_connections=False,
            weights=w_dist,
            delays=delay_dist,
            space=self.torus,
        )  # , n_connections=n_conn_ee)
        print "p_max for %s" % conn_type, p_max
        if self.params["with_short_term_depression"]:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)  # , synapse_dynamics=self.STD)
        self.projections[conn_type].append(prj)
        if self.debug_connectivity:
            #                if self.pc_id == 0:
            #                    print 'DEBUG writing to file:', conn_list_fn
            prj.saveConnections(self.params["conn_list_%s_fn_base" % conn_type] + ".dat", gather=True)

    #            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + 'gid%d.dat' % tgt, gather=False)
    #                conn_file.close()

    #            w = np.zeros(n_src, dtype='float32')
    #            delays = np.zeros(n_src, dtype='float32')
    #            for src in xrange(n_src):
    #                if conn_type[0] == conn_type[1]:
    #                    if (src != tgt): # no self-connections / autapses
    #                        d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
    #                        p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
    #                        if np.random.rand() <= p_ij:
    #                            w[src] = w_
    #                            delays[src] = d_ij * params['delay_scale']
    #                else:
    #                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
    #                    p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
    #                    if np.random.rand() <= p_ij:
    #                        w[src] = w_
    #                        delays[src] = d_ij * params['delay_scale']
    #            w *= w_tgt_in / w.sum()
    #            srcs = w.nonzero()[0]
    #            weights = w[srcs]
    #            for src in srcs:
    #                if w[src] > self.params['w_thresh_connection']:
    #                delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
    #                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
    #                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay)

    #        if self.debug_connectivity:
    #            if self.pc_id == 0:
    #                print 'DEBUG writing to file:', conn_list_fn
    #            conn_file.write(output)
    #            conn_file.close()

    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters to the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print "Connect random connections %s - %s" % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        w_mean = self.params["w_tgt_in_per_cell_%s" % conn_type] / (n_src * self.params["p_%s" % conn_type])
        w_sigma = self.params["w_sigma_distribution"] * w_mean
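        # here w_mean is the target input weight per cell divided by the expected
        # number of incoming connections (n_src * p_<conn_type>); w_sigma sets the
        # relative spread of the normal weight distribution drawn below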

        weight_distr = RandomDistribution(
            "normal", (w_mean, w_sigma), rng=self.rng_conn, constrain="redraw", boundaries=(0, w_mean * 10.0)
        )

        delay_dist = RandomDistribution(
            "normal",
            (self.params["standard_delay"], self.params["standard_delay_sigma"]),
            rng=self.rng_conn,
            constrain="redraw",
            boundaries=(self.params["delay_range"][0], self.params["delay_range"][1]),
        )

        connector = FastFixedProbabilityConnector(
            self.params["p_%s" % conn_type], weights=weight_distr, delays=delay_dist
        )
        if self.params["with_short_term_depression"]:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params["conn_list_%s_fn_base" % conn_type] + "%d.dat" % (self.pc_id)
        print "Saving random %s connections to %s" % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)

    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # #
            #     C O N N E C T   #
            # # # # # # # # # # # #
            Calls the appropriate connect method according to the flag set in simulation_parameters.py
        """
        if self.params["connectivity_%s" % conn_type] == "anisotropic":
            self.connect_anisotropic(conn_type)
        elif self.params["connectivity_%s" % conn_type] == "isotropic":
            self.connect_isotropic(conn_type)
        elif self.params["connectivity_%s" % conn_type] == "random":
            self.connect_random(conn_type)
        else:  # populations do not get connected
            pass

    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # #
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
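        # every local cell receives its own independent pair of Poisson noise sources,
        # one excitatory and one inhibitory, created in the loops below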
        for tgt in self.local_idx_exc:
            # new
            if self.params["simulator"] == "nest":  # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type("poisson_generator"), {"rate": self.params["f_exc_noise"]})
                noise_inh = create(native_cell_type("poisson_generator"), {"rate": self.params["f_inh_noise"]})
            else:
                noise_exc = create(SpikeSourcePoisson, {"rate": self.params["f_exc_noise"]})
                noise_inh = create(SpikeSourcePoisson, {"rate": self.params["f_inh_noise"]})
            connect(
                noise_exc, self.exc_pop[tgt], weight=self.params["w_exc_noise"], synapse_type="excitatory", delay=1.0
            )
            connect(
                noise_inh, self.exc_pop[tgt], weight=self.params["w_inh_noise"], synapse_type="inhibitory", delay=1.0
            )

        if self.pc_id == 0:
            print "Connecting noise - inh ... "
        for tgt in self.local_idx_inh:
            if self.params["simulator"] == "nest":  # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type("poisson_generator"), {"rate": self.params["f_exc_noise"]})
                noise_inh = create(native_cell_type("poisson_generator"), {"rate": self.params["f_inh_noise"]})
            else:
                noise_exc = create(SpikeSourcePoisson, {"rate": self.params["f_exc_noise"]})
                noise_inh = create(SpikeSourcePoisson, {"rate": self.params["f_inh_noise"]})
            connect(
                noise_exc, self.inh_pop[tgt], weight=self.params["w_exc_noise"], synapse_type="excitatory", delay=1.0
            )
            connect(
                noise_inh, self.inh_pop[tgt], weight=self.params["w_inh_noise"], synapse_type="inhibitory", delay=1.0
            )
        self.times["connect_noise"] = self.timer.diff()

    def run_sim(self, sim_cnt, record_v=True):
        # # # # # # # # # # # # # # # # # # # #
        #     P R I N T    W E I G H T S      #
        # # # # # # # # # # # # # # # # # # # #
        #    print 'Printing weights to :\n  %s\n  %s\n  %s' % (self.params['conn_list_ei_fn'], self.params['conn_list_ie_fn'], self.params['conn_list_ii_fn'])
        #    exc_inh_prj.saveConnections(self.params['conn_list_ei_fn'])
        #    inh_exc_prj.saveConnections(self.params['conn_list_ie_fn'])
        #    inh_inh_prj.saveConnections(self.params['conn_list_ii_fn'])
        #    self.times['t_save_conns'] = self.timer.diff()

        # # # # # # # # # # # #
        #     R E C O R D     #
        # # # # # # # # # # # #
        #    print "Recording spikes to file: %s" % (self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
        #    for cell in xrange(self.params['n_exc']):
        #        record(self.exc_pop[cell], self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
        record_exc = True
        if os.path.exists(self.params["gids_to_record_fn"]):
            gids_to_record = np.loadtxt(self.params["gids_to_record_fn"], dtype="int")[
                : self.params["n_gids_to_record"]
            ]
            record_exc = True
            n_rnd_cells_to_record = 2
        else:
            n_cells_to_record = 5  # self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params["n_exc"], n_cells_to_record)

        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label="good_exc_neurons")
            self.exc_pop_view.record_v()
            self.inh_pop_view = PopulationView(
                self.inh_pop,
                np.random.randint(0, self.params["n_inh"], self.params["n_gids_to_record"]),
                label="random_inh_neurons",
            )
            self.inh_pop_view.record_v()

        self.inh_pop.record()
        self.exc_pop.record()
        self.times["t_record"] = self.timer.diff()

        # # # # # # # # # # # # # #
        #     R U N N N I N G     #
        # # # # # # # # # # # # # #
        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params["t_sim"])
        self.times["t_sim"] = self.timer.diff()

    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #   P R I N T    R E S U L T S  #
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print "print_v to file: %s.v" % (self.params["exc_volt_fn_base"])
            self.exc_pop_view.print_v("%s.v" % (self.params["exc_volt_fn_base"]), compatible_output=False)
            if self.pc_id == 0:
                print "Printing inhibitory membrane potentials"
            self.inh_pop_view.print_v("%s.v" % (self.params["inh_volt_fn_base"]), compatible_output=False)

        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params["exc_spiketimes_fn_merged"] + ".ras")
        if self.pc_id == 0:
            print "Printing inhibitory spikes"
        self.inh_pop.printSpikes(self.params["inh_spiketimes_fn_merged"] + ".ras")

        self.times["t_print"] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times["t_end"] = self.timer.diff()

        if self.pc_id == 0:
            self.times["t_all"] = 0.0
            for k in self.times.keys():
                self.times["t_all"] += self.times[k]

            self.n_cells = {}
            self.n_cells["n_exc"] = self.params["n_exc"]
            self.n_cells["n_inh"] = self.params["n_inh"]
            self.n_cells["n_cells"] = self.params["n_cells"]
            self.n_cells["n_proc"] = self.n_proc
            output = {"times": self.times, "n_cells_proc": self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (
                self.pc_id,
                self.times["t_sim"],
                (self.times["t_sim"]) / 60.0,
                self.params["n_cells"],
                self.params["n_exc"],
                self.params["n_inh"],
            )
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (
                self.pc_id,
                self.times["t_all"],
                (self.times["t_all"]) / 60.0,
                self.params["n_cells"],
                self.params["n_exc"],
                self.params["n_inh"],
            )
            fn = utils.convert_to_url(params["folder_name"] + "times_dict_np%d.py" % self.n_proc)
            output = ntp.ParameterSet(output)
            output.save(fn)
Example #7
    def run(self, params, verbose=True):
        tmpdir = tempfile.mkdtemp()
        timer = Timer()
        timer.start()  # start timer on construction

        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        sim.setup(timestep=params.simulation.dt,
                  max_delay=params.simulation.syn_delay,
                  debug=False)

        N = params.N
        #dc_generator
        current_source = sim.DCSource(amplitude=params.snr,
                                      start=params.simulation.simtime / 4,
                                      stop=params.simulation.simtime / 4 * 3)

        # internal noise model (NEST specific)
        noise = sim.Population(N, 'noise_generator', {
            'mean': 0.,
            'std': params.noise_std
        })
        # target population
        output = sim.Population(N, sim.IF_cond_exp)

        # initialize membrane potential
        numpy.random.seed(params.simulation.kernelseed)
        V_rest, V_spike = -70., -53.
        output.tset('v_init',
                    V_rest + numpy.random.rand(N, ) * (V_spike - V_rest))
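        # membrane potentials start uniformly distributed between rest and threshold,
        # which avoids the whole population firing in lockstep at stimulus onset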

        #  Connecting the network
        conn = sim.OneToOneConnector(weights=params.weight)
        sim.Projection(noise, output, conn)

        for cell in output:
            cell.inject(current_source)

        output.record()

        # reads out time used for building
        buildCPUTime = timer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        timer.reset()  # start timer on construction
        sim.run(params.simulation.simtime)
        simCPUTime = timer.elapsedTime()

        timer.reset()  # start timer on construction

        output_filename = os.path.join(tmpdir, 'output.gdf')
        #print output_filename
        output.printSpikes(output_filename)  #
        output_DATA = load_spikelist(output_filename,
                                     N,
                                     t_start=0.0,
                                     t_stop=params.simulation.simtime)
        writeCPUTime = timer.elapsedTime()

        if verbose:
            print "\nFiber Network Simulation:"
            print "Number of Neurons  : ", N
            print "Mean Output rate    : ", output_DATA.mean_rate(
            ), "Hz during ", params.simulation.simtime, "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        os.remove(output_filename)
        os.rmdir(tmpdir)

        return output_DATA
Example #8
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "spike_train_statistics.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """

    import json
    from pyNN.utility import Timer

    print("Running")

    timer = Timer()

    g = open("spike_train_statistics.json", 'r')
    d = json.load(g)

    N = d['param']['N']
    max_rate = d['param']['max_rate']
    tstop = d['param']['tstop']
    d['SpikeSourcePoisson'] = {
        "duration": tstop
    }

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]
        d['SpikeSourcePoisson']['random'] = True
        place = mapper.place()

    timer.start()
    sim.setup(**d['setup'])

    spike_sources = sim.Population(N, sim.SpikeSourcePoisson, d['SpikeSourcePoisson'])
    delta_rate = max_rate/N
    rates = numpy.linspace(delta_rate, max_rate, N)
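    # one Poisson source per neuron, with rates evenly spaced from max_rate/N up to max_rate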
    print("Firing rates: %s" % rates)
    if PYNN07:
        spike_sources.tset("rate", rates)
    else:
        spike_sources.set(rate=rates)

    if options['simulator'] == "hardware.brainscales":
        for i, spike_source in enumerate(spike_sources):
            place.to(spike_source, hicann=i//8, neuron=i%64)
        place.commit()

    if PYNN07:
        spike_sources.record()
    else:
        spike_sources.record('spikes')

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = spike_sources.getSpikes()
        data = spike_array_to_neo(spike_array, spike_sources, tstop)
    else:
        data = spike_sources.get_data()

    sim.end()

    closing_time = timer.diff()
    times = {'setup_time': setup_time, 'run_time': run_time, 'closing_time': closing_time}

    return data, times
class NetworkModel(object):

    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print "USE_MPI: yes", '\tpc_id, n_proc:', self.pc_id, self.n_proc
        else:
            self.pc_id, self.n_proc = 0, 1
            print "MPI not used"

        np.random.seed(params['np_random_seed'] + self.pc_id)

        if self.params['with_short_term_depression']:
            self.short_term_depression = SynapseDynamics(fast=TsodyksMarkramMechanism(U=0.95, tau_rec=10.0, tau_facil=0.0))

    def import_pynn(self):
        """
        This function needs only be called when this class is used in another script as imported module
        """
        import pyNN

        exec("from pyNN.%s import *" % self.params['simulator'])
        print 'import pyNN\npyNN.version: ', pyNN.__version__



    def setup(self, load_tuning_prop=False, times={}, sim_cnt=0):

        self.projections = {}
        self.projections['ee'] = []
        self.projections['ei'] = []
        self.projections['ie'] = []
        self.projections['ii'] = []
        if not load_tuning_prop:
            self.tuning_prop_exc = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='exc')        # set the tuning properties of exc cells: space (x, y) and velocity (u, v)
            self.tuning_prop_inh = utils.set_tuning_prop(self.params, mode='hexgrid', cell_type='inh')        # set the tuning properties of inh cells: space (x, y) and velocity (u, v)
        else:
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
            self.tuning_prop_inh = np.loadtxt(self.params['tuning_prop_inh_fn'])

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print "Saving tuning_prop to file:", self.params['tuning_prop_inh_fn']
            np.savetxt(self.params['tuning_prop_inh_fn'], self.tuning_prop_inh)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = times
        self.times['t_all'] = 0
        # # # # # # # # # # # #
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()

        self.torus = space.Space(axes='xy', periodic_boundaries=((0., self.params['torus_width']), (0., self.params['torus_height'])))

    def create_neurons_with_limited_tuning_properties(self, input_created=False):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'IF_cond_alpha':
            self.exc_pop = Population(n_exc, IF_cond_alpha, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_alpha, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']

        # set cell positions, required for isotropic connections
        cell_pos_exc = np.zeros((3, self.params['n_exc']))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params['n_inh']))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh

        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]
        print 'Debug, pc_id %d has local %d exc indices:' % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc
        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize('v', self.v_init_dist)
        self.times['t_create'] = self.timer.diff()


    def create(self, input_created=False):
        """
            # # # # # # # # # # # #
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'IF_cond_alpha':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_alpha, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_alpha, self.params['cell_params_inh'], label="inh_pop")
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(self.params['n_exc'], EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        print 'Debug, pc_id %d has local %d exc indices:' % (self.pc_id, len(self.local_idx_exc)), self.local_idx_exc

        cell_pos_exc = np.zeros((3, self.params['n_exc']))
        cell_pos_exc[0, :] = self.tuning_prop_exc[:, 0]
        cell_pos_exc[1, :] = self.tuning_prop_exc[:, 1]
        self.exc_pop.positions = cell_pos_exc

        cell_pos_inh = np.zeros((3, self.params['n_inh']))
        cell_pos_inh[0, :] = self.tuning_prop_inh[:, 0]
        cell_pos_inh[1, :] = self.tuning_prop_inh[:, 1]
        self.inh_pop.positions = cell_pos_inh


        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
        self.inh_pop.initialize('v', self.v_init_dist)

        self.times['t_create'] = self.timer.diff()


    def connect(self):
        if self.params['n_exc'] > 5000:
            save_output = False
        else:
            save_output = True

        self.connect_input_to_exc()
        self.connect_populations('ee')
        self.connect_populations('ei')
        self.connect_populations('ie')
        self.connect_populations('ii')
        self.connect_noise()
        self.times['t_calc_conns'] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def get_motion_params_from_protocol(self, time):
        """

        """

        predictor_interval = int(time / self.params['predictor_interval_duration'])
        # based on the motion_protocol calculate the stimulus position and direction etc --> predictor_params
        if self.params['motion_protocol'] == 'congruent':
            x0, y0, u0, v0, theta = self.params['motion_params'][0], self.params['motion_params'][1],  self.params['motion_params'][2],  self.params['motion_params'][3], self.params['motion_params'][4]
            x, y = (x0 + u0 * time) % self.params['torus_width'], (y0 + v0 * time) % self.params['torus_height'] # current position of the blob at time t assuming a perfect translation
            predictor_params = (x, y, u0, v0, theta)

        elif self.params['motion_protocol'] == 'incongruent':
        # the incongruent protocol uses an oriented bar as stimulus whose orientation is flipped inside the CRF
            predictor_params = self.params['motion_params']
#            if (t_check < time < t_stop_check):
#                orientation = sp.params['motion_params'][:,4] + np.pi/2.0

        # The Missing CRF protocol uses a moving oriented bar which approaches the CRF and disappears inside the CRF
        # --> we give noise as input
        # --> we shuffle the stimulus among all cells to get an incoherent input (the output of the CRF will be very small)
        elif self.params['motion_protocol'] == 'Missing CRF':
            predictor_params = self.params['motion_params']
#            if (t_check < t < t_stop_check):
#                L = np.random.permutation(stimulus)

        # The CRF only protocol uses an oriented bar which moves for a short period only inside the CRF
        elif self.params['motion_protocol'] == 'CRF only':
            predictor_params = self.params['motion_params']
#            if (t_check < t < t_stop_check):
#                L = stimulus
#            else:
#                L = np.random.permutation(stimulus)
#                 L = 0

        elif self.params['motion_protocol'] == 'random predictor':
            predictor_params = self.params['motion_params']
            # we create a random sequence of orientations and segment the trajectory
#            orientation = np.random.rand(self.params['n_random_predictor_orientations']) * np.pi

        return predictor_params


    def create_input(self, load_files=False, save_output=False):


        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params['input_st_fn_base'] + str(tgt) + '.npy'
                    spike_times = np.load(fn)
                except: # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params['input_spikes_seed'])
            dt = self.params['dt_rate'] # [ms] time step for the inhomogeneous Poisson process
            time = np.arange(0, self.params['t_sim'], dt)
            blank_idx = np.arange(1./dt * self.params['t_before_blank'], 1. / dt * (self.params['t_before_blank'] + self.params['t_blank']))
            before_stim_idx = np.arange(0, self.params['t_start'] * 1./dt)
            blank_idx = np.concatenate((blank_idx, before_stim_idx))
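            # blank_idx collects the time bins (in units of dt) of the blank period and of
            # the interval before stimulus onset; the input is shuffled across cells for
            # those bins further below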

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))

            # get the input signal
            print 'Calculating input signal'
            for i_time, time_ in enumerate(time):
                predictor_params = self.get_motion_params_from_protocol(time_ / self.params['t_stimulus'])
                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, predictor_params, motion = self.params['motion_type'])
                L_input[:, i_time] *= self.params['f_max_stim']
                if (i_time % 500 == 0):
                    print "t:", time_

            # blanking
            for i_time in blank_idx:
                L_input[:, i_time] = np.random.permutation(L_input[:, i_time])
#                L_input[:, i_time] = 0.

            # create the spike trains
            print 'Creating input spiketrains for unit'
            for i_, unit in enumerate(my_units):
                rate_of_t = np.array(L_input[i_, :])
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
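                # inhomogeneous Poisson process approximated per time bin: a spike is
                # emitted in bin i with probability rate_of_t[i] (Hz) * dt (ms) / 1000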
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt)
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params['input_rate_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params['input_st_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, np.array(spike_times))

        self.times['create_input'] = self.timer.diff()
        return self.spike_times_container



    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting input spiketrains..."
        
#        self.stimulus = Population(len(self.local_idx_exc), SpikeSourceArray)
#            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
#                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
#            self.projections[conn_type].append(prj)

#        self.projections['stim'] = []
#        self.stimuli = []
#        self.pop_views = [] 
#        conn = OneToOneConnector(weights=self.params['w_input_exc'])
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
#            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            ssa = Population(1, SpikeSourceArray, {'spike_times': spike_times})
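            # each local excitatory cell is driven by its own single-cell SpikeSourceArray
            # holding the precomputed spike train; it is connected below with weight w_input_exc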
#            ssa.set({'spike_times' : spike_times})
#            self.stimuli.append(ssa)

#            if self.params['with_short_term_depression']:

#                connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory', synapse_dynamics=self.short_term_depression)
#                selector = np.zeros(self.params['n_exc'], dtype=np.bool)
#                selector[unit] = True
#                print 'debug unit', unit, type(unit)
#                w[i_] = 1.#self.params['w_input_exc']
#                tgt = PopulationView(self.exc_pop, np.array([unit]))
#                self.pop_views.append(tgt)
#                prj = Projection(ssa, tgt, conn, target='excitatory', synapse_dynamics=self.short_term_depression)
#                prj = Projection(self.stimuli[-1], self.pop_views[-1], conn, target='excitatory', synapse_dynamics=self.short_term_depression)
#                self.projections['stim'].append(prj)
#            else:
            connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory')
        self.times['connect_input'] = self.timer.diff()


    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == 'ee':
            n_src, n_tgt = self.params['n_exc'], self.params['n_exc']
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = 'excitatory'

        elif conn_type == 'ei':
            n_src, n_tgt = self.params['n_exc'], self.params['n_inh']
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = 'excitatory'

        elif conn_type == 'ie':
            n_src, n_tgt = self.params['n_inh'], self.params['n_exc']
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = 'inhibitory'

        elif conn_type == 'ii':
            n_src, n_tgt = self.params['n_inh'], self.params['n_inh']
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = 'inhibitory'

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)


    def connect_anisotropic(self, conn_type):
        """
        conn_type = ['ee', 'ei', 'ie', 'ii']
        """
        if self.pc_id == 0:
            print 'Connect anisotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)

        n_src_cells_per_neuron = int(round(self.params['p_%s' % conn_type] * n_src))
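        # p_<conn_type> acts as a convergence fraction: every target cell receives
        # round(p_<conn_type> * n_src) presynaptic partners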
        (delay_min, delay_max) = self.params['delay_range']
        local_connlist = np.zeros((n_src_cells_per_neuron * len(tgt_cells), 4))
        for i_, tgt in enumerate(tgt_cells):
            if self.params['conn_conf'] == 'direction-based':
                p, latency = CC.get_p_conn_direction_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['connectivity_radius'])
            elif self.params['conn_conf'] == 'motion-based':
                p, latency = CC.get_p_conn_motion_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['connectivity_radius'])
            elif self.params['conn_conf'] == 'orientation-direction':
                p, latency = CC.get_p_conn_direction_and_orientation_based(tp_src, tp_tgt[tgt, :], self.params['w_sigma_x'], self.params['w_sigma_v'], self.params['w_sigma_theta'], self.params['connectivity_radius'])
            else:
                print '\n\nERROR! Wrong connection configuration parameter conn_conf provided\nShould be direction-based, motion-based or orientation-direction\n'
                exit(1)

            if conn_type[0] == conn_type[1]:
                p[tgt], latency[tgt] = 0., 0.
            # random delays? --> np.permutate(latency) or latency[sources] * self.params['delay_scale'] * np.rand

            sorted_indices = np.argsort(p)
            if conn_type[0] == 'e':
                sources = sorted_indices[-n_src_cells_per_neuron:]
            else: # source = inhibitory
                if conn_type[0] == conn_type[1]:
                    sources = sorted_indices[1:n_src_cells_per_neuron+1]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron]

            eta = 1e-12
            w = (self.params['w_tgt_in_per_cell_%s' % conn_type] / (p[sources].sum() + eta)) * p[sources]
            w_ = np.minimum(np.maximum(w, self.params['w_thresh_min']), self.params['w_thresh_max'])
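            # normalise the weights so the summed input per target equals
            # w_tgt_in_per_cell_<conn_type>, then clip them to [w_thresh_min, w_thresh_max]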

            delays = np.minimum(np.maximum(latency[sources] * self.params['delay_scale'], delay_min), delay_max)  # map the delay into the valid range
            conn_list = np.array((sources, tgt * np.ones(n_src_cells_per_neuron), w_, delays))
            local_connlist[i_ * n_src_cells_per_neuron : (i_ + 1) * n_src_cells_per_neuron, :] = conn_list.transpose()
            connector = FromListConnector(conn_list.transpose())
            if self.params['with_short_term_depression']:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
            else:
                prj = Projection(src_pop, tgt_pop, connector, target=syn_type)
            self.projections[conn_type].append(prj)

        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            np.savetxt(conn_list_fn, local_connlist, fmt='%d\t%d\t%.4e\t%.4e')


    def connect_ee_random(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
            #     C O N N E C T    E X C - E X C    R A N D O M   #
            # # # # # # # # # # # # # # # # # # # # # # # # # # # #
        """

        if self.pc_id == 0:
            print 'Drawing random connections'
        sigma_x, sigma_v = self.params['w_sigma_x'], self.params['w_sigma_v']
        (delay_min, delay_max) = self.params['delay_range']
        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_ee_fn_base'] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''
        for tgt in self.local_idx_exc:
            p = np.zeros(self.params['n_exc'], dtype='float32')
            latency = np.zeros(self.params['n_exc'], dtype='float32')
            for src in xrange(self.params['n_exc']):
                if (src != tgt):
                    p[src], latency[src] = CC.get_p_conn(self.tuning_prop_exc[src, :], self.tuning_prop_exc[tgt, :], sigma_x, sigma_v, params['connectivity_radius']) #                            print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
            sources = random.sample(xrange(self.params['n_exc']), int(self.params['n_src_cells_per_neuron']))
            idx = p[sources] > 0
            non_zero_idx = np.nonzero(idx)[0]
            p_ = p[sources][non_zero_idx]
            l_ = latency[sources][non_zero_idx] * self.params['delay_scale']

            w = utils.linear_transformation(p_, self.params['w_min'], self.params['w_max'])
            for i in xrange(len(p_)):
#                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                delay = min(max(l_[i], delay_min), delay_max)  # map the delay into the valid range
                connect(self.exc_pop[non_zero_idx[i]], self.exc_pop[tgt], w[i], delay=delay, synapse_type='excitatory')
                if self.debug_connectivity:
                    output += '%d\t%d\t%.2e\t%.2e\n' % (non_zero_idx[i], tgt, w[i], delay) #                    output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])

        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()

    def connect_isotropic(self, conn_type='ee'):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = p_max * exp(- d_ij / (2 * w_sigma_isotropic**2))

        This will give a 'convergence constrained' connectivity, i.e. each cell will have the same sum of incoming weights
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print 'Connect isotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_ = self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == 'ei':
            w_ = self.params['w_ei_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ie':
            w_ = self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ii':
            w_ = self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
#            conn_file = open(conn_list_fn, 'w')
#            output = ''
#            output_dist = ''

        w_mean = w_tgt_in / (self.params['p_%s' % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params['w_sigma_distribution'] * w_mean

        w_dist = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))
        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        p_max = utils.get_pmax(self.params['p_%s' % conn_type], self.params['w_sigma_isotropic'], conn_type)
        connector = DistanceDependentProbabilityConnector('%f * exp(-d/(2*%f**2))' % (p_max, params['w_sigma_isotropic']), allow_self_connections=False, \
                weights=w_dist, delays=delay_dist, space=self.torus)#, n_connections=n_conn_ee)
        if self.params['with_short_term_depression']:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)#, synapse_dynamics=self.STD)
        self.projections[conn_type].append(prj)
        if self.debug_connectivity:
#                if self.pc_id == 0:
#                    print 'DEBUG writing to file:', conn_list_fn
            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + '.dat', gather=True)
#            prj.saveConnections(self.params['conn_list_%s_fn_base' % conn_type] + 'gid%d.dat' % tgt, gather=False)
#                conn_file.close()


#            w = np.zeros(n_src, dtype='float32')
#            delays = np.zeros(n_src, dtype='float32')
#            for src in xrange(n_src):
#                if conn_type[0] == conn_type[1]:
#                    if (src != tgt): # no self-connections / autapses
#                        d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
#                        p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
#                        if np.random.rand() <= p_ij:
#                            w[src] = w_
#                            delays[src] = d_ij * params['delay_scale']
#                else:
#                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
#                    p_ij = p_max * np.exp(-d_ij**2 / (2 * params['w_sigma_isotropic']**2))
#                    if np.random.rand() <= p_ij:
#                        w[src] = w_
#                        delays[src] = d_ij * params['delay_scale']
#            w *= w_tgt_in / w.sum()
#            srcs = w.nonzero()[0]
#            weights = w[srcs]
#            for src in srcs:
#                if w[src] > self.params['w_thresh_connection']:
#                delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
#                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
#                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay)

#        if self.debug_connectivity:
#            if self.pc_id == 0:
#                print 'DEBUG writing to file:', conn_list_fn
#            conn_file.write(output)
#            conn_file.close()


    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters to the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print 'Connect random connections %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_ = self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        elif conn_type == 'ei':
            w_ = self.params['w_ei_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ie':
            w_ = self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt

        elif conn_type == 'ii':
            w_ = self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]
            n_max_conn = n_src * n_tgt - n_tgt

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
#            conn_file = open(conn_list_fn, 'w')
#            output = ''
#            output_dist = ''

        w_mean = w_tgt_in / (self.params['p_%s' % conn_type] * n_max_conn / n_tgt)
        w_sigma = self.params['w_sigma_distribution'] * w_mean

        weight_distr = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))

        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        connector= FastFixedProbabilityConnector(self.params['p_%s' % conn_type], weights=weight_distr, delays=delay_dist)
        if self.params['with_short_term_depression']:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type, synapse_dynamics=self.short_term_depression)
        else:
            prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
        print 'Saving random %s connections to %s' % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)



    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # #
            #     C O N N E C T   #
            # # # # # # # # # # # #
            Calls the appropriate connect method according to the flag set in simulation_parameters.py
        """
        if self.params['connectivity_%s' % conn_type] == 'anisotropic':
            self.connect_anisotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'isotropic':
            self.connect_isotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'random':
            self.connect_random(conn_type)
        else: # populations do not get connected
            pass


    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # #
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # #
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
        for tgt in self.local_idx_exc:
            #new
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.exc_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.exc_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)

        if self.pc_id == 0:
            print "Connecting noise - inh ... "
        for tgt in self.local_idx_inh:
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.inh_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.inh_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)
        self.times['connect_noise'] = self.timer.diff()




    def run_sim(self, sim_cnt, record_v=False):
        # # # # # # # # # # # # # # # # # # # #
        #     P R I N T    W E I G H T S      #
        # # # # # # # # # # # # # # # # # # # #
    #    print 'Printing weights to :\n  %s\n  %s\n  %s' % (self.params['conn_list_ei_fn'], self.params['conn_list_ie_fn'], self.params['conn_list_ii_fn'])
    #    exc_inh_prj.saveConnections(self.params['conn_list_ei_fn'])
    #    inh_exc_prj.saveConnections(self.params['conn_list_ie_fn'])
    #    inh_inh_prj.saveConnections(self.params['conn_list_ii_fn'])
    #    self.times['t_save_conns'] = self.timer.diff()

        # # # # # # # # # # # #
        #     R E C O R D     #
        # # # # # # # # # # # #
    #    print "Recording spikes to file: %s" % (self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)
    #    for cell in xrange(self.params['n_exc']):
    #        record(self.exc_pop[cell], self.params['exc_spiketimes_fn_merged'] + '%d.ras' % sim_cnt)

        record_exc = True
        if os.path.exists(self.params['gids_to_record_fn']):
            gids_to_record = np.loadtxt(self.params['gids_to_record_fn'], dtype='int')[:self.params['n_gids_to_record']]
            record_exc = True
            n_rnd_cells_to_record = 2
        
        else:
            n_cells_to_record = 5  # self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params['n_exc'], n_cells_to_record)
        
        

        self.anticipatory_record = False
        if ps.params['anticipatory_mode']:
            record_gids, pops = utils.select_well_tuned_cells(self.tuning_prop_exc, self.params, self.params['n_gids_to_record'], 1)
            np.savetxt(self.params['gids_to_record_fn'], record_gids)
            self.exc_pop_view_anticipation = PopulationView(self.exc_pop, record_gids, label='anticipation')
            self.exc_pop_view_anticipation.record_v()
            self.exc_pop_view_anticipation.record_gsyn()
            self.anticipatory_record = True
        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label='good_exc_neurons')
            self.exc_pop_view.record_v()
            self.inh_pop_view = PopulationView(self.inh_pop, np.random.randint(0, self.params['n_inh'], self.params['n_gids_to_record']), label='random_inh_neurons')
            self.inh_pop_view.record_v()


        self.inh_pop.record()
        self.exc_pop.record()
        self.times['t_record'] = self.timer.diff()

        # # # # # # # # # # # # # #
        #     R U N N I N G       #
        # # # # # # # # # # # # # #
        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params['t_sim'])
        self.times['t_sim'] = self.timer.diff()

    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #   P R I N T    R E S U L T S  #
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print 'print_v to file: %s.v' % (self.params['exc_volt_fn_base'])
            self.exc_pop_view.print_v("%s.v" % (self.params['exc_volt_fn_base']), compatible_output=False)
            if self.pc_id == 0:
                print "Printing inhibitory membrane potentials"
            self.inh_pop_view.print_v("%s.v" % (self.params['inh_volt_fn_base']), compatible_output=False)

        print 'DEBUG printing anticipatory cells', self.anticipatory_record
        if self.anticipatory_record:
            print 'print_v to file: %s' % (self.params['exc_volt_anticipation'])
            self.exc_pop_view_anticipation.print_v("%s" % (self.params['exc_volt_anticipation']), compatible_output=False)
            print 'print_gsyn to file: %s' % (self.params['exc_gsyn_anticipation'])
            self.exc_pop_view_anticipation.print_gsyn("%s" % (self.params['exc_gsyn_anticipation']), compatible_output=False)


        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params['exc_spiketimes_fn_merged'] + '.ras')
        if self.pc_id == 0:
            print "Printing inhibitory spikes"
        self.inh_pop.printSpikes(self.params['inh_spiketimes_fn_merged'] + '.ras')

        self.times['t_print'] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times['t_end'] = self.timer.diff()

        if self.pc_id == 0:
            self.times['t_all'] = 0.
            for k in self.times.keys():
                self.times['t_all'] += self.times[k]

            self.n_cells = {}
            self.n_cells['n_exc'] = self.params['n_exc']
            self.n_cells['n_inh'] = self.params['n_inh']
            self.n_cells['n_cells'] = self.params['n_cells']
            self.n_cells['n_proc'] = self.n_proc
            output = {'times' : self.times, 'n_cells_proc' : self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_sim'], (self.times['t_sim'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_all'], (self.times['t_all'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            fn = utils.convert_to_url(params['folder_name'] + 'times_dict_np%d.py' % self.n_proc)
            output = ntp.ParameterSet(output)
            output.save(fn)
Example #10
    def run(self, params, verbose=True):
        """
        params are the parameters to use

        """
        tmpdir = tempfile.mkdtemp()
        myTimer = Timer()
        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        myTimer.start()  # start timer on construction
        sim.setup(timestep=params['dt'], max_delay=params['syn_delay'])
        N = params['N']
        #dc_generator
        phr_ON = sim.Population((N, ), 'dc_generator')
        phr_OFF = sim.Population((N, ), 'dc_generator')
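        # ON cells receive +snr * amplitude and OFF cells -snr * amplitude, switched on during the middle half of the simulation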

        for factor, phr in [(-params['snr'], phr_OFF),
                            (params['snr'], phr_ON)]:
            phr.tset('amplitude', params['amplitude'] * factor)
            phr.set({
                'start': params['simtime'] / 4,
                'stop': params['simtime'] / 4 * 3
            })

        # internal noise model (see benchmark_noise)
        noise_ON = sim.Population((N, ), 'noise_generator', {
            'mean': 0.,
            'std': params['noise_std']
        })
        noise_OFF = sim.Population((N, ), 'noise_generator', {
            'mean': 0.,
            'std': params['noise_std']
        })

        # target ON and OFF populations (what about a tridimensional Population?)
        out_ON = sim.Population((N, ), sim.IF_curr_alpha)
        out_OFF = sim.Population((N, ), sim.IF_curr_alpha)
        # alternative cell types tried here: IF_cond_alpha, iaf_sfa_neuron, EIF_cond_alpha_isfa_ista,
        # IF_cond_exp_gsfa_grr, iaf_cond_neuron (optionally parameterised with params['parameters_gc'])

        # initialize membrane potential TODO: and conductances?
        from pyNN.random import RandomDistribution, NumpyRNG
        rng = NumpyRNG(seed=params['kernelseed'])
        vinit_distr = RandomDistribution(distribution='uniform',
                                         parameters=[-70, -55],
                                         rng=rng)
        for out_ in [out_ON, out_OFF]:
            out_.randomInit(vinit_distr)

        retina_proj_ON = sim.Projection(phr_ON, out_ON,
                                        sim.OneToOneConnector())
        retina_proj_ON.setWeights(params['weight'])
        # TODO fix setWeight, add setDelays to 10 ms (relative to stimulus onset)
        retina_proj_OFF = sim.Projection(phr_OFF, out_OFF,
                                         sim.OneToOneConnector())
        retina_proj_OFF.setWeights(params['weight'])

        noise_proj_ON = sim.Projection(noise_ON, out_ON,
                                       sim.OneToOneConnector())
        noise_proj_ON.setWeights(params['weight'])
        noise_proj_OFF = sim.Projection(
            noise_OFF, out_OFF, sim.OneToOneConnector(
            ))  # implication if ON and OFF have the same noise input?
        noise_proj_OFF.setWeights(params['weight'])

        out_ON.record()
        out_OFF.record()

        # reads out time used for building
        buildCPUTime = myTimer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        myTimer.reset()  # restart timer for the simulation run
        sim.run(params['simtime'])
        simCPUTime = myTimer.elapsedTime()

        myTimer.reset()  # restart timer for writing the output
        # TODO LUP use something like "for pop in [phr, out]" ?
        out_ON_filename = os.path.join(tmpdir, 'out_on.gdf')
        out_OFF_filename = os.path.join(tmpdir, 'out_off.gdf')
        out_ON.printSpikes(out_ON_filename)  #
        out_OFF.printSpikes(out_OFF_filename)  #

        # TODO LUP  get out_ON_DATA on a 2D grid independently of out_ON.cell.astype(int)
        out_ON_DATA = load_spikelist(out_ON_filename,
                                     range(N),
                                     t_start=0.0,
                                     t_stop=params['simtime'])
        out_OFF_DATA = load_spikelist(out_OFF_filename,
                                      range(N),
                                      t_start=0.0,
                                      t_stop=params['simtime'])

        out = {
            'out_ON_DATA': out_ON_DATA,
            'out_OFF_DATA': out_OFF_DATA
        }  #,'out_ON_pos':out_ON}
        # cleans up
        os.remove(out_ON_filename)
        os.remove(out_OFF_filename)
        os.rmdir(tmpdir)
        writeCPUTime = myTimer.elapsedTime()

        if verbose:
            print "\nRetina Network Simulation:"
            print(params['description'])
            print "Number of Neurons  : ", N
            print "Output rate  (ON) : ", out_ON_DATA.mean_rate(
            ), "Hz/neuron in ", params['simtime'], "ms"
            print "Output rate (OFF)   : ", out_OFF_DATA.mean_rate(
            ), "Hz/neuron in ", params['simtime'], "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        return out
class NetworkModel(object):

    def __init__(self, params, comm):

        self.params = params
        self.debug_connectivity = True
        self.comm = comm
        if self.comm != None:
            self.pc_id, self.n_proc = self.comm.rank, self.comm.size
            print "USE_MPI: yes", '\tpc_id, n_proc:', self.pc_id, self.n_proc
        else:
            self.pc_id, self.n_proc = 0, 1
            print "MPI not used"


    def import_pynn(self):
        """
        This function needs only be called when this class is used in another script as imported module
        """
        import pyNN
        exec("from pyNN.%s import *" % self.params['simulator'])
        print 'import pyNN\npyNN.version: ', pyNN.__version__



    def setup(self, load_tuning_prop=False):

        if load_tuning_prop:
            print 'Loading tuning properties from', self.params['tuning_prop_means_fn']
            self.tuning_prop_exc = np.loadtxt(self.params['tuning_prop_means_fn'])
        else:
            print 'Preparing tuning properties with limited range....'
            x_range = (0, 1.)
            y_range = (0.2, .5)
            u_range = (.05, 1.0)
            v_range = (-.2, .2)
            tp_exc_good, tp_exc_out_of_range = utils.set_limited_tuning_properties(params, y_range, x_range, u_range, v_range, cell_type='exc')
            self.tuning_prop_exc = tp_exc_good
            print 'n_exc within range: ', tp_exc_good[:, 0].size
            print "Saving tuning_prop to file:", params['tuning_prop_means_fn']
            np.savetxt(params['tuning_prop_means_fn'], tp_exc_good)

        indices, distances = utils.sort_gids_by_distance_to_stimulus(self.tuning_prop_exc, self.params['motion_params'], self.params) # cells in indices should have the highest response to the stimulus
        if self.pc_id == 0:
            print "Saving tuning_prop to file:", self.params['tuning_prop_means_fn']
            np.savetxt(self.params['tuning_prop_means_fn'], self.tuning_prop_exc)
            print 'Saving gids to record to: ', self.params['gids_to_record_fn']
            np.savetxt(self.params['gids_to_record_fn'], indices[:self.params['n_gids_to_record']], fmt='%d')

#        np.savetxt(params['gids_to_record_fn'], indices[:params['n_gids_to_record']], fmt='%d')

        if self.comm != None:
            self.comm.Barrier()
        from pyNN.utility import Timer
        self.timer = Timer()
        self.timer.start()
        self.times = {}
        # # # # # # # # # # # # 
        #     S E T U P       #
        # # # # # # # # # # # #
        (delay_min, delay_max) = self.params['delay_range']
        setup(timestep=0.1, min_delay=delay_min, max_delay=delay_max, rng_seeds_seed=self.params['seed'])
        rng_v = NumpyRNG(seed = sim_cnt*3147 + self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes
        self.rng_conn = NumpyRNG(seed = self.params['seed'], parallel_safe=True) #if True, slower but does not depend on number of nodes

        # # # # # # # # # # # # # # # # # # # # # # # # #
        #     R A N D O M    D I S T R I B U T I O N S  #
        # # # # # # # # # # # # # # # # # # # # # # # # #
        self.v_init_dist = RandomDistribution('normal',
                (self.params['v_init'], self.params['v_init_sigma']),
                rng=rng_v,
                constrain='redraw',
                boundaries=(-80, -60))

        self.times['t_setup'] = self.timer.diff()
        self.times['t_calc_conns'] = 0
        if self.comm != None:
            self.comm.Barrier()

    def create_neurons_with_limited_tuning_properties(self, input_created):
        n_exc = self.tuning_prop_exc[:, 0].size
        n_inh = 0
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(n_exc, IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
            self.exc_pop = Population(n_exc, EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']


        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)
        self.exc_pop.initialize('v', self.v_init_dist)
        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

#        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
#        print 'Debug, pc_id %d has local %d inh indices:' % (self.pc_id, len(self.local_idx_inh)), self.local_idx_inh
#        self.inh_pop.initialize('v', self.v_init_dist)
        self.times['t_create'] = self.timer.diff()


    def create(self):
        """
            # # # # # # # # # # # # 
            #     C R E A T E     #
            # # # # # # # # # # # #
        """
        if self.params['neuron_model'] == 'IF_cond_exp':
            self.exc_pop = Population(self.params['n_exc'], IF_cond_exp, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], IF_cond_exp, self.params['cell_params_inh'], label="inh_pop")
#        elif self.params['neuron_model'] == 'EIF_cond_exp_isfa_ista':
        elif self.params['neuron_model'] == 'EIF_cond_alpha_isfa_ista':
            self.exc_pop = Population(self.params['n_exc'], EIF_cond_exp_isfa_ista, self.params['cell_params_exc'], label='exc_cells')
            self.inh_pop = Population(self.params['n_inh'], EIF_cond_exp_isfa_ista, self.params['cell_params_inh'], label="inh_pop")
        else:
            print '\n\nUnknown neuron model:\n\t', self.params['neuron_model']
        self.local_idx_exc = get_local_indices(self.exc_pop, offset=0)

        if not input_created:
            self.spike_times_container = [ [] for i in xrange(len(self.local_idx_exc))]

        self.exc_pop.initialize('v', self.v_init_dist)

        self.local_idx_inh = get_local_indices(self.inh_pop, offset=self.params['n_exc'])
        self.inh_pop.initialize('v', self.v_init_dist)

        self.times['t_create'] = self.timer.diff()


    def connect(self):
        self.connect_input_to_exc()
        self.connect_populations('ee')
#        self.connect_populations('ei')
#        self.connect_populations('ie')
#        self.connect_populations('ii')
        self.connect_noise()
        self.times['t_connect'] = self.timer.diff()
        if self.comm != None:
            self.comm.Barrier()

    def create_input(self, load_files=False, save_output=False):


        if load_files:
            if self.pc_id == 0:
                print "Loading input spiketrains..."
            for i_, tgt in enumerate(self.local_idx_exc):
                try:
                    fn = self.params['input_st_fn_base'] + str(tgt) + '.npy'
                    spike_times = np.load(fn)
                except: # this cell does not get any input
                    print "Missing file: ", fn
                    spike_times = []
                self.spike_times_container[i_] = spike_times
        else:
            if self.pc_id == 0:
                print "Computing input spiketrains..."
            nprnd.seed(self.params['input_spikes_seed'])
            dt = self.params['dt_rate'] # [ms] time step for the non-homogeneous Poisson process
            time = np.arange(0, self.params['t_sim'], dt)
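            # indices of the time bins during which the stimulus is blanked (the input is zeroed there below)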
            blank_idx = np.arange(1./dt * self.params['t_before_blank'], 1. / dt * (self.params['t_before_blank'] + self.params['t_blank']))

            my_units = self.local_idx_exc
            n_cells = len(my_units)
            L_input = np.zeros((n_cells, time.shape[0]))
            # get the input signal
            for i_time, time_ in enumerate(time):
                if (i_time % 500 == 0):
                    print "t:", time_
                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/1000.)
#                L_input[:, i_time] = utils.get_input(self.tuning_prop_exc[my_units, :], self.params, time_/self.params['t_stimulus'])
                L_input[:, i_time] *= self.params['f_max_stim']
            # blanking 
            for i_time in blank_idx:
                L_input[:, i_time] = 0.

            # create the spike trains
            for i_, unit in enumerate(my_units):
                rate_of_t = np.array(L_input[i_, :]) 
                # each cell will get its own spike train stored in the following file + cell gid
                n_steps = rate_of_t.size
                spike_times = []
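                # per-bin Bernoulli sampling of an inhomogeneous Poisson process:
                # spike probability per bin = rate (Hz) / 1000 * dt (ms)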
                for i in xrange(n_steps):
                    r = nprnd.rand()
                    if (r <= ((rate_of_t[i]/1000.) * dt)): # rate is given in Hz -> 1/1000.
                        spike_times.append(i * dt) 
                self.spike_times_container[i_] = spike_times
                if save_output:
                    output_fn = self.params['input_rate_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, rate_of_t)
                    output_fn = self.params['input_st_fn_base'] + str(unit) + '.npy'
                    np.save(output_fn, np.array(spike_times))
        self.times['create_input'] = self.timer.diff()
        return self.spike_times_container
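#   (sketch, not part of the original script) the per-bin sampling above could be vectorized with numpy,
#   assuming rate_of_t is the rate array in Hz and dt the bin width in ms:
#       spike_mask = nprnd.rand(rate_of_t.size) <= rate_of_t / 1000. * dt
#       spike_times = (np.nonzero(spike_mask)[0] * dt).tolist()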

    def connect_input_to_exc(self):
        """
            # # # # # # # # # # # # # # # # # # # # # # 
            #     C O N N E C T    I N P U T - E X C  #
            # # # # # # # # # # # # # # # # # # # # # # 
        """
        if self.pc_id == 0:
            print "Connecting input spiketrains..."
        for i_, unit in enumerate(self.local_idx_exc):
            spike_times = self.spike_times_container[i_]
            ssa = create(SpikeSourceArray, {'spike_times': spike_times})
            connect(ssa, self.exc_pop[unit], self.params['w_input_exc'], synapse_type='excitatory')
        self.times['connect_input'] = self.timer.diff()


    def connect_anisotropic(self, conn_type):
        """
        """
        if self.pc_id == 0:
            print 'Connect anisotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''
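        # each target cell is assigned p_conn * n_src incoming connections (the expected in-degree)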

        n_src_cells_per_neuron = int(round(self.params['p_%s' % conn_type] * n_src))
        (delay_min, delay_max) = self.params['delay_range']
        for tgt in tgt_cells:
            p = np.zeros(n_src)
            latency = np.zeros(n_src)
            for src in xrange(n_src):
                if conn_type[0] == conn_type[1]: # no self-connection
                    if (src != tgt):
                        p[src], latency[src] = CC.get_p_conn(tp_src[src, :], tp_tgt[tgt, :], params['w_sigma_x'], params['w_sigma_v'])
#                        print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
                else: # different populations --> same indices mean different cells, no check for src != tgt
                    p[src], latency[src] = CC.get_p_conn(tp_src[src, :], tp_tgt[tgt, :], params['w_sigma_x'], params['w_sigma_v'])
#                    print 'debug pc_id src tgt ', self.pc_id, src, tgt#, int(ID) < self.params['n_exc']
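            # rank the candidate sources by connection probability and keep n_src_cells_per_neuron of them
            # (for excitatory sources the highest-probability cells are chosen)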

            sorted_indices = np.argsort(p)
            if conn_type[0] == 'e':
                sources = sorted_indices[-n_src_cells_per_neuron:] 
            else:
                if conn_type == 'ii':
                    sources = sorted_indices[1:n_src_cells_per_neuron+1]  # shift indices to avoid self-connection, because p_ii = .0
                else:
                    sources = sorted_indices[:n_src_cells_per_neuron] 
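            # split the summed target input w_tgt_in among the selected sources in proportion to their connection probabilities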
            w = (self.params['w_tgt_in_per_cell_%s' % conn_type] / p[sources].sum()) * p[sources]
            for i in xrange(len(sources)):
                if w[i] > self.params['w_thresh_connection']:
#                        w[i] = max(self.params['w_min'], min(w[i], self.params['w_max']))
                    delay = min(max(latency[sources[i]] * self.params['t_stimulus'], delay_min), delay_max)  # map the delay into the valid range
    #                print 'debug ', delay , ' latency', latency[sources[i]]
    #                delay = min(max(latency[sources[i]] * self.params['delay_scale'], delay_min), delay_max)  # map the delay into the valid range
                    connect(src_pop[sources[i]], tgt_pop[tgt], w[i], delay=delay, synapse_type=syn_type)
                    if self.debug_connectivity:
                        output += '%d\t%d\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], delay)
#                        output += '%d\t%d\t%.2e\t%.2e\t%.2e\n' % (sources[i], tgt, w[i], latency[sources[i]], p[sources[i]])


        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()



    def resolve_src_tgt(self, conn_type):
        """
        Deliver the correct source and target parameters based on conn_type
        """

        if conn_type == 'ee':
            n_src, n_tgt = self.tuning_prop_exc[:, 0].size, self.tuning_prop_exc[:, 0].size
            src_pop, tgt_pop = self.exc_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_exc
            syn_type = 'excitatory'

        elif conn_type == 'ei':
            n_src, n_tgt = self.tuning_prop_exc[:, 0].size, self.tuning_prop_inh[:, 0].size
            src_pop, tgt_pop = self.exc_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_exc
            tp_tgt = self.tuning_prop_inh
            syn_type = 'excitatory'

        elif conn_type == 'ie':
            n_src, n_tgt = self.tuning_prop_inh[:, 0].size, self.tuning_prop_exc[:, 0].size
            src_pop, tgt_pop = self.inh_pop, self.exc_pop
            tgt_cells = self.local_idx_exc
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_exc
            syn_type = 'inhibitory'

        elif conn_type == 'ii':
            n_src, n_tgt = self.tuning_prop_inh[:, 0].size, self.tuning_prop_inh[:, 0].size
            src_pop, tgt_pop = self.inh_pop, self.inh_pop
            tgt_cells = self.local_idx_inh
            tp_src = self.tuning_prop_inh
            tp_tgt = self.tuning_prop_inh
            syn_type = 'inhibitory'

        return (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type)





    def connect_isotropic(self, conn_type='ee'):
        """
        conn_type must be 'ee', 'ei', 'ie' or 'ii'
        Connect cells in a distance-dependent manner:
            p_ij = exp(- d_ij / (2 * w_sigma_x**2))

        This will give a 'convergence constrained' connectivity, i.e. each cell will have the same sum of incoming weights 
        ---> could be problematic for outlier cells
        """
        if self.pc_id == 0:
            print 'Connect isotropic %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())

        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
        if conn_type == 'ee':
            w_ = self.params['w_max']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ei':
            w_ = self.params['w_ei_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ie':
            w_ = self.params['w_ie_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        elif conn_type == 'ii':
            w_ = self.params['w_ii_mean']
            w_tgt_in = params['w_tgt_in_per_cell_%s' % conn_type]

        if self.debug_connectivity:
            conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
            conn_file = open(conn_list_fn, 'w')
            output = ''

        p_max = utils.get_pmax(self.params['p_%s' % conn_type])
        for tgt in tgt_cells:
            w = np.zeros(n_src, dtype='float32') 
            delays = np.zeros(n_src, dtype='float32')
            for src in xrange(n_src):
                if (src != tgt):
#                    d_ij = np.sqrt((tp_src[src, 0] - tp_tgt[tgt, 0])**2 + (tp_src[src, 1] - tp_tgt[tgt, 1])**2)
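                    # distance on a torus (periodic boundary conditions), converted into a connection probability below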
                    d_ij = utils.torus_distance2D(tp_src[src, 0], tp_tgt[tgt, 0], tp_src[src, 1], tp_tgt[tgt, 1])
                    p_ij = p_max * np.exp(-d_ij / (2 * params['w_sigma_x']**2))
                    if np.random.rand() <= p_ij:
                        w[src] = w_
                        delays[src] = d_ij * self.params['delay_scale']
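            # convergence constraint: rescale so that the summed incoming weight per target equals w_tgt_in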
            w *= w_tgt_in / w.sum()
            srcs = w.nonzero()[0]
            for src in srcs:
                if w[src] > self.params['w_thresh_connection']:
                    delay = min(max(delays[src], self.params['delay_range'][0]), self.params['delay_range'][1])  # map the delay into the valid range
                    connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=delay, synapse_type=syn_type)
                    output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], delay) 
    #                connect(src_pop[int(src)], tgt_pop[int(tgt)], w[src], delay=params['standard_delay'], synapse_type=syn_type)
    #                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w[src], params['standard_delay']) 
                    
        if self.debug_connectivity:
            if self.pc_id == 0:
                print 'DEBUG writing to file:', conn_list_fn
            conn_file.write(output)
            conn_file.close()

#   isotropic nearest neighbour code:
#        for tgt in tgt_cells:
#            n_src_to_choose = int(round(p_max * n_src)) # guarantee that all cells have same number of connections
#            dist = np.zeros(n_src, dtype='float32')
#            for src in xrange(n_src):
#                if (src != tgt):
#                    dist[src] = np.sqrt((tp_src[src, 0] - tp_tgt[tgt, 0])**2 + (tp_src[src, 1] - tp_tgt[tgt, 1])**2)
#            src_idx = dist.argsort()[:n_src_to_choose] # choose cells closest to the target
#            for src in src_idx:
#                connect(src_pop[int(src)], tgt_pop[int(tgt)], w_, delay=params['standard_delay'], synapse_type='excitatory')
#                output += '%d\t%d\t%.2e\t%.2e\n' % (src, tgt, w_, params['standard_delay']) 



    def connect_random(self, conn_type):
        """
        There exist different possibilities to draw random connections:
        1) Calculate the weights as for the anisotropic case and sample sources randomly
        2) Load a file which stores some random connectivity --> # connector = FromFileConnector(self.params['conn_list_.... ']
        3) Create a random distribution with similar parameters to the non-random connectivity distribution

        connector_ee = FastFixedProbabilityConnector(self.params['p_ee'], weights=w_ee_dist, delays=self.delay_dist)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')

        conn_list_fn = self.params['random_weight_list_fn'] + str(sim_cnt) + '.dat'
        print "Connecting exc - exc from file", conn_list_fn
        connector_ee = FromFileConnector(conn_list_fn)
        prj_ee = Projection(self.exc_pop, self.exc_pop, connector_ee, target='excitatory')
        """
        if self.pc_id == 0:
            print 'Connect random connections %s - %s' % (conn_type[0].capitalize(), conn_type[1].capitalize())
        (n_src, n_tgt, src_pop, tgt_pop, tp_src, tp_tgt, tgt_cells, syn_type) = self.resolve_src_tgt(conn_type)
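        # mean weight = desired summed input per target cell divided by the expected in-degree (n_src * p_conn)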
        w_mean = self.params['w_tgt_in_per_cell_%s' % conn_type] / (n_src * self.params['p_%s' % conn_type])
        w_sigma = w_mean * .5 * (self.params['w_sigma_x'] + self.params['w_sigma_v'])

        weight_distr = RandomDistribution('normal',
                (w_mean, w_sigma),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(0, w_mean * 10.))

        delay_dist = RandomDistribution('normal',
                (self.params['standard_delay'], self.params['standard_delay_sigma']),
                rng=self.rng_conn,
                constrain='redraw',
                boundaries=(self.params['delay_range'][0], self.params['delay_range'][1]))

        connector = FastFixedProbabilityConnector(self.params['p_%s' % conn_type], weights=weight_distr, delays=delay_dist)
        prj = Projection(src_pop, tgt_pop, connector, target=syn_type)

        conn_list_fn = self.params['conn_list_%s_fn_base' % conn_type] + '%d.dat' % (self.pc_id)
        print 'Saving random %s connections to %s' % (conn_type, conn_list_fn)
        prj.saveConnections(conn_list_fn, gather=False)



    def connect_populations(self, conn_type):
        """
            # # # # # # # # # # # # 
            #     C O N N E C T   #
            # # # # # # # # # # # # 
            Calls the right connection method according to the flag set in simulation_parameters.py
        """
        if self.params['connectivity_%s' % conn_type] == 'anisotropic':
            self.connect_anisotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'isotropic':
            self.connect_isotropic(conn_type)
        elif self.params['connectivity_%s' % conn_type] == 'random':
            self.connect_random(conn_type)
        else: # populations do not get connected
            pass
        self.times['t_calc_conns'] += self.timer.diff()


    def connect_noise(self):
        """
            # # # # # # # # # # # # # # # # 
            #     N O I S E   I N P U T   #
            # # # # # # # # # # # # # # # # 
        """
        if self.pc_id == 0:
            print "Connecting noise - exc ... "
        noise_pop_exc = []
        noise_pop_inh = []
        for tgt in self.local_idx_exc:
            #new
            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
            else:
                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
            connect(noise_exc, self.exc_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
            connect(noise_inh, self.exc_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)

#        if self.pc_id == 0:
#            print "Connecting noise - inh ... "
#        for tgt in self.local_idx_inh:
#            if (self.params['simulator'] == 'nest'): # for nest one can use the optimized Poisson generator
#                noise_exc = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_exc_noise']})
#                noise_inh = create(native_cell_type('poisson_generator'), {'rate' : self.params['f_inh_noise']})
#            else:
#                noise_exc = create(SpikeSourcePoisson, {'rate' : self.params['f_exc_noise']})
#                noise_inh = create(SpikeSourcePoisson, {'rate' : self.params['f_inh_noise']})
#            connect(noise_exc, self.inh_pop[tgt], weight=self.params['w_exc_noise'], synapse_type='excitatory', delay=1.)
#            connect(noise_inh, self.inh_pop[tgt], weight=self.params['w_inh_noise'], synapse_type='inhibitory', delay=1.)





    def run_sim(self, sim_cnt, record_v=True):
        # # # # # # # # # # # # # # # # # # # #
        #     P R I N T    W E I G H T S      # 
        # # # # # # # # # # # # # # # # # # # #
        record_exc = True
        if os.path.exists(self.params['gids_to_record_fn']):
            gids_to_record = np.loadtxt(self.params['gids_to_record_fn'], dtype='int')[:self.params['n_gids_to_record']]
            record_exc = True
            n_rnd_cells_to_record = 2
        else:
            n_cells_to_record = 5  # self.params['n_exc'] * 0.02
            gids_to_record = np.random.randint(0, self.params['n_exc'], n_cells_to_record)


        if record_v:
            self.exc_pop_view = PopulationView(self.exc_pop, gids_to_record, label='good_exc_neurons')
            self.exc_pop_view.record_v()

        self.exc_pop.record()
        self.times['t_record'] = self.timer.diff()

        if self.pc_id == 0:
            print "Running simulation ... "
        run(self.params['t_sim'])
        self.times['t_sim'] = self.timer.diff()


    def print_results(self, print_v=True):
        """
            # # # # # # # # # # # # # # # # #
            #     P R I N T    R E S U L T S #
            # # # # # # # # # # # # # # # # #
        """
        if print_v:
            if self.pc_id == 0:
                print 'print_v to file: %s.v' % (self.params['exc_volt_fn_base'])
            self.exc_pop_view.print_v("%s.v" % (self.params['exc_volt_fn_base']), compatible_output=False)

        if self.pc_id == 0:
            print "Printing excitatory spikes"
        self.exc_pop.printSpikes(self.params['exc_spiketimes_fn_merged'] + '.ras')
        # print a dummy file for inhibitory
        np.savetxt(self.params['inh_spiketimes_fn_merged'] + '.ras', np.array([]))

        self.times['t_print'] = self.timer.diff()
        if self.pc_id == 0:
            print "calling pyNN.end() ...."
        end()
        self.times['t_end'] = self.timer.diff()

        if self.pc_id == 0:
            self.times['t_all'] = 0.
            for k in self.times.keys():
                self.times['t_all'] += self.times[k]

            self.n_cells = {}
            self.n_cells['n_exc'] = self.params['n_exc']
            self.n_cells['n_inh'] = self.params['n_inh']
            self.n_cells['n_cells'] = self.params['n_cells']
            self.n_cells['n_proc'] = self.n_proc
            output = {'times' : self.times, 'n_cells_proc' : self.n_cells}
            print "Proc %d Simulation time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_sim'], (self.times['t_sim'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            print "Proc %d Full pyNN run time: %d sec or %.1f min for %d cells (%d exc %d inh)" % (self.pc_id, self.times['t_all'], (self.times['t_all'])/60., self.params['n_cells'], self.params['n_exc'], self.params['n_inh'])
            fn = utils.convert_to_url(params['folder_name'] + 'times_dict_np%d.py' % self.n_proc)
            output = ntp.ParameterSet(output)
            output.save(fn)
Example #12
    def run(self, params, verbose=True):
        """
        params are the parameters to use

        """
        tmpdir = tempfile.mkdtemp()
        myTimer = Timer()
        # === Build the network ========================================================
        if verbose:
            print "Setting up simulation"
        myTimer.start()  # start timer on construction
        sim.setup(timestep=params["dt"], max_delay=params["syn_delay"])
        N = params["N"]
        # dc_generator
        phr_ON = sim.Population((N,), "dc_generator")
        phr_OFF = sim.Population((N,), "dc_generator")

        for factor, phr in [(-params["snr"], phr_OFF), (params["snr"], phr_ON)]:
            phr.tset("amplitude", params["amplitude"] * factor)
            phr.set({"start": params["simtime"] / 4, "stop": params["simtime"] / 4 * 3})

        # internal noise model (see benchmark_noise)
        noise_ON = sim.Population((N,), "noise_generator", {"mean": 0.0, "std": params["noise_std"]})
        noise_OFF = sim.Population((N,), "noise_generator", {"mean": 0.0, "std": params["noise_std"]})

        # target ON and OFF populations (what about a tridimensional Population?)
        out_ON = sim.Population((N,), sim.IF_curr_alpha)
        out_OFF = sim.Population((N,), sim.IF_curr_alpha)
        # alternative cell types tried here: IF_cond_alpha, iaf_sfa_neuron, EIF_cond_alpha_isfa_ista,
        # IF_cond_exp_gsfa_grr, iaf_cond_neuron (optionally parameterised with params["parameters_gc"])

        # initialize membrane potential TODO: and conductances?
        from pyNN.random import RandomDistribution, NumpyRNG

        rng = NumpyRNG(seed=params["kernelseed"])
        vinit_distr = RandomDistribution(distribution="uniform", parameters=[-70, -55], rng=rng)
        for out_ in [out_ON, out_OFF]:
            out_.randomInit(vinit_distr)

        retina_proj_ON = sim.Projection(phr_ON, out_ON, sim.OneToOneConnector())
        retina_proj_ON.setWeights(params["weight"])
        # TODO fix setWeight, add setDelays to 10 ms (relative to stimulus onset)
        retina_proj_OFF = sim.Projection(phr_OFF, out_OFF, sim.OneToOneConnector())
        retina_proj_OFF.setWeights(params["weight"])

        noise_proj_ON = sim.Projection(noise_ON, out_ON, sim.OneToOneConnector())
        noise_proj_ON.setWeights(params["weight"])
        noise_proj_OFF = sim.Projection(
            noise_OFF, out_OFF, sim.OneToOneConnector()
        )  # implication if ON and OFF have the same noise input?
        noise_proj_OFF.setWeights(params["weight"])

        out_ON.record()
        out_OFF.record()

        # reads out time used for building
        buildCPUTime = myTimer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose:
            print "Running simulation"

        myTimer.reset()  # restart timer for the simulation run
        sim.run(params["simtime"])
        simCPUTime = myTimer.elapsedTime()

        myTimer.reset()  # restart timer for writing the output
        # TODO LUP use something like "for pop in [phr, out]" ?
        out_ON_filename = os.path.join(tmpdir, "out_on.gdf")
        out_OFF_filename = os.path.join(tmpdir, "out_off.gdf")
        out_ON.printSpikes(out_ON_filename)  #
        out_OFF.printSpikes(out_OFF_filename)  #

        # TODO LUP  get out_ON_DATA on a 2D grid independently of out_ON.cell.astype(int)
        out_ON_DATA = load_spikelist(out_ON_filename, range(N), t_start=0.0, t_stop=params["simtime"])
        out_OFF_DATA = load_spikelist(out_OFF_filename, range(N), t_start=0.0, t_stop=params["simtime"])

        out = {"out_ON_DATA": out_ON_DATA, "out_OFF_DATA": out_OFF_DATA}  # ,'out_ON_pos':out_ON}
        # cleans up
        os.remove(out_ON_filename)
        os.remove(out_OFF_filename)
        os.rmdir(tmpdir)
        writeCPUTime = myTimer.elapsedTime()

        if verbose:
            print "\nRetina Network Simulation:"
            print (params["description"])
            print "Number of Neurons  : ", N
            print "Output rate  (ON) : ", out_ON_DATA.mean_rate(), "Hz/neuron in ", params["simtime"], "ms"
            print "Output rate (OFF)   : ", out_OFF_DATA.mean_rate(), "Hz/neuron in ", params["simtime"], "ms"
            print ("Build time             : %g s" % buildCPUTime)
            print ("Simulation time        : %g s" % simCPUTime)
            print ("Writing time           : %g s" % writeCPUTime)

        return out
def runNetwork(Be, 
               Bi, 
               nn_stim, 
               show_gui=True,
               dt = defaultParams.dt, 
               N_rec_v = 5, 
               save=False, 
               simtime = defaultParams.Tpost+defaultParams.Tstim+defaultParams.Tblank+defaultParams.Ttrans, 
               extra = {},
               kernelseed = 123):
    
    exec("from pyNN.%s import *" % simulator_name) in globals()
    
    timer = Timer()

    rec_conn={'EtoE':1, 'EtoI':1, 'ItoE':1, 'ItoI':1}

    print('####################')
    print('### (Be, Bi, nn_stim): ', Be, Bi, nn_stim)
    print('####################')

    Bee, Bei = Be, Be
    Bie, Bii = Bi, Bi

    N = defaultParams.N
    NE = defaultParams.NE
    NI = defaultParams.NI

    print('\n # -----> Num cells: %s, size of pert. inh: %s; base rate %s; pert rate %s'% (N, nn_stim, defaultParams.r_bkg, defaultParams.r_stim))

    r_extra = np.zeros(N)
    r_extra[NE:NE+nn_stim] = defaultParams.r_stim
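    # rr1/rr2 below: per-cell background rates jittered by +/- 25 %, rr2 additionally carries the stimulus rate for the nn_stim perturbed inhibitory cells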

    rr1 = defaultParams.r_bkg*np.random.uniform(.75,1.25, N)
    rr2 = rr1 + r_extra
    
    rank = setup(timestep=dt, max_delay=defaultParams.delay_default, reference='ISN', save_format='hdf5', **extra)
    
    print("rank =", rank)
    nump = num_processes()
    print("num_processes =", nump)
    import socket
    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank+1, host_name))

    if 'threads' in extra:
        print("%d Initialising the simulator with %d threads..." % (rank, extra['threads']))
    else:
        print("%d Initialising the simulator with single thread..." % rank)
        
        
    timer.start()  # start timer on construction
    
    print("%d Setting up random number generator using seed %s" % (rank, kernelseed))
    
    ks = open('kernelseed','w')
    ks.write('%i'%kernelseed)
    ks.close()
    
    rng = NumpyRNG(kernelseed, parallel_safe=True)
    
    
    nesp = defaultParams.neuron_params_default
    cell_parameters = {
        'cm':         nesp['C_m']/1000,   # Capacitance of the membrane in nF
        'tau_refrac': nesp['t_ref'],     # Duration of refractory period in ms.
        'v_spike':    0.0 ,     # Spike detection threshold in mV.   https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'v_reset':    nesp['V_reset'],     # Reset value for V_m after a spike. In mV.
        'v_rest':     nesp['E_L'],     # Resting membrane potential (Leak reversal potential) in mV.
        'tau_m':      nesp['C_m']/nesp['g_L'],  # Membrane time constant in ms = cm/tau_m*1000.0, C_m/g_L
        'i_offset':   nesp['I_e']/1000,     # Offset current in nA
        'a':          0,     # Subthreshold adaptation conductance in nS.
        'b':          0,  # Spike-triggered adaptation in nA
        'delta_T':    2 ,     # Slope factor in mV. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'tau_w':      144.0,     # Adaptation time constant in ms. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp
        'v_thresh':   nesp['V_th'],     # Spike initiation threshold in mV
        'e_rev_E':    nesp['E_ex'],     # Excitatory reversal potential in mV.
        'tau_syn_E':  nesp['tau_syn_ex'],     # Rise time of excitatory synaptic conductance in ms (alpha function).
        'e_rev_I':    nesp['E_in'],     # Inhibitory reversal potential in mV.
        'tau_syn_I':  nesp['tau_syn_in'],     # Rise time of the inhibitory synaptic conductance in ms (alpha function).
    }

    print("%d Creating population with %d neurons." % (rank, N))
    celltype = EIF_cond_alpha_isfa_ista(**cell_parameters)
    celltype.default_initial_values['v'] = cell_parameters['v_rest'] # Setting default init v, useful for NML2 export
    
    layer_volume = Cuboid(1000,100,1000)
    layer_structure = RandomStructure(layer_volume, origin=(0,0,0))
    
    layer_structure_input = RandomStructure(layer_volume, origin=(0,-150,0))
             
    default_cell_radius = 15
    stim_cell_radius = 10
    
    #EI_pop = Population(N, celltype, structure=layer_structure, label="EI")
    E_pop = Population(NE, celltype, structure=layer_structure, label='E_pop')
    E_pop.annotate(color='1 0 0')
    E_pop.annotate(radius=default_cell_radius)
    E_pop.annotate(type='E') # temp indicator to use for connection arrowhead
    #print("%d Creating pop %s." % (rank, E_pop))
    I_pop = Population(NI, celltype, structure=layer_structure, label='I_pop')
    I_pop.annotate(color='0 0 .9')
    I_pop.annotate(radius=default_cell_radius)
    I_pop.annotate(type='I') # temp indicator to use for connection arrowhead
    #print("%d Creating pop %s." % (rank, I_pop))
    
    I_pert_pop = PopulationView(I_pop, np.array(range(0,nn_stim)),label='I_pert_pop')
    I_nonpert_pop = PopulationView(I_pop, np.array(range(nn_stim,NI)),label='I_nonpert_pop')
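    # Stimulation schedule: source A drives all E cells for the whole run and all I cells until the end of the blank period;
    # source B drives the non-perturbed I cells from the end of the blank onwards; source C adds r_stim on top of the
    # background for the perturbed I cells during the stimulus window; source D returns them to background afterwards.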
    
    p_rate = defaultParams.r_bkg
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeA_E = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim+defaultParams.Tpost)
    expoissonA_E = Population(NE, source_typeA_E, structure=layer_structure_input, label="stim_E")
    
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeA_I = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank)
    expoissonA_I = Population(NI, source_typeA_I, structure=layer_structure_input, label="pre_pert_stim_I")
    
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeB = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank,duration=defaultParams.Tstim+defaultParams.Tpost)
    #expoissonB_E = Population(NE, source_typeB, label="non_pert_stim_E")
    expoissonB_I = Population(len(I_nonpert_pop), source_typeB, structure=layer_structure_input, label="non_pert_stim_I")
    
    p_rate = defaultParams.r_bkg+defaultParams.r_stim
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeC = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank, duration=defaultParams.Tstim)
    expoissonC = Population(nn_stim, source_typeC, structure=layer_structure_input, label="pert_stim")

    p_rate = defaultParams.r_bkg
    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_typeD = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim, duration=defaultParams.Tpost)
    expoissonD = Population(nn_stim, source_typeD, structure=layer_structure_input, label="pert_poststim")
    
    for p in [expoissonA_E,expoissonA_I,expoissonB_I,expoissonC,expoissonD]:
        p.annotate(color='0.8 0.8 0.8')
        p.annotate(radius=stim_cell_radius)

    progress_bar = ProgressBar(width=20)
    connector_E = FixedProbabilityConnector(0.15, rng=rng, callback=progress_bar)
    connector_I = FixedProbabilityConnector(1, rng=rng, callback=progress_bar)
    
    EE_syn = StaticSynapse(weight=0.001*Bee, delay=defaultParams.delay_default)
    EI_syn = StaticSynapse(weight=0.001*Bei, delay=defaultParams.delay_default)
    II_syn = StaticSynapse(weight=0.001*Bii, delay=defaultParams.delay_default)
    IE_syn = StaticSynapse(weight=0.001*Bie, delay=defaultParams.delay_default)
    
    #I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn_bkg = StaticSynapse(weight=0.001*defaultParams.Be_bkg, delay=defaultParams.delay_default)
    ext_syn_stim = StaticSynapse(weight=0.001*defaultParams.Be_stim, delay=defaultParams.delay_default)
    
    
    E_to_E = Projection(E_pop, E_pop, connector_E, EE_syn, receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    E_to_I = Projection(E_pop, I_pop, connector_E, EI_syn, receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_pop, I_pop, connector_I, II_syn, receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    I_to_E = Projection(I_pop, E_pop, connector_I, IE_syn, receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    
    
    input_A_E = Projection(expoissonA_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(E_pop), len(input_A_E), "connections")
    input_A_I = Projection(expoissonA_I, I_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pop), len(input_A_I), "connections")
    
    ##input_B_E = Projection(expoissonB_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    ##print("input --> %s cells post pert\t"%len(E_pop), len(input_B_E), "connections")
    
    input_B_I = Projection(expoissonB_I, I_nonpert_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory")
    print("input --> %s cells post pert\t"%len(I_nonpert_pop), len(input_B_I), "connections")
    
    
    input_C = Projection(expoissonC, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_C), "connections")
    
    input_D = Projection(expoissonD, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory")
    print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_D), "connections")
    
    # Can't be used for connections etc. as NeuroML export not (yet) supported
    EI_pop = Assembly(E_pop, I_pop, label='EI')
    
    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    EI_pop.record('spikes')
    if N_rec_v>0:
        EI_pop[0:min(N,N_rec_v)].record('v')
    
    
    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # restart timer for the simulation run
    print("%d Running simulation in %s for %g ms (dt=%sms)." % (rank, simulator_name, simtime, dt))
    run(simtime)
    print("Done")
    simCPUTime = timer.elapsedTime()
    
    # write data to file
    if save and not simulator_name=='neuroml':
        for pop in [EI_pop]:
            filename="ISN-%s-%s-%i.gdf"%(simulator_name, pop.label, rank)
            ff = open(filename, 'w')
            spikes =  pop.get_data('spikes', gather=False)
            spiketrains = spikes.segments[0].spiketrains
            print('Saving data recorded for %i spiketrains in pop %s, indices: %s, ids: %s to %s'% \
                (len(spiketrains),
                 pop.label, 
                 [s.annotations['source_index'] for s in spiketrains], 
                 [s.annotations['source_id'] for s in spiketrains], 
                 filename))
                 
            for spiketrain_i in range(len(spiketrains)):
                spiketrain = spiketrains[spiketrain_i]
                source_id = spiketrain.annotations['source_id']
                source_index = spiketrain.annotations['source_index']
                #print("Writing spike data for cell %s[%s] (gid: %i): %i spikes: [%s,...,%s] "%(pop.label,source_index, source_id, len(spiketrain),spiketrain[0],spiketrain[-1]))
                for t in spiketrain:
                    ff.write('%s\t%i\n'%(t.magnitude,spiketrain_i))
            ff.close()
                
            vs =  pop.get_data('v', gather=False)
            for segment in vs.segments:
                for i in range(len(segment.analogsignals[0].transpose())):
                    filename="ISN-%s-%s-cell%i.dat"%(simulator_name, pop.label, i)
                    print('Saving cell %i in %s to %s'%(i,pop.label,filename))
                    vm = segment.analogsignals[0].transpose()[i]
                    tt = np.array([t*dt/1000. for t in range(len(vm))])
                    times_vm = np.array([tt, vm/1000.]).transpose()
                    np.savetxt(filename, times_vm , delimiter = '\t', fmt='%s')
            
    spike_data = {}
    spike_data['senders'] = []
    spike_data['times'] = []
    index_offset = 1
    for pop in [EI_pop]:
        if rank == 0:
            spikes =  pop.get_data('spikes', gather=False)
            #print(spikes.segments[0].all_data)
            num_rec = len(spikes.segments[0].spiketrains)
            print("Extracting spike info (%i) for %i cells in %s"%(num_rec,pop.size,pop.label))
            #assert(num_rec==len(spikes.segments[0].spiketrains))
            for i in range(num_rec):
                ss = spikes.segments[0].spiketrains[i]
                for s in ss:
                    index = i+index_offset
                    #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                    spike_data['senders'].append(index)
                    spike_data['times'].append(s)
            index_offset+=pop.size


    print("Build time         : %g s" % buildCPUTime)
    print("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()
Example #14
    def test_va_benchmark(self):

        simulator_name = 'spiNNaker'

        timer = Timer()

        # === Define parameters ========================================================

        rngseed = 98766987
        parallel_safe = True

        n = 1500  # number of cells
        r_ei = 4.0   # number of excitatory cells:number of inhibitory cells
        pconn = 0.02  # connection probability

        dt = 0.1        # (ms) simulation timestep
        tstop = 200    # (ms) simulaton duration
        delay = 1

        # Cell parameters
        area = 20000.  # (µm²)
        tau_m = 20.    # (ms)
        cm = 1.     # (µF/cm²)
        g_leak = 5e-5   # (S/cm²)
        e_leak = -49.  # (mV)
        v_thresh = -50.   # (mV)
        v_reset = -60.   # (mV)
        t_refrac = 5.     # (ms) (clamped at v_reset)
        v_mean = -60.   # (mV) 'mean' membrane potential, for calculating CUBA weights
        tau_exc = 5.     # (ms)
        tau_inh = 10.    # (ms)

        g_exc = 0.27   # (nS) #Those weights should be similar to the COBA weights
        g_inh = 4.5    # (nS) # but the delpolarising drift should be taken into account
        e_rev_exc = 0.     # (mV)
        e_rev_inh = -80.   # (mV)

        # === Calculate derived parameters =============================================

        area *= 1e-8                     # convert to cm²
        cm *= area * 1000                  # convert to nF
        r_m = 1e-6 / (g_leak * area)            # membrane resistance in MΩ
        assert tau_m == cm * r_m                 # just to check

        n_exc = int(round((n * r_ei / (1 + r_ei))))  # number of excitatory cells
        n_inh = n - n_exc                     # number of inhibitory cells

        print n_exc, n_inh

        celltype = IF_curr_exp
        w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)  # (nA) weight of excitatory synapses
        w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
        assert w_exc > 0
        assert w_inh < 0

        # === Build the network ========================================================

        setup(timestep=dt, min_delay=delay, max_delay=delay)

        if simulator_name == 'spiNNaker':
            set_number_of_neurons_per_core('IF_curr_exp', 100)      # this will set 100 neurons per core
            set_number_of_neurons_per_core('IF_cond_exp', 50)      # this will set 50 neurons per core

        node_id = 1
        np = 1

        host_name = socket.gethostname()
        print "Host #%d is on %s" % (np, host_name)

        cell_params = {
            'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh,
            'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh,
            'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0}

        print cell_params

        timer.start()

        print "%s Creating cell populations..." % node_id
        exc_cells = Population(n_exc, celltype, cell_params,
                               label="Excitatory_Cells")
        inh_cells = Population(n_inh, celltype, cell_params,
                               label="Inhibitory_Cells")
        NativeRNG(12345)

        print "%s Initialising membrane potential to random values..." % node_id
        rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
        uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh],
                                           rng=rng)
        exc_cells.initialize('v', uniform_distr)
        inh_cells.initialize('v', uniform_distr)

        print "%s Connecting populations..." % node_id
        exc_conn = FixedProbabilityConnector(pconn, weights=w_exc, delays=delay)
        inh_conn = FixedProbabilityConnector(pconn, weights=w_inh, delays=delay)

        connections = dict()
        connections['e2e'] = Projection(exc_cells, exc_cells, exc_conn,
                                        target='excitatory', rng=rng)
        connections['e2i'] = Projection(exc_cells, inh_cells, exc_conn,
                                        target='excitatory', rng=rng)
        connections['i2e'] = Projection(inh_cells, exc_cells, inh_conn,
                                        target='inhibitory', rng=rng)
        connections['i2i'] = Projection(inh_cells, inh_cells, inh_conn,
                                        target='inhibitory', rng=rng)

        # === Setup recording ==============================
        print "%s Setting up recording..." % node_id
        exc_cells.record()

        # === Run simulation ================================
        print "%d Running simulation..." % node_id

        print "timings: number of neurons:", n
        print "timings: number of synapses:", n * n * pconn

        run(tstop)

        # === Print results to file ===============================

        print "%d Writing data to file..." % node_id

        if not(os.path.isdir('Results')):
            os.mkdir('Results')

        exc_spikes = exc_cells.getSpikes()

        current_file_path = os.path.dirname(os.path.abspath(__file__))
        current_file_path = os.path.join(current_file_path, "spikes.data")
        #  exc_cells.printSpikes(current_file_path)
        pre_recorded_spikes = utility_calls.read_spikes_from_file(
            current_file_path, 0, n_exc, 0, tstop)

        end()

        for spike_element, read_element in zip(exc_spikes, pre_recorded_spikes):
                self.assertEqual(round(spike_element[0], 1),
                                 round(read_element[0], 1))
                self.assertEqual(round(spike_element[1], 1),
                                 round(read_element[1], 1))
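The CUBA weights in the benchmark above are a conductance (nS) multiplied by the driving force (mV), giving pA, then scaled by 1e-3 to nA. A quick stand-alone check of the numbers used above:

# Stand-alone arithmetic check of the CUBA weight conversion used above:
# weight (nA) = 1e-3 * g (nS) * (E_rev - V_mean) (mV), since nS * mV = pA.
g_exc, g_inh = 0.27, 4.5           # nS
e_rev_exc, e_rev_inh = 0., -80.    # mV
v_mean = -60.                      # mV
w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)   # 0.0162 nA
w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)   # -0.09 nA
assert w_exc > 0 and w_inh < 0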
Example #15
def test(cases=[1]):    
    
    sp            = Space(periodic_boundaries=((0,1), (0,1), None))
    safe          = False
    verbose       = True
    autapse       = False
    parallel_safe = True    
    render        = True
        
    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2) 
        
        #delay = RandomDistribution('uniform', (0.1,5.))
        delay = "0.1 + d/0.2"
        #delay = 0.1    
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances 
    
        d_expression = "d < 0.1"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"
    
        timer   = Timer()
        np      = num_processes()
        timer.start()    
        if case == 1:
            conn  = DistanceDependentProbabilityConnector(d_expression, delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "DistanceDependent_%s_np_%d.png" %(simulator_name, np)
        elif case == 2:
            conn  = FixedProbabilityConnector(0.05, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedProbability_%s_np_%d.png" %(simulator_name, np)
        elif case == 3:
            conn  = AllToAllConnector(delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" %(simulator_name, np)
        elif case == 4:
            conn  = FixedNumberPostConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedNumberPost_%s_np_%d.png" %(simulator_name, np)
        elif case == 5:
            conn  = FixedNumberPreConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse)
            fig_name = "FixedNumberPre_%s_np_%d.png" %(simulator_name, np)
        elif case == 6:
            conn  = OneToOneConnector(safe=safe, weights=w, delays=delay, verbose=verbose)
            fig_name = "OneToOne_%s_np_%d.png" %(simulator_name, np)
        elif case == 7:
            conn  = FromFileConnector('connections.dat', safe=safe, verbose=verbose)
            fig_name = "FromFile_%s_np_%d.png" %(simulator_name, np)
        elif case == 8:
            conn  = SmallWorldConnector(degree=0.1, rewiring=0., weights=w, delays=delay, safe=safe, verbose=verbose, allow_self_connections=autapse, space=sp)
            fig_name = "SmallWorld_%s_np_%d.png" %(simulator_name, np)
        
        
        print "Generating data for %s" %fig_name
        rng   = NumpyRNG(23434, num_processes=np, parallel_safe=parallel_safe)
        prj   = Projection(x, x, conn, rng=rng)

        simulation_time = timer.elapsedTime()
        print "Building time", simulation_time
        print "Nb synapses built", len(prj)

        if render : 
            if not(os.path.isdir('Results')):
                os.mkdir('Results')

            print "Saving Positions...."
            x.savePositions('Results/positions.dat')

            print "Saving Connections...."
            prj.saveConnections('Results/connections.dat', compatible_output=False)
            
        if node_id == 0 and render:
            figure()
            print "Generating and saving %s" %fig_name
            positions   = numpy.loadtxt('Results/positions.dat')
            connections = numpy.loadtxt('Results/connections.dat')
            positions   = positions[numpy.argsort(positions[:,0])]
            idx_pre     = (connections[:,0] - x.first_id).astype(int)
            idx_post    = (connections[:,1] - x.first_id).astype(int)
            d           = distances(positions[idx_pre,1:3], positions[idx_post,1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:,1], positions[:,2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:,2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:,3], 50)
            subplot(234)
            ids   = numpy.random.permutation(numpy.unique(positions[:,0]))[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y'] 
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:,2], '.')

            subplot(236)
            plot(d, connections[:,3], '.')
            savefig("Results/" + fig_name)            
            os.remove('Results/connections.dat')
            os.remove('Results/positions.dat')
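The weight, delay and probability arguments above accept strings such as "0.2 + d/0.2" or "d < 0.1", which PyNN evaluates with d bound to the pre/post distance. The small helper below is hypothetical (not part of PyNN) and only mimics that evaluation to illustrate what the expressions compute.

import numpy as np

def eval_distance_expression(expr, d):
    # hypothetical helper, for illustration only: evaluate a PyNN-style
    # distance expression for an array of distances d
    return eval(expr, {"np": np, "numpy": np, "exp": np.exp}, {"d": d})

d = np.linspace(0.0, 0.5, 6)
print(eval_distance_expression("0.2 + d/0.2", d))  # weights grow linearly with d
print(eval_distance_expression("d < 0.1", d))      # boolean connection mask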
Example #16
def run_retina(params):
    """Run the retina using the specified parameters."""

    print "Setting up simulation"
    timer = Timer()
    timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'],
               max_delay=params['syn_delay'],
               threads=params['threads'],
               rng_seeds=params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    phr_OFF = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    noise_ON = pyNN.Population(
        (N, N),
        pyNN.native_cell_type('noise_generator')(mean=0.0,
                                                 std=params['noise_std']))
    noise_OFF = pyNN.Population(
        (N, N),
        pyNN.native_cell_type('noise_generator')(mean=0.0,
                                                 std=params['noise_std']))

    phr_ON.set(start=params['simtime'] / 4,
               stop=params['simtime'] / 4 * 3,
               amplitude=params['amplitude'] * params['snr'])
    phr_OFF.set(start=params['simtime'] / 4,
                stop=params['simtime'] / 4 * 3,
                amplitude=-params['amplitude'] * params['snr'])

    # target ON and OFF populations
    v_init = params['parameters_gc'].pop('Vinit')
    out_ON = pyNN.Population((N, N),
                             pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(
                                 **params['parameters_gc']))
    out_OFF = pyNN.Population((N, N),
                              pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(
                                  **params['parameters_gc']))
    out_ON.initialize(v=v_init)
    out_OFF.initialize(v=v_init)

    #print "Connecting the network"

    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, pyNN.OneToOneConnector())
    retina_proj_ON.set(weight=params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF,
                                      pyNN.OneToOneConnector())
    retina_proj_OFF.set(weight=params['weight'])

    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, pyNN.OneToOneConnector())
    noise_proj_ON.set(weight=params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF,
                                     pyNN.OneToOneConnector())
    noise_proj_OFF.set(weight=params['weight'])

    out_ON.record('spikes')
    out_OFF.record('spikes')

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    print "Running simulation"

    timer.start()  # start timer for the simulation run
    pyNN.run(params['simtime'])
    simCPUTime = timer.elapsedTime()

    out_ON_DATA = out_ON.get_data().segments[0]
    out_OFF_DATA = out_OFF.get_data().segments[0]

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate  (ON) : ", out_ON.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
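run_retina() reads everything from a flat params dictionary. The sketch below lists the keys the function uses, with illustrative placeholder values rather than the values of any original study.

# Illustrative parameter dict for run_retina(); all values are placeholders.
# 'parameters_gc' must hold the ganglion-cell parameters plus 'Vinit',
# which the function pops and uses for membrane-potential initialisation.
example_params = {
    'description': 'toy retina run',
    'dt': 0.1,            # ms
    'syn_delay': 1.0,     # ms, also used as max_delay
    'threads': 1,
    'kernelseeds': [12345],
    'N': 10,              # the network is N x N
    'simtime': 400.0,     # ms
    'amplitude': 1.0,
    'snr': 2.0,
    'noise_std': 0.5,
    'weight': 1.0,
    'parameters_gc': {'Vinit': -65.0},  # plus any iaf_cond_exp_sfa_rr parameters
}
# out_on_data, out_off_data = run_retina(example_params)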
    def run(self, params, verbose=True):
        tmpdir = tempfile.mkdtemp()
        timer = Timer()
        timer.start() # start timer on construction

        # === Build the network ========================================================
        if verbose: print "Setting up simulation"
        sim.setup(timestep=params.simulation.dt,max_delay=params.simulation.syn_delay, debug=False)

        N = params.N
        #dc_generator
        current_source = sim.DCSource(  amplitude= params.snr,
                                        start=params.simulation.simtime/4,
                                        stop=params.simulation.simtime/4*3)
        
        # internal noise model (NEST specific)
        noise = sim.Population(N,'noise_generator',{'mean':0.,'std':params.noise_std}) 
        # target population
        output = sim.Population(N , sim.IF_cond_exp)

        # initialize membrane potential
        numpy.random.seed(params.simulation.kernelseed)
        V_rest, V_spike = -70., -53.
        output.tset('v_init',V_rest + numpy.random.rand(N,)* (V_spike -V_rest))

        #  Connecting the network
        conn = sim.OneToOneConnector(weights = params.weight)
        sim.Projection(noise, output, conn)

        for cell in output:
            cell.inject(current_source)

        output.record()

        # reads out time used for building
        buildCPUTime= timer.elapsedTime()

        # === Run simulation ===========================================================
        if verbose: print "Running simulation"

        timer.reset()  # reset timer before the simulation run
        sim.run(params.simulation.simtime)
        simCPUTime = timer.elapsedTime()

        timer.reset()  # reset timer before writing the results

        output_filename = os.path.join(tmpdir,'output.gdf')
        #print output_filename
        output.printSpikes(output_filename)
        output_DATA = load_spikelist(output_filename,N,
                                        t_start=0.0, t_stop=params.simulation.simtime)
        writeCPUTime = timer.elapsedTime()

        if verbose:
            print "\nFiber Network Simulation:"
            print "Number of Neurons  : ", N
            print "Mean Output rate    : ", output_DATA.mean_rate(), "Hz during ",params.simulation.simtime, "ms"
            print("Build time             : %g s" % buildCPUTime)
            print("Simulation time        : %g s" % simCPUTime)
            print("Writing time           : %g s" % writeCPUTime)

        os.remove(output_filename)
        os.rmdir(tmpdir)

        return output_DATA
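The run() method above accesses its parameters as attributes (params.N, params.simulation.dt, ...). Below is a minimal sketch of a compatible parameter object, with SimpleNamespace standing in for whatever parameter class the original project used; the FiberModel name in the final comment is hypothetical.

from types import SimpleNamespace

# Illustrative parameter object; values are placeholders.
example_params = SimpleNamespace(
    N=100,
    snr=1.0,
    noise_std=0.5,
    weight=0.1,
    simulation=SimpleNamespace(dt=0.1, syn_delay=1.0, simtime=400.0,
                               kernelseed=12345),
)
# output_data = FiberModel().run(example_params)  # hypothetical owning class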
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "I_f_curve.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """
    
    import json
    import numpy as np
    from pyNN.utility import Timer

    timer = Timer()

    g = open("I_f_curve.json", 'r')
    d = json.load(g)
    
    N = d['param']['N']
    max_current = d['param']['max_current']
    tstop = d['param']['tstop']

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]

    timer.start()
    sim.setup(**d['setup'])

    popcell = sim.Population(N, sim.IF_cond_exp, d['IF_cond_exp'])

    #current_source = []
    #for i in xrange(N):
    #    current_source.append(sim.DCSource(amplitude=(max_current*(i+1)/N)))
    #    popcell[i:(i+1)].inject(current_source[i])
    i_offset = max_current * (1 + np.arange(N))/N
    popcell.tset("i_offset", i_offset)

    if PYNN07:
        popcell.record()
    else:
        popcell.record('spikes')
        #popcell[0, 1, N-2, N-1].record('v')  # debug

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = popcell.getSpikes()
        data = spike_array_to_neo(spike_array, popcell, tstop)
    else:
        data = popcell.get_data()

    sim.end()

    closing_time = timer.diff()
    times = {'setup_time': setup_time, 'run_time': run_time, 'closing_time': closing_time}

    return data, times
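A hypothetical usage sketch for run_model(); it assumes pyNN and the NEST backend are installed and that an I_f_curve.json file with the keys read above sits in the working directory.

import pyNN.nest as sim

data, times = run_model(sim, simulator="nest")
print("recorded %d spiketrains" % len(data.segments[0].spiketrains))
print("setup: %.3f s, run: %.3f s, closing: %.3f s"
      % (times['setup_time'], times['run_time'], times['closing_time']))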
Example #20
def test(cases=[1]):

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None), axes='xy')
    safe = False
    callback = progress_bar.set_level
    autapse = False
    parallel_safe = True
    render = True
    to_file = True

    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        #delay = RandomDistribution('uniform', (0.1,5.))
        #delay = "0.1 + d/0.2"
        delay = 0.1
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "exp(-d**2/(2*0.1**2))"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()

        synapse = StaticSynapse(weight=w, delay=delay)
        rng = NumpyRNG(23434, parallel_safe=parallel_safe)

        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression,
                safe=safe,
                callback=callback,
                allow_self_connections=autapse,
                rng=rng)
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(0.02,
                                             safe=safe,
                                             callback=callback,
                                             allow_self_connections=autapse,
                                             rng=rng)
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            conn = AllToAllConnector(delays=delay,
                                     safe=safe,
                                     callback=callback,
                                     allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50,
                                            safe=safe,
                                            callback=callback,
                                            allow_self_connections=autapse,
                                            rng=rng)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50,
                                           safe=safe,
                                           callback=callback,
                                           allow_self_connections=autapse,
                                           rng=rng)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe, callback=callback)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector(files.NumpyBinaryFile(
                'Results/connections.dat', mode='r'),
                                     safe=safe,
                                     callback=callback,
                                     distributed=True)
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(degree=0.1,
                                       rewiring=0.,
                                       safe=safe,
                                       callback=callback,
                                       allow_self_connections=autapse)
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print "Generating data for %s" % fig_name

        prj = Projection(x, x, conn, synapse, space=sp)

        mytime = timer.diff()
        print "Time to connect the cell population:", mytime, 's'
        print "Nb synapses built", prj.size()

        if to_file:
            if not (os.path.isdir('Results')):
                os.mkdir('Results')
            print "Saving Connections...."
            prj.save('all',
                     files.NumpyBinaryFile('Results/connections.dat',
                                           mode='w'),
                     gather=True)

        mytime = timer.diff()
        print "Time to save the projection:", mytime, 's'

        if render and to_file:
            print "Saving Positions...."
            x.save_positions('Results/positions.dat')
        end()

        if node_id == 0 and render and to_file:
            figure()
            print "Generating and saving %s" % fig_name
            positions = numpy.loadtxt('Results/positions.dat')

            positions[:, 0] -= positions[:, 0].min()
            connections = files.NumpyBinaryFile('Results/connections.dat',
                                                mode='r').read()
            print positions.shape, connections.shape
            connections[:, 0] -= connections[:, 0].min()
            connections[:, 1] -= connections[:, 1].min()
            idx_pre = connections[:, 0].astype(int)
            idx_post = connections[:, 1].astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:, 1], positions[:, 2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:, 2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:, 3], 50)
            subplot(234)
            numpy.random.seed(74562)
            ids = numpy.random.permutation(positions[:, 0])[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y']
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], '.')

            subplot(236)
            plot(d, connections[:, 3], '.')
            savefig("Results/" + fig_name)
            #os.remove('Results/connections.dat')
            #os.remove('Results/positions.dat')
            show()
cortical_delay = 0.1


# ================= Simulation time ==================
dt = 1.0  # Simulation's time step
delay_min = 1.0  # Minimum delay
delay_max = 5.0  # Maximum delay

#############################
# Build the Network
#############################

# Has to be called at the beginning of the simulation
simulator.setup(timestep=dt, min_delay=delay_min, max_delay=delay_max)

timer.start()  # start timer on construction

# ================== LGN ========================

# Load LGN positions
positions_on, positions_off = load_positions()

## Load the spikes
spikes_on, spikes_off = load_lgn_spikes(contrast, N_lgn_layers)

# Spike functions
def spike_times(simulator, layer, spikes_file):
    return [simulator.Sequence(x) for x in spikes_file[layer]]

# Spatial structure of on LGN cells
# On cells
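The spike_times() helper wraps each cell's spike list in a Sequence so it can be handed to SpikeSourceArray. The fragment breaks off here; the sketch below is an assumption about how it continues, building the first ON-centre LGN layer with the population size taken from the loaded positions.

# Sketch only (assumption, not the original script): ON-centre LGN layer 0,
# assuming one spike list in spikes_on[0] per cell in positions_on.
lgn_on_layer_0 = simulator.Population(
    len(positions_on),
    simulator.SpikeSourceArray(spike_times=spike_times(simulator, 0, spikes_on)),
    label='LGN_on_layer_0')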
Example #22
def test(cases=[1]):

    sp = Space(periodic_boundaries=((0, 1), (0, 1), None))
    safe = False
    verbose = True
    autapse = False
    parallel_safe = True
    render = True

    for case in cases:
        #w = RandomDistribution('uniform', (0,1))
        w = "0.2 + d/0.2"
        #w = 0.1
        #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2)

        #delay = RandomDistribution('uniform', (0.1,5.))
        delay = "0.1 + d/0.2"
        #delay = 0.1
        #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances

        d_expression = "d < 0.1"
        #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)"
        #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()"

        timer = Timer()
        np = num_processes()
        timer.start()
        if case == 1:
            conn = DistanceDependentProbabilityConnector(
                d_expression,
                delays=delay,
                weights=w,
                space=sp,
                safe=safe,
                verbose=verbose,
                allow_self_connections=autapse)
            fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np)
        elif case == 2:
            conn = FixedProbabilityConnector(0.05,
                                             weights=w,
                                             delays=delay,
                                             space=sp,
                                             safe=safe,
                                             verbose=verbose,
                                             allow_self_connections=autapse)
            fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np)
        elif case == 3:
            conn = AllToAllConnector(delays=delay,
                                     weights=w,
                                     space=sp,
                                     safe=safe,
                                     verbose=verbose,
                                     allow_self_connections=autapse)
            fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np)
        elif case == 4:
            conn = FixedNumberPostConnector(50,
                                            weights=w,
                                            delays=delay,
                                            space=sp,
                                            safe=safe,
                                            verbose=verbose,
                                            allow_self_connections=autapse)
            fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np)
        elif case == 5:
            conn = FixedNumberPreConnector(50,
                                           weights=w,
                                           delays=delay,
                                           space=sp,
                                           safe=safe,
                                           verbose=verbose,
                                           allow_self_connections=autapse)
            fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np)
        elif case == 6:
            conn = OneToOneConnector(safe=safe,
                                     weights=w,
                                     delays=delay,
                                     verbose=verbose)
            fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np)
        elif case == 7:
            conn = FromFileConnector('connections.dat',
                                     safe=safe,
                                     verbose=verbose)
            fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np)
        elif case == 8:
            conn = SmallWorldConnector(degree=0.1,
                                       rewiring=0.,
                                       weights=w,
                                       delays=delay,
                                       safe=safe,
                                       verbose=verbose,
                                       allow_self_connections=autapse,
                                       space=sp)
            fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np)

        print "Generating data for %s" % fig_name
        rng = NumpyRNG(23434, num_processes=np, parallel_safe=parallel_safe)
        prj = Projection(x, x, conn, rng=rng)

        simulation_time = timer.elapsedTime()
        print "Building time", simulation_time
        print "Nb synapses built", len(prj)

        if render:
            if not (os.path.isdir('Results')):
                os.mkdir('Results')

            print "Saving Positions...."
            x.savePositions('Results/positions.dat')

            print "Saving Connections...."
            prj.saveConnections('Results/connections.dat',
                                compatible_output=False)

        if node_id == 0 and render:
            figure()
            print "Generating and saving %s" % fig_name
            positions = numpy.loadtxt('Results/positions.dat')
            connections = numpy.loadtxt('Results/connections.dat')
            positions = positions[numpy.argsort(positions[:, 0])]
            idx_pre = (connections[:, 0] - x.first_id).astype(int)
            idx_post = (connections[:, 1] - x.first_id).astype(int)
            d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1)
            subplot(231)
            title('Cells positions')
            plot(positions[:, 1], positions[:, 2], '.')
            subplot(232)
            title('Weights distribution')
            hist(connections[:, 2], 50)
            subplot(233)
            title('Delay distribution')
            hist(connections[:, 3], 50)
            subplot(234)
            ids = numpy.random.permutation(numpy.unique(positions[:, 0]))[0:6]
            colors = ['k', 'r', 'b', 'g', 'c', 'y']
            for count, cell in enumerate(ids):
                draw_rf(cell, positions, connections, colors[count])
            subplot(235)
            plot(d, connections[:, 2], '.')

            subplot(236)
            plot(d, connections[:, 3], '.')
            savefig("Results/" + fig_name)
            os.remove('Results/connections.dat')
            os.remove('Results/positions.dat')
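draw_rf() is called by the plotting code above but is not defined in this snippet. Below is a minimal sketch of what such a helper might do (an assumption, not the original implementation): highlight one cell and the cells it projects to, using the (id, x, y) and (pre, post, weight, delay) column layouts of the files written above.

import numpy
from pylab import plot

def draw_rf(cell, positions, connections, color):
    # outgoing connections of this cell (column 0 = presynaptic id)
    targets = connections[connections[:, 0] == cell, 1]
    mask = numpy.in1d(positions[:, 0], targets)
    plot(positions[mask, 1], positions[mask, 2], '.', color=color)
    source = positions[positions[:, 0] == cell][0]
    plot([source[1]], [source[2]], 'o', color=color)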
Example #23

rng = NumpyRNG(seed=seed, parallel_safe=True)

print("[%d] Creating populations" % node)
n_spikes = int(2 * tstop * input_rate / 1000.0)
spike_times = numpy.add.accumulate(rng.next(n_spikes, 'exponential',
                                            {'beta': 1000.0 / input_rate}, mask_local=False))

input_population = Population(100, SpikeSourceArray(spike_times=spike_times), label="input")
output_population = Population(10, IF_curr_exp(**cell_params), label="output")
print("[%d] input_population cells: %s" % (node, input_population.local_cells))
print("[%d] output_population cells: %s" % (node, output_population.local_cells))

print("[%d] Connecting populations" % node)
timer.start()
connector = CSAConnector(csa.random(0.5))
syn = StaticSynapse(weight=0.1)
projection = Projection(input_population, output_population, connector, syn)
print(connector.describe(), timer.elapsedTime())

file_stem = "Results/simpleRandomNetwork_csa_np%d_%s" % (num_processes(), simulator_name)

projection.save('all', '%s.conn' % file_stem)

input_population.record('spikes')
output_population.record('spikes')
output_population.sample(n_record, rng).record('v')

print("[%d] Running simulation" % node)
run(tstop)
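A minimal follow-up sketch (not part of the original fragment) that summarises the recorded activity after run(tstop), using the populations and variables defined above:

spike_counts = output_population.get_spike_counts()
mean_rate = sum(spike_counts.values()) / (len(output_population) * tstop / 1000.0)
print("[%d] output_population mean rate: %g Hz" % (node, mean_rate))
end()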
    def test_va_benchmark(self):
        try:
            simulator_name = 'spiNNaker'

            timer = Timer()

            # === Define parameters =========================================

            rngseed = 98766987
            parallel_safe = True

            n = 1500  # number of cells
            # number of excitatory cells:number of inhibitory cells
            r_ei = 4.0
            pconn = 0.02  # connection probability

            dt = 0.1  # (ms) simulation timestep
            tstop = 200  # (ms) simulaton duration
            delay = 1

            # Cell parameters
            area = 20000.  # (µm²)
            tau_m = 20.  # (ms)
            cm = 1.  # (µF/cm²)
            g_leak = 5e-5  # (S/cm²)
            e_leak = -49.  # (mV)
            v_thresh = -50.  # (mV)
            v_reset = -60.  # (mV)
            t_refrac = 5.  # (ms) (clamped at v_reset)
            # (mV) 'mean' membrane potential,  for calculating CUBA weights
            v_mean = -60.
            tau_exc = 5.  # (ms)
            tau_inh = 10.  # (ms)
            # (nS) #Those weights should be similar to the COBA weights
            g_exc = 0.27
            # (nS) # but the delpolarising drift should be taken into account
            g_inh = 4.5
            e_rev_exc = 0.  # (mV)
            e_rev_inh = -80.  # (mV)

            # === Calculate derived parameters ===============================

            area *= 1e-8  # convert to cm²
            cm *= area * 1000  # convert to nF
            r_m = 1e-6 / (g_leak * area)  # membrane resistance in MΩ
            assert tau_m == cm * r_m  # just to check

            # number of excitatory cells
            n_exc = int(round((n * r_ei / (1 + r_ei))))
            n_inh = n - n_exc  # number of inhibitory cells

            print n_exc, n_inh

            celltype = p.IF_curr_exp
            # (nA) weight of excitatory synapses
            w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean)
            w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean)  # (nA)
            assert w_exc > 0
            assert w_inh < 0

            # === Build the network ==========================================

            p.setup(timestep=dt, min_delay=delay, max_delay=delay)

            if simulator_name == 'spiNNaker':
                # this will set 100 neurons per core
                p.set_number_of_neurons_per_core('IF_curr_exp', 100)
                # this will set 50 neurons per core
                p.set_number_of_neurons_per_core('IF_cond_exp', 50)

            node_id = 1
            np = 1

            host_name = socket.gethostname()
            print "Host #%d is on %s" % (np, host_name)

            cell_params = {
                'tau_m': tau_m,
                'tau_syn_E': tau_exc,
                'tau_syn_I': tau_inh,
                'v_rest': e_leak,
                'v_reset': v_reset,
                'v_thresh': v_thresh,
                'cm': cm,
                'tau_refrac': t_refrac,
                'i_offset': 0
            }

            print cell_params

            timer.start()

            print "%s Creating cell populations..." % node_id
            exc_cells = p.Population(n_exc,
                                     celltype,
                                     cell_params,
                                     label="Excitatory_Cells")
            inh_cells = p.Population(n_inh,
                                     celltype,
                                     cell_params,
                                     label="Inhibitory_Cells")
            p.NativeRNG(12345)

            print "%s Initialising membrane potential to random values..." \
                  % node_id
            rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)
            uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh],
                                               rng=rng)
            exc_cells.initialize('v', uniform_distr)
            inh_cells.initialize('v', uniform_distr)

            print "%s Connecting populations..." % node_id
            exc_conn = p.FixedProbabilityConnector(pconn,
                                                   weights=w_exc,
                                                   delays=delay)
            inh_conn = p.FixedProbabilityConnector(pconn,
                                                   weights=w_inh,
                                                   delays=delay)

            connections = dict()
            connections['e2e'] = p.Projection(exc_cells,
                                              exc_cells,
                                              exc_conn,
                                              target='excitatory',
                                              rng=rng)
            connections['e2i'] = p.Projection(exc_cells,
                                              inh_cells,
                                              exc_conn,
                                              target='excitatory',
                                              rng=rng)
            connections['i2e'] = p.Projection(inh_cells,
                                              exc_cells,
                                              inh_conn,
                                              target='inhibitory',
                                              rng=rng)
            connections['i2i'] = p.Projection(inh_cells,
                                              inh_cells,
                                              inh_conn,
                                              target='inhibitory',
                                              rng=rng)

            # === Setup recording ==============================
            print "%s Setting up recording..." % node_id
            exc_cells.record()

            # === Run simulation ================================
            print "%d Running simulation..." % node_id

            print "timings: number of neurons:", n
            print "timings: number of synapses:", n * n * pconn

            p.run(tstop)

            exc_spikes = exc_cells.getSpikes()
            print len(exc_spikes)

            current_file_path = os.path.dirname(os.path.abspath(__file__))
            current_file_path = os.path.join(current_file_path, "spikes.data")
            exc_cells.printSpikes(current_file_path)
            pre_recorded_spikes = p.utility_calls.read_spikes_from_file(
                current_file_path, 0, n_exc, 0, tstop)

            for spike_element, read_element in zip(exc_spikes,
                                                   pre_recorded_spikes):
                self.assertEqual(round(spike_element[0], 1),
                                 round(read_element[0], 1))
                self.assertEqual(round(spike_element[1], 1),
                                 round(read_element[1], 1))

            p.end()


        # The system is intentionally overloaded, so the run may time out
        except SpinnmanTimeoutException as ex:
            raise SkipTest(ex)
Example #25
print "Host #%d is on %s" % (rank + 1, host_name)

if extra.has_key('threads'):
    print "%d Initialising the simulator with %d threads..." % (
        rank, extra['threads'])
else:
    print "%d Initialising the simulator with single thread..." % (rank)


# Small function to display information only on node 1
def nprint(s):
    if (rank == 0):
        print s


timer.start()  # start timer on construction

print "%d Setting up random number generator" % rank
rng = NumpyRNG(kernelseed, parallel_safe=True)

print "%d Creating excitatory population with %d neurons." % (rank, NE)
celltype = IF_curr_alpha(**cell_params)
E_net = Population(NE, celltype, label="E_net")

print "%d Creating inhibitory population with %d neurons." % (rank, NI)
I_net = Population(NI, celltype, label="I_net")

print "%d Initialising membrane potential to random values between %g mV and %g mV." % (
    rank, U0, theta)
uniformDistr = RandomDistribution('uniform', [U0, theta], rng)
E_net.initialize(v=uniformDistr)
Example #26
def runBrunelNetwork(
    g=5.0,
    eta=2.0,
    dt=0.1,
    simtime=1000.0,
    delay=1.5,
    epsilon=0.1,
    order=2500,
    N_rec=50,
    N_rec_v=2,
    save=False,
    simulator_name="nest",
    extra={},
):

    exec("from pyNN.%s import *" % simulator_name) in globals()

    timer = Timer()

    # === Define parameters ========================================================

    downscale = 1  # scale number of neurons down by this factor
    # scale synaptic weights up by this factor to
    # obtain similar dynamics independent of size
    order = order  # determines size of network:
    # 4*order excitatory neurons
    # 1*order inhibitory neurons
    Nrec = N_rec  # number of neurons to record from, per population
    epsilon = epsilon  # connectivity: proportion of neurons each neuron projects to

    # Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
    # here: Case C, asynchronous irregular firing, ~35 Hz
    eta = eta  # rel rate of external input
    g = g  # rel strength of inhibitory synapses
    J = 0.1  # synaptic weight [mV]
    delay = delay  # synaptic delay, all connections [ms]

    # single neuron parameters
    tauMem = 20.0  # neuron membrane time constant [ms]
    tauSyn = 0.1  # synaptic time constant [ms]
    tauRef = 2.0  # refractory time [ms]
    U0 = 0.0  # resting potential [mV]
    theta = 20.0  # threshold

    # simulation-related parameters
    simtime = simtime  # simulation time [ms]
    dt = dt  # simulation step length [ms]

    # seed for random generator used when building connections
    connectseed = 12345789
    use_RandomArray = True  # use Python rng rather than NEST rng

    # seed for random generator(s) used during simulation
    kernelseed = 43210987

    # === Calculate derived parameters =============================================

    # scaling: compute effective order and synaptic strength
    order_eff = int(float(order) / downscale)
    J_eff = J * downscale

    # compute neuron numbers
    NE = int(4 * order_eff)  # number of excitatory neurons
    NI = int(1 * order_eff)  # number of inhibitory neurons
    N = NI + NE  # total number of neurons

    # compute synapse numbers
    CE = int(epsilon * NE)  # number of excitatory synapses on neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses on neuron
    C = CE + CI  # total number of internal synapses per n.
    Cext = CE  # number of external synapses on neuron

    # synaptic weights, scaled for alpha functions, such that
    # for constant membrane potential, charge J would be deposited
    fudge = 0.00041363506632638  # ensures dV = J at V=0

    # excitatory weight: JE = J_eff / tauSyn * fudge
    JE = (J_eff / tauSyn) * fudge

    # inhibitory weight: JI = - g * JE
    JI = -g * JE

    # threshold, external, and Poisson generator rates:
    nu_thresh = theta / (J_eff * CE * tauMem)
    nu_ext = eta * nu_thresh  # external rate per synapse
    p_rate = 1000 * nu_ext * Cext  # external input rate per neuron (Hz)

    # number of synapses---just so we know
    # = N * (C internal synapses + 1 synapse from the Poisson generator)
    #   + 2*Nrec synapses to the spike detectors
    Nsyn = (C + 1) * N + 2 * Nrec

    # put cell parameters into a dict
    cell_params = {
        "tau_m": tauMem,
        "tau_syn_E": tauSyn,
        "tau_syn_I": tauSyn,
        "tau_refrac": tauRef,
        "v_rest": U0,
        "v_reset": U0,
        "v_thresh": theta,
        "cm": 0.001,
    }  # (nF)

    # === Build the network ========================================================

    # clear all existing network elements and set resolution and limits on delays.
    # For NEST, limits must be set BEFORE connecting any elements

    # extra = {'threads' : 2}

    rank = setup(timestep=dt, max_delay=delay, **extra)
    print("rank =", rank)
    np = num_processes()
    print("np =", np)
    import socket

    host_name = socket.gethostname()
    print("Host #%d is on %s" % (rank + 1, host_name))

    if "threads" in extra:
        print("%d Initialising the simulator with %d threads..." % (rank, extra["threads"]))
    else:
        print("%d Initialising the simulator with single thread..." % rank)

    # Small function to display information only on node 1
    def nprint(s):
        if rank == 0:
            print(s)

    timer.start()  # start timer on construction

    print("%d Setting up random number generator" % rank)
    rng = NumpyRNG(kernelseed, parallel_safe=True)

    print("%d Creating excitatory population with %d neurons." % (rank, NE))
    celltype = IF_curr_alpha(**cell_params)
    E_net = Population(NE, celltype, label="E_net")

    print("%d Creating inhibitory population with %d neurons." % (rank, NI))
    I_net = Population(NI, celltype, label="I_net")

    print("%d Initialising membrane potential to random values between %g mV and %g mV." % (rank, U0, theta))
    uniformDistr = RandomDistribution("uniform", low=U0, high=theta, rng=rng)
    E_net.initialize(v=uniformDistr)
    I_net.initialize(v=uniformDistr)

    print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate))
    source_type = SpikeSourcePoisson(rate=p_rate)
    expoisson = Population(NE, source_type, label="expoisson")

    print("%d Creating inhibitory Poisson generator with the same rate." % rank)
    inpoisson = Population(NI, source_type, label="inpoisson")

    # Record spikes
    print("%d Setting up recording in excitatory population." % rank)
    E_net.sample(Nrec).record("spikes")
    E_net[0 : min(NE, N_rec_v)].record("v")

    print("%d Setting up recording in inhibitory population." % rank)
    I_net.sample(Nrec).record("spikes")
    I_net[0 : min(NI, N_rec_v)].record("v")

    progress_bar = ProgressBar(width=20)
    connector = FixedProbabilityConnector(epsilon, rng=rng, callback=progress_bar)
    E_syn = StaticSynapse(weight=JE, delay=delay)
    I_syn = StaticSynapse(weight=JI, delay=delay)
    ext_Connector = OneToOneConnector(callback=progress_bar)
    ext_syn = StaticSynapse(weight=JE, delay=dt)

    print(
        "%d Connecting excitatory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JE, delay)
    )
    E_to_E = Projection(E_net, E_net, connector, E_syn, receptor_type="excitatory")
    print("E --> E\t\t", len(E_to_E), "connections")
    I_to_E = Projection(I_net, E_net, connector, I_syn, receptor_type="inhibitory")
    print("I --> E\t\t", len(I_to_E), "connections")
    input_to_E = Projection(expoisson, E_net, ext_Connector, ext_syn, receptor_type="excitatory")
    print("input --> E\t", len(input_to_E), "connections")

    print(
        "%d Connecting inhibitory population with connection probability %g, weight %g nA and delay %g ms."
        % (rank, epsilon, JI, delay)
    )
    E_to_I = Projection(E_net, I_net, connector, E_syn, receptor_type="excitatory")
    print("E --> I\t\t", len(E_to_I), "connections")
    I_to_I = Projection(I_net, I_net, connector, I_syn, receptor_type="inhibitory")
    print("I --> I\t\t", len(I_to_I), "connections")
    input_to_I = Projection(inpoisson, I_net, ext_Connector, ext_syn, receptor_type="excitatory")
    print("input --> I\t", len(input_to_I), "connections")

    # read out time used for building
    buildCPUTime = timer.elapsedTime()
    # === Run simulation ===========================================================

    # run, measure computer time
    timer.start()  # start timer for the simulation run
    print("%d Running simulation for %g ms." % (rank, simtime))
    run(simtime)
    simCPUTime = timer.elapsedTime()

    # write data to file
    # print("%d Writing data to file." % rank)
    # (E_net + I_net).write_data("Results/brunel_np%d_%s.pkl" % (np, simulator_name))
    if save:
        for pop in [E_net, I_net]:
            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.gdf" % (simulator_name, pop.label, rank))
            spikes = pop.get_data("spikes", gather=False)
            for segment in spikes.segments:
                io.write_segment(segment)

            io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.dat" % (simulator_name, pop.label, rank))
            vs = pop.get_data("v", gather=False)
            for segment in vs.segments:
                io.write_segment(segment)

    spike_data = {}
    spike_data["senders"] = []
    spike_data["times"] = []
    index_offset = 0
    for pop in [E_net, I_net]:
        spikes = pop.get_data("spikes", gather=False)
        # print(spikes.segments[0].all_data)
        num_rec = min(pop.size, N_rec)
        print("Extracting spike info (%i) for %i cells in %s" % (num_rec, pop.size, pop.label))
        assert num_rec == len(spikes.segments[0].spiketrains)
        for i in range(num_rec):
            ss = spikes.segments[0].spiketrains[i]
            for s in ss:
                index = i + index_offset
                # print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index))
                spike_data["senders"].append(index)
                spike_data["times"].append(s)
        index_offset += pop.size

    # from IPython.core.debugger import Tracer
    # Tracer()()

    E_rate = E_net.mean_spike_count() * 1000.0 / simtime
    I_rate = I_net.mean_spike_count() * 1000.0 / simtime

    # write a short report
    nprint("\n--- Brunel Network Simulation ---")
    nprint("Nodes              : %d" % np)
    nprint("Number of Neurons  : %d" % N)
    nprint("Number of Synapses : %d" % Nsyn)
    nprint("Input firing rate  : %g" % p_rate)
    nprint("Excitatory weight  : %g" % JE)
    nprint("Inhibitory weight  : %g" % JI)
    nprint("Excitatory rate    : %g Hz" % E_rate)
    nprint("Inhibitory rate    : %g Hz" % I_rate)
    nprint("Build time         : %g s" % buildCPUTime)
    nprint("Simulation time    : %g s" % simCPUTime)

    # === Clean up and quit ========================================================

    end()

    return spike_data
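A hypothetical usage sketch for runBrunelNetwork(); it assumes pyNN and the NEST backend are installed and deliberately runs a small, short network:

if __name__ == "__main__":
    spike_data = runBrunelNetwork(g=5.0, eta=2.0, dt=0.1, simtime=100.0,
                                  order=100, N_rec=20, simulator_name="nest")
    print("collected %d spikes" % len(spike_data["times"]))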