Example #1
def setup_2_layers_4_units_ff_net():
    configure_scheduling()
    pynnn.setup()
    Tns.p1 = pynnn.Population(4,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.p2 = pynnn.Population(4,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.prj1_2 = pynnn.Projection(
        Tns.p1,
        Tns.p2,
        pynnn.AllToAllConnector(allow_self_connections=False),
        target='excitatory')
    Tns.prj1_2.set("weight", 1)
    Tns.max_weight = 34
    Tns.rore1_update_p = 10
    Tns.rore1_win_width = 200
    Tns.rore2_update_p = 10
    Tns.rore2_win_width = 200
    Tns.rore1 = RectilinearOutputRateEncoder(Tns.p1, 2, 2, Tns.rore1_update_p,
                                             Tns.rore1_win_width)
    Tns.rore2 = RectilinearOutputRateEncoder(Tns.p2, 2, 2, Tns.rore2_update_p,
                                             Tns.rore2_win_width)
    common.pynn_utils.POP_ADAPT_DICT[(
        Tns.p1, common.pynn_utils.RectilinearOutputRateEncoder)] = Tns.rore1
    common.pynn_utils.POP_ADAPT_DICT[(
        Tns.p2, common.pynn_utils.RectilinearOutputRateEncoder)] = Tns.rore2
    enable_recording(Tns.p1, Tns.p2)
    schedule_output_rate_calculation(Tns.p1)
    schedule_output_rate_calculation(Tns.p2)
Example #2
def setup_pynn_populations_with_1_to_1_connectivity():
    pynnn.setup()
    Tns.p1 = pynnn.Population(64,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.p2 = pynnn.Population(64,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.prj1_2 = pynnn.Projection(Tns.p1,
                                  Tns.p2,
                                  pynnn.OneToOneConnector(),
                                  target='excitatory')
Example #3
def test_adapter_keeps_unit_count():
    """Add_pynn_population and commit_structure result in consistent number of
    units."""
    assert A.num_units == 0
    pop_size = 27
    pynn_pop1 = pynnn.Population(pop_size, pynnn.IF_cond_alpha)
    A.add_pynn_population(pynn_pop1, alias = "soilwork")
    pynn_pop2 = pynnn.Population(pop_size, pynnn.IF_cond_alpha,
                                 structure = pynnn.space.Grid3D())
    A.add_pynn_population(pynn_pop2)
    A.commit_structure()
    assert A.num_units == pop_size * 2
Example #4
def setup_pynn_populations_with_full_connectivity():
    pynnn.setup()
    Tns.p1 = pynnn.Population(4,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.p2 = pynnn.Population(4,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.prj1_2 = pynnn.Projection(
        Tns.p1,
        Tns.p2,
        pynnn.AllToAllConnector(allow_self_connections=False),
        target='excitatory')
Example #5
def setup_pynn_populations():
    pynnn.setup()
    Tns.p1 = pynnn.Population(64,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.p2 = pynnn.Population(64,
                              pynnn.IF_curr_alpha,
                              structure=pynnn.space.Grid2D())
    Tns.prj1_2 = pynnn.Projection(
        Tns.p1,
        Tns.p2,
        pynnn.AllToAllConnector(allow_self_connections=False),
        target='excitatory')
    # Weights in nA as IF_curr_alpha uses current-based synapses
    Tns.prj1_2.set("weight", 1)
    Tns.max_weight = 33
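For context on the two weight lines above, here is a minimal, self-contained sketch of the same step with the current Projection API; the Brian backend and the 0-33 nA uniform range are illustrative assumptions, not part of the test fixture:

import pyNN.brian as sim  # any pyNN backend used elsewhere on this page works
from pyNN.random import RandomDistribution

sim.setup()
p1 = sim.Population(4, sim.IF_curr_alpha)
p2 = sim.Population(4, sim.IF_curr_alpha)
prj = sim.Projection(p1, p2,
                     sim.AllToAllConnector(allow_self_connections=False),
                     sim.StaticSynapse(weight=1.0))  # weight in nA (current-based synapse)

# weights can also be drawn from a distribution instead of a constant
prj.set(weight=RandomDistribution('uniform', (0.0, 33.0)))
print(prj.get('weight', format='list', with_address=False))
sim.end()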
Example #6
def test_add_pynn_population_processes_all_units():
    """add_pynn_population checks the int value of each unit it's given."""
    pop_size = 27
    pynnn.simulator.ID.__int__.return_value = 1
    pynn_pop1 = pynnn.Population(pop_size, pynnn.IF_cond_alpha)
    A.add_pynn_population(pynn_pop1)
    for u in pynn_pop1.all():
        assert u.__int__.call_count == pop_size, \
            "units missed in the 2D case"
    pynnn.simulator.ID.__int__.reset_mock()
    pynnn.simulator.ID.__int__.return_value = 1
    pynn_pop2 = pynnn.Population(pop_size, pynnn.IF_cond_alpha,
                                 structure = pynnn.space.Grid3D())
    A.add_pynn_population(pynn_pop2, alias = "testmap")
    for u in pynn_pop2.all():
        assert u.__int__.call_count == pop_size, "units missed in the 3D case"
Example #7
def run_sim(ncell):

    print "Cells: ", ncell

    setup0 = time.time()

    sim.setup(timestep=0.1)

    hh_cell_type = sim.HH_cond_exp()

    hh = sim.Population(ncell, hh_cell_type)

    pulse = sim.DCSource(amplitude=0.5, start=20.0, stop=80.0)
    pulse.inject_into(hh)

    hh.record('v')

    setup1 = time.time()

    t0 = time.time()

    sim.run(100.0)

    v = hh.get_data()

    sim.end()

    t1 = time.time()

    setup_total = setup1 - setup0
    run_total = t1 - t0
    print "Setup: ", setup_total
    print "Run: ", run_total
    print "Total sim time: ", setup_total + run_total
    return run_total
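A possible driver for the benchmark above (illustrative only; it assumes run_sim and its sim/time imports live in the same module):

if __name__ == "__main__":
    # scan a few network sizes and collect the wall-clock run times
    results = {}
    for ncell in (10, 100, 1000):
        results[ncell] = run_sim(ncell)
    for ncell, seconds in sorted(results.items()):
        print("%6d cells -> %.2f s" % (ncell, seconds))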
Example #8
    def test_partitioning(self):
        p1 = sim.Population(5, sim.IF_cond_exp())
        p2 = sim.Population(7, sim.IF_cond_exp())
        a = p1 + p2[1:4]
        # [0 2 3 4 5][x 1 2 3 x x x]
        prj = sim.Projection(a, a, MockConnector(), synapse_type=self.syn)
        presynaptic_indices = numpy.array([0, 3, 4, 6, 7])
        partitions = prj._partition(presynaptic_indices)
        self.assertEqual(len(partitions), 2)
        assert_array_equal(partitions[0], numpy.array([0, 3, 4]))
        assert_array_equal(partitions[1], numpy.array([2, 3]))

        # [0 1 2 3 4][x 1 2 3 x]
        self.assertEqual(prj._localize_index(0), (0, 0))
        self.assertEqual(prj._localize_index(3), (0, 3))
        self.assertEqual(prj._localize_index(5), (1, 1))
        self.assertEqual(prj._localize_index(7), (1, 3))
Example #9
def setup_and_fill_adapter():
    setup_adapter()
    Tns.pop_size = 27
    Tns.pynn_pop1 = pynnn.Population(Tns.pop_size, pynnn.IF_cond_alpha)
    Tns.ids1 = [int(u) for u in Tns.pynn_pop1.all()]
    Tns.pynn_pop2 = pynnn.Population(Tns.pop_size, pynnn.IF_cond_alpha,
                                     structure=pynnn.space.Grid3D())
    Tns.ids2 = [int(u) for u in Tns.pynn_pop2.all()]
    A.add_pynn_population(Tns.pynn_pop1)
    Tns.pop2_alias = "testmap"
    A.add_pynn_population(Tns.pynn_pop2, alias=Tns.pop2_alias)
    Tns.pynn_proj1 = pynnn.Projection(Tns.pynn_pop1, Tns.pynn_pop2,
                                      pynnn.OneToOneConnector())
    Tns.pynn_proj2 = pynnn.Projection(Tns.pynn_pop2, Tns.pynn_pop1,
                                      pynnn.AllToAllConnector())
    A.add_pynn_projection(Tns.pynn_pop1, Tns.pynn_pop2,
                          Tns.pynn_proj1)
    A.add_pynn_projection(Tns.pynn_pop2, Tns.pynn_pop1,
                          Tns.pynn_proj2)
Example #10
def test_adapter_methods_call_check_open():
    """methods in the methods_checking_open list have called check_open"""
    A.check_open = Mock(return_value=True)
    pynn_pop1 = pynnn.Population(1, pynnn.IF_cond_alpha)
    pynn_pop2 = pynnn.Population(1, pynnn.IF_cond_alpha)
    pynn_prj = pynnn.Projection(
        pynn_pop1, pynn_pop2,
        pynnn.OneToOneConnector(),
        target='excitatory')
    pynn_u = pynn_pop1[0]
    methods_checking_open = [
        [A.assert_open, ()],
        [A.commit_structure, ()],
        [A.add_pynn_population, (pynn_pop1,)],
        [A.add_pynn_projection, (pynn_pop1, pynn_pop1,
                                 pynn_prj)]]
    for m in methods_checking_open:
        m[0](*m[1])
        assert A.check_open.called, \
            m[0].__name__ + " does not call check_open."
        A.check_open.reset_mock()
Example #11
    def setUp(self):
        sim.setup()
        sim.Population.nPop = 0
        sim.Projection.nProj = 0
        self.target33 = sim.Population((3, 3), sim.IF_curr_alpha)
        self.target6 = sim.Population((6, ), sim.IF_curr_alpha)
        self.target1 = sim.Population((1, ), sim.IF_cond_exp)
        self.source5 = sim.Population((5, ), sim.SpikeSourcePoisson)
        self.source22 = sim.Population((2, 2), sim.SpikeSourcePoisson)
        self.source33 = sim.Population((3, 3), sim.SpikeSourcePoisson)
        self.expoisson33 = sim.Population((3, 3), sim.SpikeSourcePoisson,
                                          {'rate': 100})
Example #12
def generate_data(label):
    spikesTrain = []
    organisedData = {}
    for i in range(input_class):
        for j in range(input_len):
            neuid = (i, j)
            organisedData[neuid] = []
    for i in range(input_len):
        neuid = (label, i)
        organisedData[neuid].append(i * v_co)


#        if neuid not in organisedData:
#            organisedData[neuid]=[i*v_co]
#        else:
#            organisedData[neuid].append(i*v_co)
    for i in range(input_class):
        for j in range(input_len):
            neuid = (i, j)
            organisedData[neuid].sort()
            spikesTrain.append(organisedData[neuid])
    runTime = int(max(max(spikesTrain)))
    sim.setup(timestep=1)

    noise = sim.Population(input_size, sim.SpikeSourcePoisson(), label='noise')

    noise.record(['spikes'])  #noise

    sim.run(runTime)
    neonoise = noise.get_data(["spikes"])
    spikesnoise = neonoise.segments[0].spiketrains  #noise
    sim.end()
    for i in range(input_size):
        for noisespike in spikesnoise[i]:
            spikesTrain[i].append(noisespike)
            spikesTrain[i].sort()
    return spikesTrain
Example #13
def train(label, untrained_weights=None):
    organisedStim = {}
    labelSpikes = []
    spikeTimes = generate_data(label)

    for i in range(output_size):
        labelSpikes.append([])
    labelSpikes[label] = [int(max(max(spikeTimes))) + 1]

    if untrained_weights is None:
        untrained_weights = RandomDistribution('uniform',
                                               low=wMin,
                                               high=wMaxInit).next(input_size *
                                                                   output_size)
        #untrained_weights = RandomDistribution('normal_clipped', mu=0.1, sigma=0.05, low=wMin, high=wMaxInit).next(input_size*output_size)
        untrained_weights = np.around(untrained_weights, 3)
        #saveWeights(untrained_weights, 'untrained_weightssupmodel1traj')
        print("init!")

    print "length untrained_weights :", len(untrained_weights)

    if len(untrained_weights) > input_size:
        training_weights = [[0 for j in range(output_size)]
                            for i in range(input_size)
                            ]  #np array? size 1024x25
        k = 0
        #for i in untrained_weights:
        #    training_weights[i[0]][i[1]]=i[2]
        for i in range(input_size):
            for j in range(output_size):
                training_weights[i][j] = untrained_weights[k]
                k += 1
    else:
        training_weights = untrained_weights

    connections = []
    for n_pre in range(input_size):  # len(untrained_weights) = input_size
        for n_post in range(
                output_size
        ):  # len(untrained_weight[0]) = output_size; 0 or any n_pre
            connections.append((n_pre, n_post, training_weights[n_pre][n_post],
                                __delay__))  #index
    runTime = int(max(max(spikeTimes))) + 100
    #####################
    sim.setup(timestep=1)
    #def populations
    layer1 = sim.Population(input_size,
                            sim.SpikeSourceArray, {'spike_times': spikeTimes},
                            label='inputspikes')
    layer2 = sim.Population(output_size,
                            sim.IF_curr_exp,
                            cellparams=cell_params_lif,
                            label='outputspikes')
    supsignal = sim.Population(output_size,
                               sim.SpikeSourceArray,
                               {'spike_times': labelSpikes},
                               label='supersignal')

    #def learning rule
    stdp = sim.STDPMechanism(
        #weight=untrained_weights,
        #weight=0.02,  # this is the initial value of the weight
        #delay="0.2 + 0.01*d",
        timing_dependence=sim.SpikePairRule(tau_plus=tauPlus,
                                            tau_minus=tauMinus,
                                            A_plus=aPlus,
                                            A_minus=aMinus),
        #weight_dependence=sim.MultiplicativeWeightDependence(w_min=wMin, w_max=wMax),
        weight_dependence=sim.AdditiveWeightDependence(w_min=wMin, w_max=wMax),
        dendritic_delay_fraction=0)
    #def projections

    stdp_proj = sim.Projection(layer1,
                               layer2,
                               sim.FromListConnector(connections),
                               synapse_type=stdp)
    inhibitory_connections = sim.Projection(
        layer2,
        layer2,
        sim.AllToAllConnector(allow_self_connections=False),
        synapse_type=sim.StaticSynapse(weight=inhibWeight, delay=__delay__),
        receptor_type='inhibitory')
    stim_proj = sim.Projection(supsignal,
                               layer2,
                               sim.OneToOneConnector(),
                               synapse_type=sim.StaticSynapse(
                                   weight=stimWeight, delay=__delay__))

    layer1.record(['spikes'])

    layer2.record(['v', 'spikes'])
    supsignal.record(['spikes'])
    sim.run(runTime)

    print("Weights:{}".format(stdp_proj.get('weight', 'list')))

    weight_list = [
        stdp_proj.get('weight', 'list'),
        stdp_proj.get('weight', format='list', with_address=False)
    ]
    neo = layer2.get_data(["spikes", "v"])
    spikes = neo.segments[0].spiketrains
    v = neo.segments[0].filter(name='v')[0]
    neostim = supsignal.get_data(["spikes"])
    print(label)
    spikestim = neostim.segments[0].spiketrains
    neoinput = layer1.get_data(["spikes"])
    spikesinput = neoinput.segments[0].spiketrains

    plt.close('all')
    pplt.Figure(pplt.Panel(v,
                           ylabel="Membrane potential (mV)",
                           xticks=True,
                           yticks=True,
                           xlim=(0, runTime)),
                pplt.Panel(spikesinput,
                           xticks=True,
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                pplt.Panel(spikestim,
                           xticks=True,
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                pplt.Panel(spikes,
                           xticks=True,
                           xlabel="Time (ms)",
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                title="Training" + str(label),
                annotations="Training" +
                str(label)).save('plot/' + str(trylabel) + str(label) +
                                 '_training.png')
    #plt.hist(weight_list[1], bins=100)
    #plt.show()
    plt.close('all')
    print(wMax)
    '''
    plt.hist([weight_list[1][0:input_size], weight_list[1][input_size:input_size*2], weight_list[1][input_size*2:]], bins=20, label=['neuron 0', 'neuron 1', 'neuron 2'], range=(0, wMax))
    plt.title('weight distribution')
    plt.xlabel('Weight value')
    plt.ylabel('Weight count')
    '''
    #plt.show()
    #plt.show()

    sim.end()
    for i in weight_list[0]:
        #training_weights[int(i[0])][int(i[1])]=float(i[2])
        weight_list[1][int(i[0]) * output_size + int(i[1])] = i[2]
    return weight_list[1]
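The nested loops in train() that unflatten untrained_weights into an input_size x output_size matrix, and the final loop that writes the learned (pre, post, weight) triples back into a flat list, are both row-major reshapes; a small numpy sketch with illustrative sizes:

import numpy as np

input_size, output_size = 4, 3  # illustrative sizes
flat = np.arange(input_size * output_size, dtype=float)

# equivalent of the k-indexed double loop: row n_pre, column n_post
training_weights = flat.reshape(input_size, output_size)
assert training_weights[2][1] == flat[2 * output_size + 1]

# equivalent of the final write-back loop: flatten row-major again
assert np.array_equal(training_weights.reshape(-1), flat)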
Example #14
def test(spikeTimes, trained_weights, label):

    #spikeTimes = extractSpikes(sample)
    runTime = int(max(max(spikeTimes))) + 100

    ##########################################

    sim.setup(timestep=1)

    pre_pop = sim.Population(input_size,
                             sim.SpikeSourceArray, {'spike_times': spikeTimes},
                             label="pre_pop")
    post_pop = sim.Population(output_size,
                              sim.IF_curr_exp,
                              cell_params_lif,
                              label="post_pop")
    '''
    if len(untrained_weights)>input_size:
        training_weights = [[0 for j in range(output_size)] for i in range(input_size)] #np array? size 1024x25
        k=0
        for i in untrained_weights:
            training_weights[i[0]][i[1]]=i[2]
    '''
    if len(trained_weights) > input_size:
        weigths = [[0 for j in range(output_size)]
                   for i in range(input_size)]  #np array? size 1024x25
        k = 0
        for i in range(input_size):
            for j in range(output_size):
                weigths[i][j] = trained_weights[k]
                k += 1
    else:
        weigths = trained_weights

    connections = []

    #k = 0
    for n_pre in range(input_size):  # len(untrained_weights) = input_size
        for n_post in range(
                output_size
        ):  # len(untrained_weight[0]) = output_size; 0 or any n_pre
            #connections.append((n_pre, n_post, weigths[n_pre][n_post]*(wMax), __delay__))
            connections.append((n_pre, n_post, weigths[n_pre][n_post] *
                                (wMax) / max(trained_weights), __delay__))  #
            #k += 1

    prepost_proj = sim.Projection(
        pre_pop,
        post_pop,
        sim.FromListConnector(connections),
        synapse_type=sim.StaticSynapse(),
        receptor_type='excitatory')  # no more learning !!
    #inhib_proj = sim.Projection(post_pop, post_pop, sim.AllToAllConnector(), synapse_type=sim.StaticSynapse(weight=inhibWeight, delay=__delay__), receptor_type='inhibitory')
    # no more lateral inhib

    post_pop.record(['v', 'spikes'])
    sim.run(runTime)

    neo = post_pop.get_data(['v', 'spikes'])
    spikes = neo.segments[0].spiketrains
    v = neo.segments[0].filter(name='v')[0]
    f1 = pplt.Figure(
        # plot voltage
        pplt.Panel(v,
                   ylabel="Membrane potential (mV)",
                   xticks=True,
                   yticks=True,
                   xlim=(0, runTime + 100)),
        # raster plot
        pplt.Panel(spikes,
                   xlabel="Time (ms)",
                   xticks=True,
                   yticks=True,
                   markersize=2,
                   xlim=(0, runTime + 100)),
        title='Test with label ' + str(label),
        annotations='Test with label ' + str(label))
    f1.save('plot/' + str(trylabel) + str(label) + '_test.png')
    f1.fig.texts = []
    print("Weights:{}".format(prepost_proj.get('weight', 'list')))

    weight_list = [
        prepost_proj.get('weight', 'list'),
        prepost_proj.get('weight', format='list', with_address=False)
    ]
    #predict_label=
    sim.end()
    return spikes
Example #15
        (run_time * i + start_time_3, run_time * i + stop_time_3)).next(
            int((stop_time_3 - start_time_3) * rate_3 * 1e-3))
    # spike_times_4 = RandomDistribution('uniform', (run_time * i + start_time_4, run_time * i + stop_time_4)).next(int((stop_time_3 - start_time_3) * rate_3 * 1e-3))
    spike_time = np.concatenate((spike_times_1, spike_times_2, spike_times_3),
                                axis=0)
    # print spike_time
    # print type(spike_times_1)
    spike_time_run = np.hstack((spike_time_run, spike_time))

# print spike_time_run
# ==========simulation setup=====================

sim.setup()

# ==========generate OR read in the input spikes data=====================
Excinp = sim.Population(Exc_in,
                        sim.SpikeSourceArray(spike_times=spike_time_run))
Inhinp = sim.Population(Inh_in,
                        sim.SpikeSourceArray(spike_times=spike_time_run))

# ==========create neuron population=====================
# todo: the initial parameters of neurons might be modified
cell_type_parameters = {'tau_refrac': 0.1, 'v_thresh': -50.0, 'tau_m': 20.0,
                        'tau_syn_E': 0.5, 'v_rest': -65.0, 'cm': 1.0,
                        'v_reset': -65.0, 'tau_syn_I': 0.5, 'i_offset': 0.0}
# print(sim.IF_curr_alpha.default_parameters)

cell_type = sim.IF_cond_exp(**cell_type_parameters)  # neuron type of population
Pexc = sim.Population(
    N_e, cell_type, label="excitotary neurons")  # excitatory neuron population
Pinh = sim.Population(
    N_i, cell_type,
    label="inhibitatory neurons")  # inhibitory neuron population
Example #16
        (run_time * i + start_time_3, run_time * i + stop_time_3)).next(
            int((stop_time_3 - start_time_3) * rate_3 * 1e-3))
    # spike_times_4 = RandomDistribution('uniform', (run_time * i + start_time_4, run_time * i + stop_time_4)).next(int((stop_time_3 - start_time_3) * rate_3 * 1e-3))
    spike_time = np.concatenate((spike_times_1, spike_times_2, spike_times_3),
                                axis=0)
    # print spike_time
    # print type(spike_times_1)
    spike_time_run = np.hstack((spike_time_run, spike_time))

# print spike_time_run
# ==========simulation setup=====================

sim.setup()

# ==========generate OR read in the input spikes data=====================
Excinp = sim.Population(Exc_in,
                        sim.SpikeSourceArray(spike_times=spike_time_run))
Inhinp = sim.Population(Inh_in,
                        sim.SpikeSourceArray(spike_times=spike_time_run))

# # ==========create neuron population=====================
# # todo: the initial parameters of neurons might be modified
# cell_type_parameters = {'tau_refrac': 0.1, 'v_thresh': -50.0, 'tau_m': 20.0, 'tau_syn_E': 0.5, 'v_rest': -65.0,\
# 						'cm': 1.0, 'v_reset': -65.0, 'tau_syn_I': 0.5, 'i_offset': 0.0}
# # print(sim.IF_curr_alpha.default_parameters)

# cell_type = sim.IF_cond_exp(**cell_type_parameters) # neuron type of population
# Pexc = sim.Population(N_e, cell_type, label = "excitotary neurons") # excitatory neuron population
# Pinh = sim.Population(N_i, cell_type, label = "inhibitatory neurons") # inhibitoty neuron population
# all_cells = sim.Assembly(Pexc, Pinh) # assembly for all neuron population for the purpose of data recording
# # todo: the Population structure
Example #17
def test_add_pynn_population_sets_up_labels_and_aliases():
    pynn_pop3 = pynnn.Population(1, pynnn.IF_cond_alpha)
    A.add_pynn_population(pynn_pop3)
    assert A.aliases[Tns.pynn_pop1.label] == Tns.pynn_pop1.label
    assert A.aliases[Tns.pynn_pop2.label] == Tns.pop2_alias
    assert A.aliases[pynn_pop3.label] == pynn_pop3.label
Example #18
import pyNN.brian as sim

sim.setup()

p = sim.Population(5, sim.IF_cond_exp())
p.set(tau_m=15.0)
print(p.get('tau_m'))

p[0, 2, 4].set(tau_m=10)
print(p.get('tau_m'))
print(p[0].tau_m)

#random value
from pyNN.random import RandomDistribution, NumpyRNG
gbar_na_distr = RandomDistribution('normal', (20.0, 2.0),
                                   rng=NumpyRNG(seed=85524))
p = sim.Population(7, sim.HH_cond_exp(gbar_Na=gbar_na_distr))
print(p.get('gbar_Na'))
print(p[0].gbar_Na)

#setting from an array
import numpy as np
p = sim.Population(6,
                   sim.SpikeSourcePoisson(rate=np.linspace(10.0, 20.0, num=6)))
print(p.get('rate'))

#using function to calculate
from numpy import sin, pi
p = sim.Population(8, sim.IF_cond_exp(i_offset=lambda i: sin(i * pi / 8)))
print(p.get('i_offset'))
Example #19
runtime = 50
eta = 0.5  # learning rate
iter_no = 5  # learning iteration
source_rate = [20, 40]

w1_1 = 0.1
w1_2 = 0.1
w2_1 = 0.1
w2_2 = 0.1

# set up the classifier network
for i in range(len(training_label)):
    # for i in range(1):
    lg.info('iteration number %d' % i)
    sim.setup()
    In_1 = sim.Population(
        10, sim.SpikeSourcePoisson(rate=source_rate[training_label[i]]))
    In_2 = sim.Population(
        10, sim.SpikeSourcePoisson(rate=source_rate[1 - training_label[i]]))
    In = In_1 + In_2

    Out_1 = sim.Population(10, sim.IF_cond_exp())
    Out_2 = sim.Population(10, sim.IF_cond_exp())

    Out = Out_1 + Out_2

    syn_1_1 = sim.StaticSynapse(weight=w1_1, delay=0.5)
    syn_1_2 = sim.StaticSynapse(weight=w1_2, delay=0.5)
    syn_2_1 = sim.StaticSynapse(weight=w2_1, delay=0.5)
    syn_2_2 = sim.StaticSynapse(weight=w2_2, delay=0.5)
    prj_1_1 = sim.Projection(In_1,
                             Out_1,
Example #20
weightStim = 8.0  # weight of stimulating synapses
timingPrePostStim = 0.3  # limit of precision of spiking in ms
stimulusOffset = 100.0  # offset from beginning and end of emulation in ms (should be larger than timingPrePostPlastic)

# prepare stimuli
stimulus = np.arange(stimulusOffset,
                     (noSpikePairs - 0.5) * intervalPairs + stimulusOffset,
                     intervalPairs)
stimulusPlastic = stimulus + timingPrePostStim - timingPrePostPlastic
# print stimulusPlastic
# print stimulus, (noSpikePairs - 0.5) * intervalPairs
# assert(len(stimulus) == noSpikePairs)
sim.setup()

# create postsynaptic neuron
neuron = sim.Population(1, sim.IF_curr_exp())

spikeSourceStim = None
spikeSourcePlastic = None
# place stimulating synapses above plastic synapse
if row < noStim:
    if row > 0:
        dummy = sim.Population(row, sim.SpikeSourceArray)
    spikeSourcePlastic = sim.Population(1, sim.SpikeSourceArray,
                                        {'spike_times': stimulusPlastic})

# create stimulating inputs
spikeSourceStim = sim.Population(noStim, sim.SpikeSourceArray,
                                 {'spike_times': stimulus})

# place stimulating synapses below plastic synapse
Example #21

def plot_signal(signal, index, colour='b'):
    label = "Neuron %d " % index
    plt.plot(signal.times, signal, colour, label=label)
    plt.ylabel("%s (%s)" % (signal.name, signal.units._dimensionality.string))
    plt.setp(plt.gca().get_xticklabels(), visible=False)


run_time = 500
no_run = 3

sim.setup()
# Excinp = sim.Population(10, sim.SpikeSourcePoisson(rate = 20.0, start = 0, duration = run_time))
stimSpikes = RandomDistribution('uniform', low=0, high=500.0).next([1, 10])
Excinp = sim.Population(10, sim.SpikeSourceArray(spike_times=stimSpikes[0, :]))
# cell_type_parameters = {'tau_refrac': 0.1, 'v_thresh': -50.0, 'tau_m': 20.0, 'tau_syn_E': 0.5, 'v_rest': -65.0,\
# 'cm': 1.0, 'v_reset': -65.0, 'tau_syn_I': 0.5, 'i_offset': 0.0}
# print(sim.IF_curr_alpha.default_parameters)

# cell_type = sim.IF_cond_exp(**cell_type_parameters) # neuron type of population
Pexc = sim.Population(10,
                      sim.EIF_cond_exp_isfa_ista(),
                      label="excitotary neurons")
# Pexc.set(tau_refrac = 0.1, v_thresh = -50.0, tau_m = 20.0, tau_syn_E = 0.5, v_rest = -65.0, \
# 		cm = 1.0, v_reset = -65, tau_syn_I = 0.5, i_offset = 0.0)
# Pexc.initialize(**cell_type_parameters)
# print Pexc.celltype.default_initial_values
# print Pexc.get('tau_m')
# syn = sim.StaticSynapse(weight = 0.05, delay = 0.5)
depressing_synapse_ee = sim.TsodyksMarkramSynapse(weight=0.05,
Example #22
import pyNN.brian as sim
from pyNN.random import RandomDistribution, NumpyRNG

Exc_in = 32
Inh_in = 32
noSpikes = 20  # number of spikes per channel per simulation run
stimSpikes = RandomDistribution(
    'uniform', low=0, high=500.0, rng=NumpyRNG(seed=72386)
).next(
    [Exc_in + Inh_in, noSpikes]
)  # generate uniformly distributed spike times for Exc_in + Inh_in channels, with noSpikes per channel
# print stimSpikes

for i in range(Exc_in):
    if i == 0:
        Excinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
        Excinp = Excinp + spike_source

for i in range(Inh_in):
    if i == 0:
        Inhinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
        Inhinp = Inhinp + spike_source

# for p in Excinp.populations:
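A sketch of an alternative to the two loops above: recent pyNN versions accept one spike-time sequence per cell in SpikeSourceArray, so each group can be built as a single Population. The per-cell spike_times usage is an assumption about the API, not taken from the original script; the stimSpikes matrix is regenerated exactly as above:

import pyNN.brian as sim
from pyNN.random import RandomDistribution, NumpyRNG

Exc_in = Inh_in = 32
noSpikes = 20
stimSpikes = RandomDistribution('uniform', low=0, high=500.0,
                                rng=NumpyRNG(seed=72386)).next([Exc_in + Inh_in, noSpikes])

sim.setup()
# one population per group, one spike train per cell
Excinp = sim.Population(Exc_in, sim.SpikeSourceArray(
    spike_times=[stimSpikes[i, :] for i in range(Exc_in)]))
Inhinp = sim.Population(Inh_in, sim.SpikeSourceArray(
    spike_times=[stimSpikes[Exc_in + i, :] for i in range(Inh_in)]))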
Example #23
tc_parameters = ctx_parameters.copy()
tc_parameters.update({'a': 20.0, 'b': 0.0})

thalamocortical_type = sim.EIF_cond_exp_isfa_ista(**tc_parameters)
cortical_type = sim.EIF_cond_exp_isfa_ista(**ctx_parameters)
'''
populations
'''
#tec_cells=create(thalamocortical_type,n=100)
#tc_cells = Population(100, thalamocortical_type)
#ctx_cells = Population(500, cortical_type)

from pyNN.space import Grid2D, RandomStructure, Sphere
tc_cells = sim.Population(
    100,
    thalamocortical_type,
    structure=RandomStructure(boundary=Sphere(radius=200.0)),
    initial_values={'v': -70.0},
    label="Thalamocortical neurons")
from pyNN.random import RandomDistribution
v_init = RandomDistribution('uniform', (-70.0, -60.0))
ctx_cells = sim.Population(500,
                           cortical_type,
                           structure=Grid2D(dx=10.0, dy=10.0),
                           initial_values={'v': v_init},
                           label="Cortical neurons")
pre = tc_cells[:50]
post = ctx_cells[:50]
excitatory_connections = sim.Projection(pre, post, sim.AllToAllConnector(),
                                        sim.StaticSynapse(weight=0.123))
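Because both populations above are given a spatial structure, their coordinates can be inspected afterwards; a short sketch, assuming the standard Population.positions attribute (a 3 x N array) and the per-cell position property:

print(tc_cells.positions.shape)    # (3, 100): x, y, z for each cell in the sphere
print(ctx_cells.positions[:, :3])  # coordinates of the first three grid cells
print(ctx_cells[0].position)       # position of a single cell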
Example #24
#full example
from pyNN.space import Space
import pyNN.brian as sim
from pyNN.random import RandomDistribution, NumpyRNG
import numpy as np
import matplotlib.pyplot as plt

N_e = 75  # number of excitatory neurons
N_i = 25  # number of inhibitory neurons
Exc_in = 32  # number of excitatory inputs
Inh_in = 32  # number of inhibitatory inputs
weight_ini = 0.16  # define the initial value of the input signal weight

run_time = 500  # define the simulation time per run

sim.setup()
# ==========generate OR read in the input spikes data=====================
Excinp = sim.Population(Exc_in, sim.SpikeSourcePoisson(rate=15))
Inhinp = sim.Population(Inh_in, sim.SpikeSourcePoisson(rate=15))

# ==========create neuron population=====================
# todo: the initial parameters of neurons might be modified
cell_type_parameters = {'tau_refrac': 0.1, 'v_thresh': -50.0, 'tau_m': 20.0,
                        'tau_syn_E': 0.5, 'v_rest': -65.0, 'cm': 1.0,
                        'v_reset': -65.0, 'tau_syn_I': 0.5, 'i_offset': 0.0}
# print(sim.IF_curr_alpha.default_parameters)

cell_type = sim.IF_cond_exp(**cell_type_parameters)  # neuron type of population
Pexc = sim.Population(
    N_e, cell_type, label="excitotary neurons")  # excitatory neuron population
Pinh = sim.Population(
    N_i, cell_type,
    label="inhibitatory neurons")  # inhibitoty neuron population
Example #25
import pyNN.brian as sim
import numpy as np 
import matplotlib.pyplot as plt

run_time = 500
no_run = 3

sim.setup()
Excinp = sim.Population(10, sim.SpikeSourcePoisson(rate=20.0, start=0,
                                                   duration=run_time * no_run))
# cell_type_parameters = {'tau_refrac': 0.1, 'v_thresh': -50.0, 'tau_m': 20.0, 'tau_syn_E': 0.5, 'v_rest': -65.0,\
						# 'cm': 1.0, 'v_reset': -65.0, 'tau_syn_I': 0.5, 'i_offset': 0.0}
# print(sim.IF_curr_alpha.default_parameters)

# cell_type = sim.IF_cond_exp(**cell_type_parameters) # neuron type of population
Pexc = sim.Population(10, sim.EIF_cond_exp_isfa_ista(), label = "excitotary neurons")
# Pexc.set(tau_refrac = 0.1, v_thresh = -50.0, tau_m = 20.0, tau_syn_E = 0.5, v_rest = -65.0, \
# 		cm = 1.0, v_reset = -65, tau_syn_I = 0.5, i_offset = 0.0)
# Pexc.initialize(**cell_type_parameters)
# print Pexc.celltype.default_initial_values
# print Pexc.get('tau_m')
# syn = sim.StaticSynapse(weight = 0.05, delay = 0.5)
# depressing_synapse_ee = sim.TsodyksMarkramSynapse(weight = 0.05, delay = 0.2, U = 0.5, tau_rec = 800.0, tau_facil = 0.01)
facilitating_synapse_ee = sim.TsodyksMarkramSynapse(weight=0.05, delay=0.5,
                                                    U=0.04, tau_rec=100.0,
                                                    tau_facil=1000)
connection = sim.Projection(Excinp, Pexc, sim.AllToAllConnector(),
                            facilitating_synapse_ee,
                            receptor_type='excitatory')


# E_E_connection = sim.Projection(Pexc, Pexc, sim.FixedProbabilityConnector(p_connect = 0.5), depressing_synapse_ee, receptor_type = 'excitatory')

Excinp.record('spikes')
# Excinp[1].record('v')
Pexc.record('spikes')
Example #26

Number_of_neurons_lsm = 125
Net_shape = (5, 5, 5)  # 5x5 => layer, 5 => number of layers
Number_of_neurons_I = 10
Net_shape_I = (10, 1, 1)

## === Define parameters ========================================================

'''
cell_params = {
    'tau_m'      : 20.0,   # (ms)
    'tau_syn_E'  : 2.0,    # (ms)
    'tau_syn_I'  : 4.0,    # (ms)
    'e_rev_E'    : 0.0,    # (mV)
    'e_rev_I'    : -70.0,  # (mV)
    'tau_refrac' : 2.0,    # (ms)
    'v_rest'     : -60.0,  # (mV)
    'v_reset'    : -70.0,  # (mV)
    'v_thresh'   : -50.0,  # (mV)
    'cm'         : 0.5}    # (nF)
dt         = 0.1           # (ms)
syn_delay  = 1.0           # (ms)
input_rate = 50.0          # (Hz)
simtime    = 1000.0        # (ms)
}
'''
## === Build Networks ========================================================
Population = {}
Population['Input'] = sim.Population(Number_of_neurons_I, sim.IF_curr_exp())
Population['Liquid'] = sim.Population(Number_of_neurons_lsm, sim.IF_curr_exp())
Example #27
# ==========generate OR read in the input spikes data=====================
noSpikes = 20  # number of spikes per channel per simulation run
stimSpikes = RandomDistribution(
    'uniform', low=0, high=run_time, rng=NumpyRNG(seed=72386)
).next(
    [Exc_in + Inh_in, noSpikes]
)  # generate uniformly distributed spike times for Exc_in + Inh_in channels, with noSpikes per channel
# todo: 64 channels represent different data

sim.setup()  # start buiding up the network topology

# ==========create the input signal neuron population==================
# form the Exc_in channels of excitatory inputs into the assembly Excinp
for i in range(Exc_in):
    if i == 0:
        Excinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i, :]))
        Excinp = Excinp + spike_source

# form the Inh_in channels of inhibitory inputs into the assembly Inhinp
for i in range(Inh_in):
    if i == 0:
        Inhinp = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
    else:
        spike_source = sim.Population(
            1, sim.SpikeSourceArray(spike_times=stimSpikes[i + Exc_in, :]))
        Inhinp = Inhinp + spike_source
Example #28
import pyNN.brian as sim  # can of course replace `brian` with `nest`, `neuron`, etc.
import matplotlib.pyplot as plt
import numpy as np

sim.setup(timestep=0.01)
p_in = sim.Population(10, sim.SpikeSourcePoisson(rate=10.0), label="input")
p_out = sim.Population(10, sim.EIF_cond_exp_isfa_ista(), label="AdExp neurons")

syn = sim.StaticSynapse(weight=0.05)
random = sim.FixedProbabilityConnector(p_connect=0.5)
connections = sim.Projection(p_in,
                             p_out,
                             random,
                             syn,
                             receptor_type='excitatory')

p_in.record('spikes')
p_out.record('spikes')  # record spikes from all neurons
# record other variables from first two neurons
p_out[0:2].record(['v', 'w', 'gsyn_exc'])

for i in range(2):
    sim.run(500.0)
    spikes_in = p_in.get_data()
    data_out = p_out.get_data()
    sim.reset()
    connections.set(weight=0.05)

sim.end()
print('finish simulation')
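As a follow-up to the loop above: reset() keeps the data recorded so far, so the Block returned by get_data() holds one Neo segment per run/reset cycle. A small sketch for inspecting it; the segment attribute names follow the Neo API already used on this page:

# data_out is the Block returned by p_out.get_data() in the last iteration
for run_index, segment in enumerate(data_out.segments):
    print("run %d: %d spike trains, %d analog signals"
          % (run_index, len(segment.spiketrains), len(segment.analogsignals)))
    vm = segment.filter(name='v')[0]  # membrane potential of the two recorded cells
    print("  v shape:", vm.shape)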
Example #29
import pyNN.brian as sim
sim.setup()

cell = sim.Population(1, sim.HH_cond_exp())
cell.record('v')
sim.run(100)
data = cell.get_data()
sim.end()
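A quick way to look at the trace recorded above (illustrative; it uses the same Neo filter(name='v') pattern as the training example earlier on this page):

import matplotlib.pyplot as plt

vm = data.segments[0].filter(name='v')[0]  # AnalogSignal holding the membrane trace
plt.plot(vm.times, vm)
plt.xlabel("Time (ms)")
plt.ylabel("Membrane potential (mV)")
plt.show()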
Example #30
import pyNN.brian as sim
import numpy as np
training_data = np.loadtxt('training_data_0_1.txt', delimiter=',')
training_label = training_data[:, -1]
training_rate = training_data[:, 0:64]
# print training_rate[1, :]
inputpop = []
sim.setup()
for i in range(np.size(training_rate, 1)):
    inputpop.append(
        sim.Population(1, sim.SpikeSourcePoisson(rate=abs(training_rate[0,
                                                                        i]))))

# print inputpop[0].get('rate')
# inputpop[0].set(rate = 8)
# print inputpop[0].get('rate')

pop = sim.Population(1, sim.IF_cond_exp(), label='exc')

prj1 = sim.Projection(inputpop[0],
                      pop,
                      sim.OneToOneConnector(),
                      synapse_type=sim.StaticSynapse(weight=0.04, delay=0.5),
                      receptor_type='inhibitory')
print(prj1.get('weight', format='list'))