Example 1
def setup_pynn_populations_with_1_to_1_connectivity():
    """Create two 64-neuron Grid2D populations and join them one-to-one.

    Stores the populations as Tns.p1 / Tns.p2 and the excitatory
    OneToOne projection as Tns.prj1_2.
    """
    pynnn.setup()
    make_grid = pynnn.space.Grid2D
    Tns.p1 = pynnn.Population(64, pynnn.IF_curr_alpha, structure=make_grid())
    Tns.p2 = pynnn.Population(64, pynnn.IF_curr_alpha, structure=make_grid())
    Tns.prj1_2 = pynnn.Projection(
        Tns.p1, Tns.p2, pynnn.OneToOneConnector(), target='excitatory')
Example 2
def setup_and_fill_adapter():
    """Set up the adapter and register two populations and two projections.

    Populations (sizes stored in Tns.pop_size) are added to the adapter A,
    the second one under the alias Tns.pop2_alias; a one-to-one and an
    all-to-all projection between them are registered as well.
    """
    setup_adapter()
    Tns.pop_size = 27
    Tns.pop2_alias = "testmap"
    Tns.pynn_pop1 = pynnn.Population(Tns.pop_size, pynnn.IF_cond_alpha)
    Tns.ids1 = [int(unit) for unit in Tns.pynn_pop1.all()]
    Tns.pynn_pop2 = pynnn.Population(
        Tns.pop_size, pynnn.IF_cond_alpha, structure=pynnn.space.Grid3D())
    Tns.ids2 = [int(unit) for unit in Tns.pynn_pop2.all()]
    A.add_pynn_population(Tns.pynn_pop1)
    A.add_pynn_population(Tns.pynn_pop2, alias=Tns.pop2_alias)
    Tns.pynn_proj1 = pynnn.Projection(
        Tns.pynn_pop1, Tns.pynn_pop2, pynnn.OneToOneConnector())
    Tns.pynn_proj2 = pynnn.Projection(
        Tns.pynn_pop2, Tns.pynn_pop1, pynnn.AllToAllConnector())
    A.add_pynn_projection(Tns.pynn_pop1, Tns.pynn_pop2, Tns.pynn_proj1)
    A.add_pynn_projection(Tns.pynn_pop2, Tns.pynn_pop1, Tns.pynn_proj2)
Example 3
def test_adapter_methods_call_check_open():
    """methods in the methods_checking_open list have called check_open"""
    A.check_open = Mock(return_value=True)
    pop_a = pynnn.Population(1, pynnn.IF_cond_alpha)
    pop_b = pynnn.Population(1, pynnn.IF_cond_alpha)
    prj = pynnn.Projection(
        pop_a, pop_b,
        pynnn.OneToOneConnector(),
        target='excitatory')
    pynn_u = pop_a[0]
    # Each entry pairs an adapter method with the positional args to call
    # it with; every one of them is expected to go through check_open.
    cases = (
        (A.assert_open, ()),
        (A.commit_structure, ()),
        (A.add_pynn_population, (pop_a,)),
        (A.add_pynn_projection, (pop_a, pop_a, prj)),
    )
    for method, args in cases:
        method(*args)
        assert A.check_open.called, \
            method.__name__ + " does not call check_open."
        A.check_open.reset_mock()
Example 4
def train(label, untrained_weights=None):
    """Train the single-layer STDP network on one stimulus of class *label*.

    Builds the input spike trains for *label*, wires input -> output with
    plastic (STDP) synapses, adds lateral all-to-all inhibition between
    output neurons and a supervision spike (one time step after the last
    input spike) to the output neuron matching *label*, runs the
    simulation, saves a diagnostic plot, and returns the learned weights.

    Args:
        label: integer class index; selects the generated stimulus and the
            output neuron that receives the supervision spike.
        untrained_weights: optional starting weights — either a flat
            sequence of length input_size * output_size or an already
            nested [input_size][output_size] structure.  When None, random
            uniform weights in [wMin, wMaxInit) are drawn.

    Returns:
        Flat list of length input_size * output_size holding the weights
        after training, in row-major (pre-index major) order.
    """
    spikeTimes = generate_data(label)
    # One spike list per output neuron; only the supervised neuron fires,
    # one step after the last input spike.
    last_spike = int(max(max(spikeTimes)))
    labelSpikes = [[] for _ in range(output_size)]
    labelSpikes[label] = [last_spike + 1]

    if untrained_weights is None:  # 'is None' — '== None' is unidiomatic
        untrained_weights = RandomDistribution(
            'uniform', low=wMin,
            high=wMaxInit).next(input_size * output_size)
        untrained_weights = np.around(untrained_weights, 3)
        print("init!")

    # NOTE: the original used a Python-2 print statement here, which is a
    # SyntaxError under Python 3 (every other call in this file uses the
    # print() function); output text is unchanged.
    print("length untrained_weights :", len(untrained_weights))

    if len(untrained_weights) > input_size:
        # Flat weight vector: reshape row-major into input_size rows of
        # output_size columns (e.g. 1024 x 25).
        training_weights = [
            untrained_weights[row * output_size:(row + 1) * output_size]
            for row in range(input_size)
        ]
    else:
        # Already nested — use as-is.
        training_weights = untrained_weights

    # (pre, post, weight, delay) tuples for FromListConnector.
    connections = [
        (n_pre, n_post, training_weights[n_pre][n_post], __delay__)
        for n_pre in range(input_size)
        for n_post in range(output_size)
    ]
    runTime = last_spike + 100
    #####################
    sim.setup(timestep=1)

    # Populations: spike sources in, LIF out, plus the supervision source.
    layer1 = sim.Population(input_size,
                            sim.SpikeSourceArray, {'spike_times': spikeTimes},
                            label='inputspikes')
    layer2 = sim.Population(output_size,
                            sim.IF_curr_exp,
                            cellparams=cell_params_lif,
                            label='outputspikes')
    supsignal = sim.Population(output_size,
                               sim.SpikeSourceArray,
                               {'spike_times': labelSpikes},
                               label='supersignal')

    # Learning rule: additive STDP with a spike-pair timing rule.
    # dendritic_delay_fraction=0 because some backends (e.g. Brian) treat
    # all delays as axonal for STDP purposes.
    stdp = sim.STDPMechanism(
        timing_dependence=sim.SpikePairRule(tau_plus=tauPlus,
                                            tau_minus=tauMinus,
                                            A_plus=aPlus,
                                            A_minus=aMinus),
        weight_dependence=sim.AdditiveWeightDependence(w_min=wMin, w_max=wMax),
        dendritic_delay_fraction=0)

    # Projections: plastic feed-forward, static lateral inhibition
    # (no autapses), and the static one-to-one supervision drive.
    stdp_proj = sim.Projection(layer1,
                               layer2,
                               sim.FromListConnector(connections),
                               synapse_type=stdp)
    inhibitory_connections = sim.Projection(
        layer2,
        layer2,
        sim.AllToAllConnector(allow_self_connections=False),
        synapse_type=sim.StaticSynapse(weight=inhibWeight, delay=__delay__),
        receptor_type='inhibitory')
    stim_proj = sim.Projection(supsignal,
                               layer2,
                               sim.OneToOneConnector(),
                               synapse_type=sim.StaticSynapse(
                                   weight=stimWeight, delay=__delay__))

    layer1.record(['spikes'])
    layer2.record(['v', 'spikes'])
    supsignal.record(['spikes'])
    sim.run(runTime)

    print("Weights:{}".format(stdp_proj.get('weight', 'list')))

    # [0]: (pre, post, weight) records; [1]: flat weights, address-free.
    weight_list = [
        stdp_proj.get('weight', 'list'),
        stdp_proj.get('weight', format='list', with_address=False)
    ]
    neo = layer2.get_data(["spikes", "v"])
    spikes = neo.segments[0].spiketrains
    v = neo.segments[0].filter(name='v')[0]
    neostim = supsignal.get_data(["spikes"])
    print(label)
    spikestim = neostim.segments[0].spiketrains
    neoinput = layer1.get_data(["spikes"])
    spikesinput = neoinput.segments[0].spiketrains

    # Diagnostic figure: membrane potential plus input / supervision /
    # output rasters over the full run.
    plt.close('all')
    pplt.Figure(pplt.Panel(v,
                           ylabel="Membrane potential (mV)",
                           xticks=True,
                           yticks=True,
                           xlim=(0, runTime)),
                pplt.Panel(spikesinput,
                           xticks=True,
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                pplt.Panel(spikestim,
                           xticks=True,
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                pplt.Panel(spikes,
                           xticks=True,
                           xlabel="Time (ms)",
                           yticks=True,
                           markersize=2,
                           xlim=(0, runTime)),
                title="Training" + str(label),
                annotations="Training" +
                str(label)).save('plot/' + str(trylabel) + str(label) +
                                 '_training.png')
    plt.close('all')
    print(wMax)

    sim.end()
    # Write the addressed weights back into the flat list so the returned
    # vector is row-major regardless of the backend's reporting order.
    for entry in weight_list[0]:
        weight_list[1][int(entry[0]) * output_size + int(entry[1])] = entry[2]
    return weight_list[1]
Example 5
                                        tau_minus=20.0,
                                        A_plus=0.01,
                                        A_minus=0.012),
    weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=0.04),
    dendritic_delay_fraction=0)
# NOTE: dendritic_delay_fraction=0 above because the pyNN.brian backend
# does not support dendritic delays: for the purpose of STDP calculations
# all delays are assumed to be axonal (the default value is 1.0).
'''
Connection algorithms
'''

# Demonstration of the available connector classes; each assignment below
# overwrites `connector` — only the last one bound is actually used.

# All-to-all, excluding self-connections (autapses); the default for
# allow_self_connections is True.
connector = sim.AllToAllConnector(allow_self_connections=False)  # no autapses

# Neuron i in the source connects only to neuron i in the target.
connector = sim.OneToOneConnector()

# Connect each pair of neurons with a fixed probability.
connector = sim.FixedProbabilityConnector(p_connect=0.2)

# Connect with a position-dependent probability given by a distance
# expression string d_expression, evaluated per pair; it must return a
# probability (a value between 0 and 1).
DDPC = sim.DistanceDependentProbabilityConnector
connector = DDPC("exp(-d)")
connector = DDPC("d<3")

# Divergent/fan-out connections: each pre-synaptic neuron connects to
# exactly n post-synaptic neurons chosen at random.
connector = sim.FixedNumberPostConnector(n=30)
Example 6
 def testOneToOne(self):
     """For all connections created with "OneToOne" ..."""
     connector = sim.OneToOneConnector(weights=0.5)
     prj = sim.Projection(self.source33, self.target33, connector)
     # A one-to-one projection has exactly one connection per target cell.
     expected = self.target33.cell.size
     self.assertEqual(prj._connections.W.getnnz(), expected)