Example #1
def BinSearchWeights(net, T, num_spikes, w_low, w_high, v0, u0, I0, ge0, neuron_names, synapse_names, \
        state_monitor_names, spike_monitor_names, parameters, end='low'):
    """
    Bisect the weight net[synapse_names[3]].w[0, 0] within [w_low, w_high] until
    the network emits exactly num_spikes output spikes, homing in on either the
    'low' or the 'high' end of the weight interval. Returns the resulting output
    spike times.
    """
    j = 0
    print "\tHoming in on ", end, " bound:"
    while True:

        net.restore('5')
        net[synapse_names[3]].w[0, 0] = (w_low + w_high) / 2
        net.store('5')
        snn.Run(net, T, v0, u0, I0, ge0, neuron_names, synapse_names, \
                state_monitor_names, spike_monitor_names, \
                parameters, 5)
        j += 1
        n_outspikes, spikes_out = GetSpikes(net,
                                            T,
                                            v0,
                                            u0,
                                            I0,
                                            ge0,
                                            neuron_names,
                                            synapse_names,
                                            state_monitor_names,
                                            spike_monitor_names,
                                            parameters,
                                            number=5)
        net.restore('5')

        print "\t\tw_low, w_high, n_outspikes = ", w_low, ", ", w_high, ", ", n_outspikes
        print "\t\t\tspikes_out", spikes_out[0]
        if end == 'low':
            if n_outspikes < num_spikes:
                w_low = (w_low + w_high) / 2
            elif n_outspikes >= num_spikes:
                if n_outspikes == num_spikes and abs(w_high - w_low) < 0.01:
                    break
                w_high = (w_low + w_high) / 2
        else:
            if n_outspikes <= num_spikes:
                if n_outspikes == num_spikes and abs(w_high - w_low) < 0.01:
                    break
                w_low = (w_low + w_high) / 2
            elif n_outspikes > num_spikes:
                w_high = (w_low + w_high) / 2

    return spikes_out[0]
Example #2
def TestNodeRange(T, N, v0, u0, bench, number, input_neurons, liquid_neurons,
                  hidden_neurons, output_neurons, Sin, Sliq, Sa, Sb, M, Mv, Mu,
                  S_in, S_hidden, S_out):
    """
    Zero the Sb weights, then sweep Sb.w[0] upward in steps of 0.001, recording
    the earliest and latest output spike times observed while the output neuron
    fires exactly once; stop once it fires more than once. The original Sb
    weights are restored before returning [earliest, latest].
    """
    n_hidden_last = len(hidden_neurons[-1])
    old_weights = np.empty(n_hidden_last)

    return_val = [-1, -1]

    for i in range(n_hidden_last):
        old_weights[i] = Sb.w[i]
        Sb.w[i] = 0

    j = 0
    Sb.w[0] = 0
    while True:

        snn.Run(T, v0, u0, bench, number, input_neurons, liquid_neurons, hidden_neurons, output_neurons, \
                Sin, Sliq, Sa, Sb, M, Mv, Mu, S_in, S_hidden, S_out, train=True, letter=None)
        #pudb.set_trace()
        spikes_out = S_out.spiketimes[0]
        #spikes_hidden = S_hidden.spiketimes[0]
        n_outspikes = len(spikes_out)
        print "n_outspikes, Sb.w[0] = ", n_outspikes, ", ", Sb.w[0]

        if n_outspikes == 1:
            if return_val[0] == -1:
                #pudb.set_trace()
                return_val[0] = spikes_out[0]  # - spikes_hidden[0]
            return_val[1] = spikes_out[0]
        elif n_outspikes > 1:
            #pudb.set_trace()
            break

        Sb.w[0] = Sb.w[0] + 0.001

        #if j % 1 == 0:
        #    snn.Plot(Mv, 0)
        #
        #j += 1

    for i in range(n_hidden_last):
        Sb.w[i] = old_weights[i]

    return return_val
Example #3
def Test(net, mnist, start, end, N_hidden, T, v0, u0, I0, ge0, \
        neuron_names, synapse_names, state_monitor_names, spike_monitor_names, parameters):
    """
    Evaluate the trained network on MNIST images with labels 0 or 1, taking
    end - start such images starting at index start. Returns the overall hit
    and miss counts plus per-digit tallies.
    """
    hit, miss = 0, 0
    hit_ind, miss_ind = np.zeros(10, dtype=int), np.zeros(10, dtype=int)

    print "Testing"
    #for number in range(start, end):
    number = start - 1
    count = 0
    while count < end - start:
        number += 1
        #pudb.set_trace()
        label = mnist[1][number]
        if label[0] == 0 or label[0] == 1:
            count += 1
            print "\tlabel = ", label,
            print "\tnumber = ", number
            net = snn.Run(net, mnist, number, T, v0, u0, I0, ge0, \
                        neuron_names, synapse_names, state_monitor_names, \
                        spike_monitor_names, parameters)
            S_l, S_i = _netoutput(net, spike_monitor_names, N_hidden)
            S_d = init.out(label)
            print "\t\tS_l = ", S_l
            print "\t\tS_d = ", S_d
            print "\t\tS_i = ", S_i
            index = init.out_inverse(S_d)
            result = Compare(S_l, S_d)
            if result == True:
                hit_ind[index] += 1
                hit += 1
            else:
                miss_ind[index] += 1
                miss += 1

    return hit, miss, hit_ind, miss_ind
Example #4
def GetWeightRange(net,
                   T,
                   num_spikes,
                   v0,
                   u0,
                   I0,
                   ge0,
                   neuron_names,
                   synapse_names,
                   state_monitor_names,
                   spike_monitor_names,
                   parameters,
                   number=5):

    """
    Double the weight net[synapse_names[3]].w[0, 0] while the output spike count
    is below num_spikes (and scale it by 1.2 while it matches) until the count
    exceeds num_spikes; the last two weights tried bracket the target and are
    returned for a subsequent binary search.
    """
    #pudb.set_trace()
    net.restore()
    n_hidden_last = len(net[neuron_names[2][-1]])
    old_weights = np.empty(n_hidden_last)

    extreme_spikes = [-1, -1]

    w_last = 1
    w_new = w_last  # fallback so w_new is defined even if the first run already exceeds num_spikes
    net[synapse_names[3]].w[:, 0] = np.zeros(n_hidden_last, dtype=float)
    net[synapse_names[3]].w[0, 0] = w_last
    net.store('5')

    j = 0
    print "\tDetermining weight range:"
    while True:

        net = snn.Run(net, 2*T, v0, u0, I0, ge0, neuron_names, synapse_names, \
                state_monitor_names, spike_monitor_names, \
                parameters, number=number)

        #pudb.set_trace()
        n_outspikes, spikes_out = GetSpikes(net, T, v0, u0, I0, ge0, neuron_names, synapse_names, \
                    state_monitor_names, spike_monitor_names, \
                    parameters, number=number)

        #pudb.set_trace()
        print "\t\tj, w, n_outspikes = ", j, ", ", net[synapse_names[3]].w[
            0, 0][0], ", ", n_outspikes
        print "\t\t\tspikes_out", spikes_out[0]
        if n_outspikes < num_spikes:
            w_last = net[synapse_names[3]].w[0, 0]
            net.restore('5')
            w_new = 2 * w_last
            net[synapse_names[3]].w[0, 0] = w_new
            net.store('5')
        elif n_outspikes == num_spikes:
            #w_last =
            net.restore('5')
            w_new = 1.2 * net[synapse_names[3]].w[0, 0]
            net[synapse_names[3]].w[0, 0] = w_new
            net.store('5')
        elif n_outspikes > num_spikes:
            break

        j += 1

    #w_low, w_high = w_last, net[synapse_names[3]].w[0, 0]
    net.restore()

    return net, w_last, w_new
Example #5
def set_number_spikes(net, mnist, layer, T, N_h, N_o, v0, u0, I0, ge0,
                      neuron_names, synapse_names, state_monitor_names,
                      spike_monitor_names, parameters):
    """
    This sets the number of spikes in the last hidden layer, and in the output layer, to
    N_h and N_o, respectively

    The network should start off with the last hidden layer having small but random weights 
    feeding into it such that the last hidden layer produces no spikes. Then,
    for each neuron in the last hidden layer, the weights feeding into it are gradually increased
    randomly through addiction of small numbers. If the number of spikes is too much, small random
    values are subtracted from the weights leading to it, until the desired number of spikes is
    emitted for every single input value produced in the input neurons.

    One issue is that it may take a long time to do this for more than one input sequence to the
    network as a whole, because the operations done for one input would be somewhat reversing
    the operations done for the other input, hence the likely usefullness of modifcation through
    random values.

    For each input combination that is fed into the network as a whole, it might help to have 
    different vectors which corresond to modification of weights. For instance, for network input
    0, you could modify every 4th neuron, for network input 1 you could modify every forth neuron
    but with an offset of 1, for network input 2 you modify every 4th neuron with an offset of 2,
    and so forth. That might be usefull.
    """

    dw_abs = 0.02
    min_dw_abs = 0.001
    i = 0

    print "layer = ", layer
    if layer == 0:
        dw_abs = 0.5
        #right_dw_abs = True
    else:
        dw_abs = 0.5
        #div = 0
    modified = True
    j = 0
    N = len(mnist[0])

    # Loop until no more modifications are made
    while modified == True:

        modified = False
        print "\tj = ", j
        j += 1
        k = 0

        # Loop over the different input values
        for number in xrange(N):
            #has_desired_spike_number = False
            print "\t\tNumber = ", number, "\t"
            if layer == 0:

                while True:
                    snn.Run(net, mnist, number, T, v0, u0, I0, ge0, \
                            neuron_names, synapse_names, state_monitor_names, \
                            spike_monitor_names, parameters)

                    #print "\t\t\tk = ", k, "\t",
                    k_modified, net = _basic_training(net, \
                            neuron_names[1], synapse_names[0], spike_monitor_names[1], number, dw_abs, N_h)

                    if k_modified == True:
                        modified = True
                    else:
                        break

                    k += 1

            elif layer == 1:

                #pudb.set_trace()
                N_h = out(mnist[1][number][0])

                while True:
                    snn.Run(net, mnist, number, T, v0, u0, I0, ge0, \
                            neuron_names, synapse_names, state_monitor_names, \
                            spike_monitor_names, parameters)

                    #pudb.set_trace()
                    k_modified, net = _basic_training(net, \
                            neuron_names[2], synapse_names[1], spike_monitor_names[2], number, dw_abs, N_h)

                    if k_modified == True:
                        modified = True
                    else:
                        break

                    k += 1
        if layer == 1:
            break

    return net
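
The perturbation scheme described in the docstring above can be illustrated in isolation. The sketch below is a minimal, hypothetical illustration and not part of the original code: toy_spike_count stands in for running the Brian network and reading a spike monitor, and the weights are a plain NumPy array rather than a Synapses object.

import numpy as np

def toy_spike_count(weights):
    # Hypothetical stand-in for snn.Run plus a spike monitor: the "spike count"
    # simply grows with the total (non-negative) synaptic drive onto the neuron.
    return int(np.sum(np.clip(weights, 0.0, None)) // 0.5)

def tune_spike_count(weights, target, dw_abs=0.02, max_iter=10000, seed=0):
    # Randomly nudge the weights feeding one neuron upward while it fires too
    # few spikes, and downward while it fires too many, until it emits exactly
    # `target` spikes; an isolated version of the adjust-until-target-count
    # idea described in the docstring above.
    rng = np.random.RandomState(seed)
    w = np.array(weights, dtype=float)
    for _ in range(max_iter):
        n = toy_spike_count(w)
        if n == target:
            break
        sign = 1.0 if n < target else -1.0
        w += sign * dw_abs * rng.rand(len(w))  # small random perturbation
    return w

w0 = np.full(5, 0.01)                    # small initial weights -> no spikes
w_tuned = tune_spike_count(w0, target=3)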
Example #6
def ReSuMe(desired_times, Pc, T, N, v0, u0, bench, number, input_neurons,
           liquid_neurons, hidden_neurons, output_neurons, Sin, Sliq, Sa, Sb,
           M, Mv, Mu, S_in, S_hidden, S_out):

    """
    Train the hidden-to-output weights Sb with a ReSuMe-style rule: run the
    network, compare the output spike train S_l against the desired train S_d,
    and adjust Sb.w[i] from the timing of hidden spikes until the performance
    index P drops below Pc.
    """
    img, label = snn.ReadImg(number=number, bench=bench)
    N_hidden_last = len(hidden_neurons[-1])
    N_out = len(output_neurons)

    N_h = 1
    N_o = 1

    trained = False

    while trained == False:
        for i in range(N_hidden_last):

            #pudb.set_trace()
            #print "\t\ti = ", i
            label = snn.Run(T, v0, u0, bench, number, input_neurons, liquid_neurons, hidden_neurons, output_neurons, \
                Sin, Sliq, Sa, Sb, M, Mv, Mu, S_in, S_hidden, S_out, train=True, letter=None)

            print "Hidden Times: ",
            for j in range(len(S_hidden)):
                print S_hidden[j].spiketimes, " ",

            print "\nOutput Times: ", S_out.spiketimes
            done = snn.CheckNumSpikes(T,
                                      N_h,
                                      N_o,
                                      v0,
                                      u0,
                                      bench,
                                      number,
                                      input_neurons,
                                      liquid_neurons,
                                      hidden_neurons,
                                      output_neurons,
                                      Sin,
                                      Sliq,
                                      Sa,
                                      Sb,
                                      M,
                                      Mv,
                                      Mu,
                                      S_in,
                                      S_hidden,
                                      S_out,
                                      train=False,
                                      letter=None)

            if done == False:
                print "ERROR!! WRONG NUMBER OF SPIKES!! Resetting No. Spikes!!!"
                #pudb.set_trace()
                snn.SetNumSpikes(T,
                                 N_h,
                                 N_o,
                                 v0,
                                 u0,
                                 bench,
                                 number,
                                 input_neurons,
                                 liquid_neurons,
                                 hidden_neurons,
                                 output_neurons,
                                 Sin,
                                 Sliq,
                                 Sa,
                                 Sb,
                                 M,
                                 Mv,
                                 Mu,
                                 S_in,
                                 S_hidden,
                                 S_out,
                                 train=False,
                                 letter=None)
            #pudb.set_trace()
            S_l = S_out.spiketimes
            S_i = S_hidden[-1].spiketimes
            S_d = desired_times[label]

            P = P_Index(S_l, S_d)
            print "\t\t\tP = ", P
            if P < Pc:
                trained = True
                break

            print "i = ", i
            #if i == 2:
            #    pudb.set_trace()
            # ReSuMe-style update: potentiate according to the interval between
            # the hidden spike and the desired output spike, depress according
            # to the interval to the actual output spike.
            sd = max(0, float(S_d) - S_i[i][0])  # hidden-to-desired interval
            sl = max(0, S_l[0][0] - S_i[i][0])   # hidden-to-actual interval
            Wd = WeightChange(sd)                # potentiation term
            Wl = -WeightChange(sl)               # depression term
            Sb.w[i] = Sb.w[i] + Wd + Wl
Example #7
                             Sb,
                             M,
                             Mv,
                             Mu,
                             S_in,
                             S_hidden,
                             S_out,
                             train=False,
                             letter=None)
            print "\tDone! for number = ", number

        #snn.SaveWeights(Sa, Sb, "weights.txt")

#pudb.set_trace()
#Sa[0].w[:] = '0*br.mV'
snn.Run(T, v0, u0, bench, 0, input_neurons, liquid_neurons, hidden_neurons,
        output_neurons, Sin, Sliq, Sa, Sb, M, Mv, Mu, S_in, S_hidden, S_out)
#pudb.set_trace()
#snn.Plot(N, Nu, Nv, 1)
snn.Plot(M, Mu, Mv, 1)

print "======================================================================"
print "\t\t\tTraining with ReSuMe"
print "======================================================================"

if bench == 'xor':
    if op.isfile("times.txt"):
        desired_times = train.ReadTimes("times.txt")
    else:

        desired_times = [-1, -1]
        extreme_spikes = train.TestNodeRange(T, N, v0, u0, bench, number,
Example #8
def ReSuMe(net, mnist, start, end, Pc, N_hidden, T, N_h, N_o, v0, u0, I0, ge0,
           neuron_names, synapse_names, state_monitor_names,
           spike_monitor_names, parameters):
    """
    ReSuMe-style training loop over MNIST digits 0 and 1: run the network on
    each selected image, compare the output spike train S_l against the desired
    train S_d, and adjust the weights of the last synapse group until the
    output spike count matches the desired count for at least 80% of the
    images between start and end. The trained weights are saved on exit.
    """

    trained = False
    N = len(mnist[0])
    N_hidden_last = len(net[neuron_names[-2]])
    N_out = len(net[neuron_names[-1]])

    N_h = 1
    N_o = 1
    correct = 0
    while True:
        print "========================== R O U N D =========================="
        correct = 0
        number = start - 1
        count = 0
        while count < end - start:

            dw = np.zeros(len(net[synapse_names[-1]]))

            number += 1
            label = mnist[1][number]
            if label[0] == 0 or label[0] == 1:
                count += 1
                print "number = ", number
                k = 0
                for i in range(1):

                    k += 1
                    N_h = init.out(mnist[1][number][0])
                    desired_index = number / 2

                    lst = range(N_hidden_last)
                    rnd.shuffle(lst)

                    net = snn.Run(net, mnist, number, T, v0, u0, I0, ge0, \
                                neuron_names, synapse_names, state_monitor_names, \
                                spike_monitor_names, parameters)

                    S_l, S_i = _netoutput(net, spike_monitor_names, N_hidden)
                    S_d = init.out(label)
                    if len(S_l[0]) == S_d[0]:
                        net.restore()
                        correct += 1
                        break

                    print "\t\tS_l = ", S_l
                    print "\t\tS_d = ", S_d
                    print "\t\tS_i = ", S_i

                    modified = False
                    w = net[synapse_names[-1]].w[:]
                    j = 0
                    if min(S_i) == []:
                        pudb.set_trace()
                    t_in_tmp = np.copy(S_i / br.ms)
                    t_in = t_in_tmp.flatten()
                    dw = _set_out_spike(net, j, t_in, S_l[j], S_d[j], N_hidden)
                    if type(dw) == np.ndarray:
                        print "\t\t\tdw = ", dw
                        modified = True
                        w += dw
                    net.restore()
                    net[synapse_names[-1]].w[:] = w.clip(0.1)
                    net.store()
                    if modified == False:
                        break

        if correct >= 0.8*(end - start):
            break

    init._save_weights(net, synapse_names, 0, len(synapse_names))
    F = open("weights/trained.txt", 'w')
    F.write("True")
    F.close()

    return net