Example #1

import numpy as np
from brian2 import Hz, second
from matplotlib.pyplot import plot, scatter, show

import csv_parse  # project-local helper for bucketing the series
def plot_data_and_spikes(data,
                         spike_mon,
                         test_dt,
                         min_run=0,
                         unique=False,
                         buckets=100):
    """
        Expects data to be scaled to the buckets that the spike monitor, spike_mon
        monitored.

        The dt - test_dt - is to scale spike times to indices in the data.

        min_run lets you lower bound the run you wish to see. It's assumed that
        a run is of duration len(data) * test_dt.

        unique is set to plot only differing x values (so if all neurons spike at
        the same time, this will effectively pick a random representative).k
    """
    y_list, x_list = [], []
    uniq = dict()
    for neuron in spike_mon:
        for time in spike_mon[neuron]:
            if time < min_run * test_dt:
                continue
            # 5 * test_dt compensates for the input lag; wrap into the data
            idx = int((time + 5 * test_dt) / test_dt) % len(data)
            x_list.append(idx)
            y_list.append(neuron * Hz)
            # check the wrapped index (the unwrapped check never matched
            # the wrapped keys): first spike per index wins
            if unique and idx not in uniq:
                uniq[idx] = neuron * second

    # RMS error between the bucketed data and the (unique) predicted buckets
    rms = 0
    mi, ma = min(data), max(data)
    for idx in uniq:
        actual = int((data[idx] - mi) / (ma - mi) * buckets)
        pred = uniq[idx] * Hz
        rms += (actual - pred)**2

    if unique:
        scatter(list(uniq.keys()), list(uniq.values()), color="red")
    else:
        scatter(x_list, y_list, color="red")
    plot(data, color="blue")
    show()
    # mean over the indices we actually predicted
    return np.sqrt(rms / len(uniq)) if uniq else 0.0
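
# Usage sketch for plot_data_and_spikes (hedged: assumes csv_parse and a
# completed Brian2 simulation; `mon` is a SpikeMonitor and `test` the raw
# series, as in the __main__ block of Example #6):
#
#     test_dt = 0.0001 * second
#     data = csv_parse.buildInputArray(100, test)[0]  # bucketed series
#     err = plot_data_and_spikes(data, mon.spike_trains(), test_dt,
#                                min_run=99 * len(data), unique=True)
#     print("per-index RMS error:", err)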
Example #2

import numpy as np
from brian2 import *

import csv_parse  # project-local helper for bucketing the series
def make_snn_and_run_once(ts,
                          numNeurons,
                          runs,
                          dt_ts=0.0001 * second,
                          use_weights=None,
                          save_as=None):
    # constants, equations, detritus
    start_scope()
    duration = len(ts)

    idxs, ts2 = csv_parse.buildInputArray(numNeurons, ts, repeats=runs)
    input_neur = SpikeGeneratorGroup(numNeurons, idxs, ts2 * dt_ts)
    # 5 * dt_ts is the lag
    idxs, ts = csv_parse.buildInputArray(numNeurons,
                                         ts,
                                         5 * dt_ts * Hz,
                                         repeats=runs)
    ash_excite = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)
    ash_inhib = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)

    taupre = 20 * ms
    taupost = taupre
    taue = 1 / 0.9 * ms
    gmax = 10
    dApre = .01
    dApost = -dApre * taupre / taupost * 1.05
    dApost *= gmax
    dApre *= gmax
    ged = 1
    a = 0.02 / ms
    b = 0.2 / ms
    c = -65 * mV  # resting potential
    d = 8 * mV / ms

    reset = '''
        v = c
        u += d
        '''

    eqs = '''
        dv/dt = (0.04/ms/mV)*v**2+(5/ms)*v+140*mV/ms-u + I : volt
        du/dt = a*(b*v-u) : volt/second
        dI/dt = -I / taue : volt/second
        '''

    neurons = NeuronGroup(numNeurons,
                          eqs,
                          threshold='v>30*mV',
                          reset=reset,
                          method='euler',
                          dt=dt_ts)

    # synapses
    S = Synapses(
        input_neur,
        neurons,
        '''w : 1
           dApre/dt = -Apre / taupre : 1 (event-driven)
           dApost/dt = -Apost / taupost : 1 (event-driven)''',
        on_pre='''I += w / radian * volt/second
                  Apre += dApre
                  w = clip(w + Apost, 0, gmax)''',
        on_post='''Apost += dApost
                   w = clip(w + Apre, 0, gmax)''',
    )
    S.connect()
    # S.w = np.random.rand(numNeurons ** 2)
    S.w = 6
    S2 = Synapses(
        ash_excite,
        neurons,
        '''w : 1''',
        on_pre='''I += w / radian * volt/second''',
    )
    S2.connect('i==j')
    S2.w = 6
    S3 = Synapses(
        ash_inhib,
        neurons,
        '''w : 1''',
        on_pre='''I += w / radian * volt/second''',
    )
    S3.connect('i!=j')
    S3.w = -5

    # Monitors
    mon = SpikeMonitor(neurons)
    # Run and record
    net = Network(input_neur, neurons, S, mon, ash_excite, S2, ash_inhib, S3)
    if use_weights is not None:
        net.restore('training', use_weights)
        return S.w

    for j in range(runs):
        print("training iter", j)
        # run() takes a duration, not an end time: one pass per repeat of the input
        net.run(duration * dt_ts, report='text')

    if save_as is not None:
        net.store('training', save_as)

    spoke = mon.spike_trains()
    print("spike trains:", spoke)

    # d = list(zip(mon.t, mon.smooth_rate(window="flat", width=normalization * dt_ts * second * second)))
    # list(map(print, d))
    # plot([i[0] for i in d], [i[1] for i in d])
    # show()
    return S.w
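
# csv_parse.buildInputArray is called throughout but never shown. A minimal
# sketch of the contract implied by its call sites (the name, signature, and
# behavior below are guesses, not the real csv_parse API): bucket each value
# into a neuron index and emit one spike per timestep, tiled `repeats` times.
def build_input_array_sketch(num_neurons, series, lag=0, repeats=1):
    series = np.asarray(series, dtype=float)
    lo, hi = series.min(), series.max()
    # map each value to a bucket / neuron index in [0, num_neurons)
    idxs = ((series - lo) / (hi - lo) * (num_neurons - 1)).astype(int)
    idxs = np.tile(idxs, repeats)
    # spike times in units of dt; callers scale by dt_ts, and `lag`
    # shifts the second input (cf. the 5 * dt_ts * Hz argument above)
    times = np.arange(len(idxs), dtype=float) + lag
    return idxs, times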
Example #3

# (assumes the same imports as Example #2)
def train_and_run(train_data,
                  test_data,
                  numNeurons,
                  runs,
                  dt_ts=0.0001 * second,
                  use_weights=None,
                  save_as=None):
    duration = len(test_data)

    sss = make_snn_and_run_once(train_data,
                                numNeurons,
                                runs,
                                dt_ts=dt_ts,
                                use_weights=use_weights,
                                save_as=save_as)
    print("Got weights", sss)

    # brian detritus
    start_scope()
    taue = 1 / 0.9 * ms
    a = 0.02 / ms
    b = 0.2 / ms
    c = -65 * mV  # resting potential
    d = 8 * mV / ms

    reset = '''
        v = c
        u += d
        '''

    eqs = '''
        dv/dt = (0.04/ms/mV)*v**2+(5/ms)*v+140*mV/ms-u + I : volt
        du/dt = a*(b*v-u) : volt/second
        dI/dt = -I / taue : volt/second
        '''
    idxs, ts = csv_parse.buildInputArray(numNeurons, test_data, repeats=runs)
    input_neur = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)

    neurons = NeuronGroup(numNeurons,
                          eqs,
                          threshold='v>30*mV',
                          reset=reset,
                          method='euler',
                          dt=dt_ts)

    S2 = Synapses(
        input_neur,
        neurons,
        '''w : 1''',
        on_pre='''I += w / radian * volt/second ''',
    )
    S2.connect()
    S2.w = sss

    mon = SpikeMonitor(neurons)
    net = Network(input_neur, neurons, S2, mon)
    if use_weights is not None:
        net.restore('testing', use_weights)
        return mon.spike_trains()

    for t in range(runs):
        print("testing iter", t)
        # run() takes a duration, not an end time: one pass per repeat of the input
        net.run(dt_ts * duration, report='text')

    if save_as is not None:
        net.store('testing', save_as)

    spike_trains = mon.spike_trains()
    print("spike trains:", spike_trains)

    return spike_trains
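
# How the two functions above compose end-to-end (a sketch mirroring the
# __main__ block in Example #6; FILE is whatever path csv_parse expects, and
# 'snn_state' is a hypothetical filename for Network.store):
#
#     train = csv_parse.return2018Data(FILE) * Hz
#     test = csv_parse.return2019Data(FILE) * Hz
#     spikes = train_and_run(train, test, numNeurons=100, runs=100,
#                            dt_ts=0.0001 * second, save_as='snn_state')
#     # spikes maps neuron index -> array of spike times (in seconds)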
Example #4

# (assumes the same imports as Example #2)
def make_snn_and_run_once(ts,
                          lags=[2, 3, 5],
                          duration=None,
                          dt_ts=0.0001 * second,
                          normalization=None):
    # constants, equations, detritus
    start_scope()
    if duration is None: duration = len(ts)

    numNeurons = 100  # csv_parse.getMinMaxDiff(FILE)
    idxs, ts2 = csv_parse.buildInputArray(numNeurons, ts, repeats=1000)
    test = idxs
    min_stock = min(ts)  # offset for the plot below; ts is overwritten next
    input_neur = SpikeGeneratorGroup(numNeurons, idxs, ts2 * dt_ts)
    # 5 * dt_ts is the lag
    idxs, ts = csv_parse.buildInputArray(numNeurons,
                                         ts,
                                         5 * dt_ts * Hz,
                                         repeats=1000)
    ash_excite = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)
    ash_inhib = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)

    N = 1000
    taupre = 20 * ms
    taupost = taupre
    taue = 1 / 0.9 * ms
    gmax = 10
    dApre = .01
    dApost = -dApre * taupre / taupost * 1.05
    dApost *= gmax
    dApre *= gmax
    ged = 1
    a = 0.02 / ms
    b = 0.2 / ms
    c = -65 * mV  # resting potential
    d = 8 * mV / ms

    reset = '''
        v = c
        u += d
        '''

    eqs = '''
        dv/dt = (0.04/ms/mV)*v**2+(5/ms)*v+140*mV/ms-u + I : volt
        du/dt = a*(b*v-u) : volt/second
        dI/dt = -I / taue : volt/second
        '''

    neurons = NeuronGroup(numNeurons,
                          eqs,
                          threshold='v>30*mV',
                          reset=reset,
                          method='euler',
                          dt=dt_ts)

    # synapses
    S = Synapses(input_neur,
                 neurons,
                 '''w : 1
                    dApre/dt = -Apre / taupre : 1 (event-driven)
                    dApost/dt = -Apost / taupost : 1 (event-driven)''',
                 on_pre='''I += w / radian * volt/second
                        Apre += dApre
                        w = clip(w + Apost, 0, gmax)''',
                 on_post='''Apost += dApost
                        w = clip(w + Apre, 0, gmax)''',
                 delay=dt_ts)
    S.connect()
    # S.w = np.random.rand(numNeurons ** 2)

    # Monitors
    # sss = StateMonitor(S, variables=['w'], record=range(10000), dt=dt_ts)
    # mon = StateMonitor(neurons, variables = ['v'],record=range(10000), dt=0.0001 * second )
    # mon = PopulationRateMonitor(neurons)
    mon = SpikeMonitor(neurons)
    # Run and record
    # net = Network(ash, input_neur, neurons, S, S2, sss)
    net = Network(input_neur, neurons, S, mon)
    for j in range(100):
        print("training iter", j)
        # run() takes a duration, not an end time: one pass per repeat of the input
        net.run(duration * dt_ts, report='text')

    spoke = mon.spike_trains()
    y_list, x_list = [], []
    for neuron in spoke:
        for time in spoke[neuron]:
            x_list.append(time / dt_ts)  # spike time -> sample index
            y_list.append(min_stock + neuron * Hz)
    scatter(x_list, y_list, color="red")
    plot(test, color="blue")
    show()

    print("GAY", mon.spike_trains())

    # d = list(zip(mon.t, mon.smooth_rate(window="flat", width=normalization * dt_ts * second * second)))
    # list(map(print, d))
    # plot([i[0] for i in d], [i[1] for i in d])
    # show()
    return S.w
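
# The plotting code converts spike times to sample indices inline. A small
# helper that makes the dependence on dt and the wrap-around for repeated
# runs explicit (a sketch, not part of the original code):
def spike_time_to_index(time, dt_ts, series_len=None):
    idx = int(time / dt_ts)   # seconds / seconds: dimensionless
    if series_len is not None:
        idx %= series_len     # wrap around for repeated runs
    return idx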
Example #5

# (assumes the same imports as Example #2)
def train_and_run(train_data,
                  test_data,
                  lags=[2, 3, 5],
                  dt_ts=0.0001 * second,
                  rate_est_window=None):
    # TODO: normalize: max(ts) is OK but not enough for an increasing series
    # (especially given cross-validation); see the sketch after this function
    # this feels like C
    normie = max(max(train_data), max(test_data)) * second
    if rate_est_window is None: rate_est_window = normie
    duration = len(test_data)

    sss = make_snn_and_run_once(train_data,
                                lags,
                                dt_ts=dt_ts,
                                normalization=normie)
    print("Got weights", sss)

    # brian detritus
    start_scope()
    taue = 1 / 0.9 * ms
    a = 0.02 / ms
    b = 0.2 / ms
    c = -65 * mV  # resting potential
    d = 8 * mV / ms

    reset = '''
        v = c
        u += d
        '''

    eqs = '''
        dv/dt = (0.04/ms/mV)*v**2+(5/ms)*v+140*mV/ms-u + I : volt
        du/dt = a*(b*v-u) : volt/second
        dI/dt = -I / taue : volt/second
        '''
    numNeurons = 100  #csv_parse.getMinMaxDiff(FILE)
    min_stock = min(test_data)
    idxs, ts = csv_parse.buildInputArray(numNeurons, test_data, repeats=1000)
    input_neur = SpikeGeneratorGroup(numNeurons, idxs, ts * dt_ts)

    neurons = NeuronGroup(numNeurons,
                          eqs,
                          threshold='v>30*mV',
                          reset=reset,
                          method='euler',
                          dt=dt_ts)

    S2 = Synapses(input_neur,
                  neurons,
                  '''w : 1''',
                  on_pre='''I += w / radian * volt/second ''',
                  delay=dt_ts)
    S2.connect()
    S2.w = sss

    mon = SpikeMonitor(neurons)
    net = Network(input_neur, neurons, S2, mon)
    for t in range(100):
        print("testing iter", t)
        # run() takes a duration, not an end time: one pass per repeat of the input
        net.run(dt_ts * duration, report='text')
    # TODO: is this also a use-after-free? consume the iterator to avoid it
    # return list(zip(mon.t, mon.smooth_rate(window='flat', width=rate_est_window * dt_ts)))
    spike_trains = mon.spike_trains()
    print('spike trains:', spike_trains)
    return spike_trains
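
# Sketch for the normalization TODO above: scaling by a global max leaks
# future information across cross-validation folds on a trending series.
# One common workaround (an assumption, not the author's method) is to feed
# first differences / returns instead of raw levels:
def to_returns(series):
    arr = np.asarray(series, dtype=float)
    return np.diff(arr) / arr[:-1]   # percent change; roughly stationary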
Example #6

# (assumes the same imports as Example #2; FILE is a path constant
# defined elsewhere)
if __name__ == "__main__":
    daddy_bezos = csv_parse.return2018Data(FILE) * Hz
    test = csv_parse.return2019Data(FILE) * Hz
    # test = np.fromiter(it.repeat(min(test), test.shape[0] * 3), int) * Hz

    test_dt = 0.0001 * second
    #spoke = list(train_and_run(daddy_bezos, test, [1], dt_ts=test_dt))
    min_stock = min(min(test), min(daddy_bezos))
    spoke = train_and_run(daddy_bezos, test, [1], dt_ts=test_dt)
    y_list, x_list = [], []
    uniq = dict()
    for neuron in spoke:
        for time in spoke[neuron]:
            x_list.append(time / test_dt)  # spike time -> sample index
            y_list.append(min_stock + neuron * Hz)
            idx = int(time / test_dt) % len(test)
            # keep only the final (100th) run; check the wrapped index so
            # the first spike per index wins
            if idx not in uniq and time >= 99 * len(test) * test_dt:
                uniq[idx] = min_stock + neuron * Hz

    print("ore wa mou plotto, ikimashou")
    scatter(x_list, y_list, color="red")
    plot(csv_parse.buildInputArray(1000, test, repeats=1000)[0], color="blue")
    show()

    scatter(list(uniq.keys()), list(uniq.values()), color="red")
    data = csv_parse.buildInputArray(100, test)[0]
    plot(data, color="blue")
    show()
#    print(rms_error(spoke, test, test_dt))
#    plot_exp_vs_obs(spoke, test, test_dt)
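
# rms_error is referenced above but never defined in this file. A hypothetical
# version, mirroring the RMS logic inside plot_data_and_spikes in Example #1
# (the real implementation may differ):
def rms_error(spike_trains, series, dt, buckets=100):
    lo, hi = min(series), max(series)
    preds = {}
    for neuron in spike_trains:
        for time in spike_trains[neuron]:
            idx = int(time / dt) % len(series)
            preds.setdefault(idx, neuron)  # first spike per index wins
    if not preds:
        return 0.0
    err = sum((int((series[i] - lo) / (hi - lo) * buckets) - p) ** 2
              for i, p in preds.items())
    return np.sqrt(err / len(preds))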