Example #1
def test_sample_by_variance():
    pytest.importorskip("scipy")

    dt = 0.001
    t = np.arange(0.0, 1.0, dt) + dt

    # Four spike trains with very different rates: column 0 never spikes,
    # column 1 spikes every step, column 2 every 10 steps, column 3 every 100.
    spikes = np.zeros((len(t), 4))
    spikes[:, 1] = 1.0 / dt
    spikes[::10, 2] = 1.0 / dt
    spikes[::100, 3] = 1.0 / dt

    t_sampled, spikes_sampled = sample_by_variance(t, spikes, num=1, filter_width=0.001)
    assert (t_sampled == t).all()
    assert (spikes_sampled == spikes[:, [2]]).all()

    t_sampled, spikes_sampled = sample_by_variance(t, spikes, num=1, filter_width=0.1)
    assert (t_sampled == t).all()
    assert (spikes_sampled == spikes[:, [3]]).all()

    t_sampled, spikes_sampled = sample_by_variance(t, spikes, num=2, filter_width=0.1)
    assert (t_sampled == t).all()
    assert (spikes_sampled == spikes[:, [3, 2]]).all()

    t_sampled, spikes_sampled = sample_by_variance(t, spikes, num=20, filter_width=0.1)
    assert (t_sampled == t).all()
    assert (spikes_sampled == spikes[:, [3, 2, 1, 0]]).all()
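The snippets on this page drop their imports. Assuming the helpers come from nengo_extras (sample_by_variance, cluster, merge, and plot_spikes from nengo_extras.plot_spikes, rates_kernel from nengo_extras.neurons; an assumption, since the excerpts do not show their origin), a minimal setup sketch would be:

# Setup sketch only; the import locations are assumptions, not shown in the excerpts.
import numpy as np
import matplotlib.pyplot as plt
import pytest

from nengo_extras.neurons import rates_kernel
from nengo_extras.plot_spikes import (
    cluster,
    merge,
    plot_spikes,
    sample_by_variance,
)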
Example #2
plt.plot(t, xy[:, 0], label="X")
plt.plot(t, xy[:, 1], label="Y")
plt.legend()
plt.show()

# Firing-rate heatmap of every place-cell spike train over the whole run
plt.figure(figsize=(16, 6))
rates = rates_kernel(t, place_spikes).T
im = plt.imshow(rates,
                origin='upper',
                aspect='auto',
                interpolation='none',
                extent=[np.min(t), np.max(t), 0, rates.shape[0]])
plt.show()

# Keep the 250 highest-variance spike trains, then select a 4.5-10 s window
t_sample, place_spikes_sample = sample_by_variance(t,
                                                   place_spikes,
                                                   num=250,
                                                   filter_width=0.5)
t_cycle_idxs = np.where(np.logical_and(t_sample >= 4.5, t_sample <= 10))[0]

plt.figure(figsize=(16, 6))
rates = rates_kernel(t_sample, place_spikes_sample, tau=0.25).T
peaks = np.argmax(rates, axis=1)      # time bin of each neuron's peak rate
sorted_peaks = np.argsort(peaks)      # neuron order, sorted by peak time

plt.subplot(211)

plt.plot(t[t_cycle_idxs], xy[t_cycle_idxs, 0], label="X")
plt.plot(t[t_cycle_idxs], xy[t_cycle_idxs, 1], label="Y")

plt.subplot(212)
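The excerpt above computes sorted_peaks but is truncated before using it. In this kind of place-cell analysis the sort index is typically used to reorder the rate matrix so that neurons are stacked by the time of their peak activity; a sketch of that second panel, continuing the variables defined above (not part of the original excerpt):

# Sketch only: sorted rate heatmap for the 4.5-10 s window in the lower subplot.
plt.imshow(rates[sorted_peaks][:, t_cycle_idxs],
           origin='lower',
           aspect='auto',
           interpolation='none',
           extent=[t_sample[t_cycle_idxs].min(), t_sample[t_cycle_idxs].max(),
                   0, rates.shape[0]])
plt.xlabel("Time (s)")
plt.ylabel("Neuron (sorted by peak time)")
plt.show()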
Example #3
t = np.linspace(0, tstop, int(tstop / dt))
xy = np.load("xy.npy")
xy_reader = np.load("xy_reader.npy")
xy_reader_spikes = np.load("xy_reader_spikes.npy")

# Firing-rate heatmap of the xy_reader spike trains over the whole run
plt.figure(figsize=(16, 6))
rates = rates_kernel(t, xy_reader_spikes).T
im = plt.imshow(rates,
                origin='upper',
                aspect='auto',
                interpolation='none',
                extent=[np.min(t), np.max(t), 0, rates.shape[0]])
plt.show()

t_sample, xy_reader_spikes_sample = sample_by_variance(t,
                                                       xy_reader_spikes,
                                                       num=250,
                                                       filter_width=0.1)
t_cycle_idxs = np.where(np.logical_and(t_sample >= 4.5, t_sample <= 10))[0]

plt.figure(figsize=(16, 6))
rates = rates_kernel(t_sample, xy_reader_spikes_sample, tau=0.1).T
peaks = np.argmax(rates, axis=1)
sorted_peaks = np.argsort(peaks)

plt.subplot(311)

plt.plot(t[t_cycle_idxs], xy[t_cycle_idxs, 0], label="X")
plt.plot(t[t_cycle_idxs], xy[t_cycle_idxs, 1], label="Y")
plt.legend()

plt.subplot(312)
        plt.ylabel("Position")
        plt.show()

        plt.figure(figsize=(16, 6))
        plt.title("Training Error")
        plt.plot(sim.trange()[t_train_idxs], np.mean(sim.data[p_place_error][t_train_idxs], axis=1),
                 alpha=0.8, label="Mean place error")
        plt.xlabel("Time (s)")
        plt.ylabel("Output")
        plt.legend()
        plt.show()

        plt.figure(figsize=(16, 6))
        plt.title("Place Reader Spikes")
        plot_spikes(
            *merge(
                *cluster(
                    *sample_by_variance(
                        sim.trange(), sim.data[p_place_reader_spikes],
                        num=500, filter_width=.02),
                    filter_width=.002),
                num=50))
        plt.show()
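The nested call above chains three preprocessing steps before plotting. An unrolled version of the same pipeline (a sketch; the intermediate names t_spk and spk are introduced here only for illustration) makes the order of operations easier to follow:

# Sketch: the same spike-plotting pipeline as above, one step at a time.
t_spk = sim.trange()
spk = sim.data[p_place_reader_spikes]

# 1. Keep the 500 spike trains with the highest filtered variance.
t_spk, spk = sample_by_variance(t_spk, spk, num=500, filter_width=0.02)
# 2. Reorder the remaining trains so similar ones end up next to each other.
t_spk, spk = cluster(t_spk, spk, filter_width=0.002)
# 3. Merge neighbouring trains down to 50 rows for a readable raster.
t_spk, spk = merge(t_spk, spk, num=50)

plot_spikes(t_spk, spk)
plt.show()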