Example #1
def test_all_negative_activities(allclose, plt, seed, Simulator, Solver):
    class CheckActivitiesSolver(Solver):
        def __call__(self, A, Y, rng=np.random):
            assert np.all(A < 0)
            return super().__call__(A, Y, rng=rng)

    val = -0.5

    with nengo.Network(seed=seed) as net:
        input = nengo.Node(output=val, label="input")
        ens = nengo.Ensemble(
            30,
            1,
            neuron_type=nengo.Tanh(),
            encoders=Choice([[1]]),
            intercepts=Uniform(0, 0.95),
            eval_points=Uniform(-1, -0.1),
        )
        nengo.Connection(input, ens)
        in_p = nengo.Probe(input, "output")
        ens_p = nengo.Probe(
            ens, "decoded_output", synapse=0.05, solver=CheckActivitiesSolver()
        )

    with Simulator(net) as sim:
        sim.run(0.3)

    t = sim.trange()
    plt.plot(t, sim.data[in_p], label="Input")
    plt.plot(t, sim.data[ens_p], label="Neuron approximation, pstc=0.05")
    plt.xlim(right=t[-1])
    plt.legend(loc=0)

    assert allclose(sim.data[in_p], val, atol=0.1, rtol=0.01)
    assert allclose(sim.data[ens_p][-10:], val, atol=0.1, rtol=0.01)
Example #2
def go(N=2000, d=None, f=None, t=100, l=False, neuron_type=LIF(),
       m=Uniform(30, 30), i=Uniform(-1, 1), r=30, IC=np.array([0,0,0]),
       seed=0, dt=0.001, dtSample=0.001):

    with nengo.Network(seed=seed) as model:
        inpt = nengo.Node(lambda t: IC*(t<=1.0))
        tar = nengo.Ensemble(1, 3, neuron_type=nengo.Direct())
        ens = nengo.Ensemble(N, 3, max_rates=m, intercepts=i, neuron_type=neuron_type, seed=seed, radius=r)
        dss = nengo.Node(DownsampleNode(size_in=N, size_out=N, dt=dt, dtSample=dtSample), size_in=N, size_out=N)
        nengo.Connection(inpt, tar, synapse=None)
        # ~s is likely nengolib's integrator synapse (1/s); `s` and `feedback` are defined elsewhere in the source project
        nengo.Connection(tar, tar, function=feedback, synapse=~s)
        if l:
            nengo.Connection(tar, ens, synapse=None, seed=seed)
        else:
            nengo.Connection(inpt, ens, synapse=None, seed=seed)
            nengo.Connection(ens, ens, synapse=f, solver=NoSolver(d), seed=seed)
        nengo.Connection(ens.neurons, dss, synapse=None)
        pTar = nengo.Probe(tar, synapse=None, sample_every=dtSample)
        pEns = nengo.Probe(dss, synapse=None, sample_every=dtSample)

    with nengo.Simulator(model, seed=seed, dt=dt, progress_bar=False) as sim:
        sim.run(t+dt, progress_bar=True)

    return dict(
        times=sim.trange(),
        tar=sim.data[pTar],
        ens=sim.data[pEns])
Example #3
def test_noise_gen(Simulator, nl_nodirect, seed, plt, allclose):
    """Ensure that setting Ensemble.noise generates noise."""
    with nengo.Network(seed=seed) as model:
        intercepts = -0.5
        neg_noise, pos_noise = -5, 5
        model.config[nengo.Ensemble].neuron_type = nl_nodirect()
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        model.config[nengo.Ensemble].intercepts = Choice([intercepts])
        pos = nengo.Ensemble(1, 1, noise=WhiteNoise(Uniform(0, pos_noise)))
        normal = nengo.Ensemble(1, 1)
        neg = nengo.Ensemble(1, 1, noise=WhiteNoise(Uniform(neg_noise, 0)))
        pos_p = nengo.Probe(pos.neurons, synapse=0.1)
        normal_p = nengo.Probe(normal.neurons, synapse=0.1)
        neg_p = nengo.Probe(neg.neurons, synapse=0.1)
    with Simulator(model) as sim:
        sim.run(0.06)

    t = sim.trange()
    plt.title("intercepts=%d" % intercepts)
    plt.plot(t, sim.data[pos_p], c="b", label="noise=%d" % pos_noise)
    plt.plot(t, sim.data[normal_p], c="k", label="no noise")
    plt.plot(t, sim.data[neg_p], c="r", label="noise=%d" % neg_noise)
    plt.legend(loc="best")

    assert np.sum(sim.data[pos_p], axis=0) >= np.sum(sim.data[normal_p],
                                                     axis=0)
    assert np.sum(sim.data[normal_p], axis=0) >= np.sum(sim.data[neg_p],
                                                        axis=0)
    assert not allclose(sim.data[normal_p], sim.data[pos_p], record_rmse=False)
    assert not allclose(sim.data[normal_p], sim.data[neg_p], record_rmse=False)
Example #4
def go(d_ens, f_ens, n_neurons=3000, t=100, L=False, neuron_type=LIF(),
       m=Uniform(30, 40), i=Uniform(-1, 1), r=40, IC=np.array([1,1,1]),
       seed=0, dt=0.001, dt_sample=0.001, f=DoubleExp(1e-3, 1e-1)):

    with nengo.Network(seed=seed) as model:
        # Ensembles
        u = nengo.Node(lambda t: IC*(t<=1.0))
        x = nengo.Ensemble(1, 3, neuron_type=nengo.Direct())
        ens = nengo.Ensemble(n_neurons, 3, max_rates=m, intercepts=i, neuron_type=neuron_type, seed=seed, radius=r)
        dss = nengo.Node(DownsampleNode(size_in=n_neurons, size_out=n_neurons, dt=dt, dt_sample=dt_sample), size_in=n_neurons, size_out=n_neurons)

        # Connections
        nengo.Connection(u, x, synapse=None)
        nengo.Connection(x, x, function=feedback, synapse=~s)
        if L:
            supv = nengo.Ensemble(n_neurons, 3, neuron_type=SpikingRectifiedLinear(), radius=r, seed=seed)
            nengo.Connection(x, supv, synapse=None)
            nengo.Connection(supv, ens, synapse=f, seed=seed)
        else:
            nengo.Connection(ens, ens, synapse=f_ens, solver=NoSolver(d_ens), seed=seed)

        # Probes
        nengo.Connection(ens.neurons, dss, synapse=None)
        p_x = nengo.Probe(x, synapse=None, sample_every=dt_sample)
        p_ens = nengo.Probe(dss, synapse=None, sample_every=dt_sample)

    with nengo.Simulator(model, seed=seed, dt=dt) as sim:
        sim.run(t)

    return dict(
        times=sim.trange(),
        x=sim.data[p_x],
        ens=sim.data[p_ens])
Example #5
    def build(self, testY):
        self.count = 0

        def update(x):
            """
                Kalman Filter: X_k = A * X_k_1 + B * Y_k

            """
            Externalmat = np.mat(x[2:4]).T
            Inputmat = np.mat(x[0:2]).T
            Controlmat = np.matrix([[x[4], x[5]], [x[6], x[7]]])

            next_state = np.squeeze(
                np.asarray(Controlmat * Inputmat + Externalmat))
            return next_state

        with self.model:
            Dir_Nurons = nengo.Ensemble(1,
                                        dimensions=2 + 2 + 4,
                                        neuron_type=nengo.Direct())

            LIF_Neurons = nengo.Ensemble(
                self.N_A,
                dimensions=2,
                intercepts=Uniform(-1, 1),
                max_rates=Uniform(self.rate_A[0], self.rate_A[1]),
                neuron_type=nengo.LIFRate(tau_rc=self.t_rc,
                                          tau_ref=self.t_ref))

            state_func = Piecewise({
                0.0: [0.0, 0.0],
                self.dt:
                np.squeeze(np.asarray(np.mat([testY[0], testY[1]]).T)),
                2 * self.dt: [0.0, 0.0]
            })

            state = nengo.Node(output=state_func)
            # state_probe = nengo.Probe(state)

            external_input = nengo.Node(output=lambda t: self.data(t))
            # external_input_probe = nengo.Probe(external_input)

            control_signal = nengo.Node(output=lambda t: self.control(t))

            conn0 = nengo.Connection(state, Dir_Nurons[0:2])
            #
            conn1 = nengo.Connection(external_input, Dir_Nurons[2:4])

            conn2 = nengo.Connection(control_signal, Dir_Nurons[4:8])

            conn3 = nengo.Connection(Dir_Nurons,
                                     LIF_Neurons[0:2],
                                     function=update,
                                     synapse=self.tau)

            conn4 = nengo.Connection(LIF_Neurons[0:2], Dir_Nurons[0:2])

            self.output = nengo.Probe(LIF_Neurons[0:2])
            self.sim = nengo.Simulator(self.model, dt=self.dt)
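As a quick standalone check of the update() mapping above (plain NumPy with hypothetical values, not part of the original project): the 8-dimensional signal packs the previous state, the external term, and a flattened 2x2 control matrix.

import numpy as np

# Hypothetical input vector, laid out as update() expects:
#   x[0:2] -> previous state X_{k-1}
#   x[2:4] -> external term (B * Y_k, already combined upstream)
#   x[4:8] -> 2x2 control matrix A, flattened row-major
x = np.array([1.0, 2.0, 0.1, -0.2, 0.9, 0.0, 0.0, 0.9])

A = np.array([[x[4], x[5]], [x[6], x[7]]])
next_state = A @ x[0:2] + x[2:4]
print(next_state)  # [1.  1.6]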
Example #6
def test_distributions():
    check_init_args(PDF, ["x", "p"])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))
    assert (repr(PDF(
        [1, 2], [0.4, 0.6])) == "PDF(x=array([1., 2.]), p=array([0.4, 0.6]))")

    check_init_args(Uniform, ["low", "high", "integer"])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))
    assert repr(Uniform(0, 1)) == "Uniform(low=0, high=1)"
    assert repr(Uniform(
        0, 5, integer=True)) == "Uniform(low=0, high=5, integer=True)"

    check_init_args(Gaussian, ["mean", "std"])
    check_repr(Gaussian(0, 2))
    assert repr(Gaussian(1, 0.1)) == "Gaussian(mean=1, std=0.1)"

    check_init_args(Exponential, ["scale", "shift", "high"])
    check_repr(Exponential(2.0))
    check_repr(Exponential(2.0, shift=0.1))
    check_repr(Exponential(2.0, shift=0.1, high=10.0))
    assert repr(Exponential(2.0)) == "Exponential(scale=2.0)"

    check_init_args(UniformHypersphere, ["surface", "min_magnitude"])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))
    assert repr(UniformHypersphere()) == "UniformHypersphere()"
    assert repr(
        UniformHypersphere(surface=True)) == "UniformHypersphere(surface=True)"

    check_init_args(Choice, ["options", "weights"])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))
    assert repr(Choice([1, 2, 3])) == "Choice(options=array([1., 2., 3.]))"
    assert (repr(
        Choice([1, 2, 3], weights=[0.1, 0.5, 0.4])
    ) == "Choice(options=array([1., 2., 3.]), weights=array([0.1, 0.5, 0.4]))")

    check_init_args(Samples, ["samples"])
    check_repr(Samples([3, 2, 1]))
    assert repr(Samples([3, 2, 1])) == "Samples(samples=array([3., 2., 1.]))"

    check_init_args(SqrtBeta, ["n", "m"])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))
    assert repr(SqrtBeta(3)) == "SqrtBeta(n=3)"
    assert repr(SqrtBeta(3, 2)) == "SqrtBeta(n=3, m=2)"

    check_init_args(SubvectorLength, ["dimensions", "subdimensions"])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))
    assert repr(SubvectorLength(3)) == "SubvectorLength(dimensions=3)"

    check_init_args(CosineSimilarity, ["dimensions"])
    check_repr(CosineSimilarity(6))
    assert repr(CosineSimilarity(6)) == "CosineSimilarity(dimensions=6)"
Example #7
    def thresh_ens_config(self):
        cfg = nengo.Config(nengo.Ensemble)
        cfg[nengo.Ensemble].update({
            'radius': 1,
            'intercepts': Uniform(0.5, 1.0),
            'encoders': Choice([[1]]),
            'eval_points': Uniform(0.75, 1.1),
            'n_eval_points': self.n_eval_points,
        })
        return cfg
Example #8
    def thresh_ens_config(self):
        """(Config) Defaults for threshold ensemble creation."""
        cfg = Config(Ensemble)
        cfg[Ensemble].update({
            "radius": 1,
            "intercepts": Uniform(0.5, 1.0),
            "encoders": Choice([[1]]),
            "eval_points": Uniform(0.75, 1.1),
            "n_eval_points": self.n_eval_points,
        })
        return cfg
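Such a Config is typically consumed as a context manager. A brief, hedged usage sketch (my own illustration; it drops the project-specific self.n_eval_points and builds the config inline): any Ensemble created inside the context picks up these defaults.

import nengo
from nengo.dists import Choice, Uniform

cfg = nengo.Config(nengo.Ensemble)
cfg[nengo.Ensemble].update({
    "radius": 1,
    "intercepts": Uniform(0.5, 1.0),
    "encoders": Choice([[1]]),
})

with nengo.Network() as net, cfg:
    # Inherits the thresholding defaults set in cfg above.
    thresh_ens = nengo.Ensemble(50, 1)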
Example #9
    def __init__(self,
                 theta=Uniform(-np.pi, np.pi),
                 freq=Uniform(0.2, 2),
                 phase=Uniform(-np.pi, np.pi),
                 sigma_x=Choice([0.45]),
                 sigma_y=Choice([0.45])):
        self.theta = theta
        self.freq = freq
        self.phase = phase
        self.sigma_x = sigma_x
        self.sigma_y = sigma_y
Example #10
def goTarget(f1=Lowpass(0.01),
             f2=Lowpass(0.1),
             stim=lambda t: 0,
             gating=lambda t: 0,
             N=100,
             t=10,
             dt=0.001,
             m=Uniform(30, 30),
             i=Uniform(-1, 0.6),
             kInh=-1.5,
             seed=0):
    wInh = kInh * np.ones((N, 1))
    with nengo.Network(seed=seed) as model:
        inpt = nengo.Node(stim)
        gate = nengo.Node(gating)
        fdfw = nengo.Ensemble(N, 1, seed=seed)
        fdbk = nengo.Ensemble(N,
                              1,
                              max_rates=m,
                              intercepts=i,
                              neuron_type=nengo.LIF(),
                              seed=seed)
        ens = nengo.Ensemble(N,
                             1,
                             max_rates=m,
                             intercepts=i,
                             neuron_type=nengo.LIF(),
                             seed=seed)
        nengo.Connection(inpt, fdfw, synapse=None)
        nengo.Connection(fdfw, ens, synapse=f1)
        nengo.Connection(ens, fdbk, synapse=f1)
        nengo.Connection(fdbk, ens, synapse=f2)
        nengo.Connection(gate,
                         fdfw.neurons,
                         transform=wInh,
                         function=lambda x: x)
        nengo.Connection(gate,
                         fdbk.neurons,
                         transform=wInh,
                         function=lambda x: 1 - x)
        pInpt = nengo.Probe(inpt, synapse=f2)
        pGate = nengo.Probe(gate, synapse=None)
        pFdfw = nengo.Probe(fdfw, synapse=f2)
        pFdbk = nengo.Probe(fdbk, synapse=f2)
        pEns = nengo.Probe(ens, synapse=f2)
    with nengo.Simulator(model, seed=seed) as sim:
        sim.run(t)
    return dict(times=sim.trange(),
                inpt=sim.data[pInpt],
                gate=sim.data[pGate],
                fdfw=sim.data[pFdfw],
                fdbk=sim.data[pFdbk],
                ens=sim.data[pEns])
Example #11
def test_uniform(low, high, rng):
    n = 100
    dist = Uniform(low, high)
    samples = dist.sample(n, rng=rng)
    if low < high:
        assert np.all(samples >= low)
        assert np.all(samples < high)
    else:
        assert np.all(samples <= low)
        assert np.all(samples > high)
    hist, _ = np.histogram(samples, bins=5)
    assert np.allclose(hist - np.mean(hist), 0, atol=0.1 * n)
Example #12
def test_concatenate(plt, rng):
    n = 10000

    dist = Concatenate([Uniform(-1, 1),
                        Uniform(0, 1),
                        MultivariateGaussian([0, 2], [2, 1]),
                        Gaussian(3, 0.5)])
    pts = dist.sample(n, rng=rng)
    assert pts.shape == (n, 5)
    n, d = pts.shape

    for i in range(d):
        plt.subplot(d, 1, i+1)
        plt.hist(pts[:, i], bins=np.linspace(-4, 4, 101))
Example #13
def test_tuning_curves(Simulator, nl_nodirect, plt, seed, dimensions):
    radius = 10
    max_rate = 400
    model = nengo.Network(seed=seed)
    with model:
        ens = nengo.Ensemble(
            10,
            dimensions=dimensions,
            neuron_type=nl_nodirect(),
            max_rates=Uniform(200, max_rate),
            radius=radius,
        )
    with Simulator(model) as sim:
        eval_points, activities = tuning_curves(ens, sim)

    plot_tuning_curves(plt, eval_points, activities)

    # Check that eval_points cover up to the radius.
    assert np.abs(radius - np.max(np.abs(eval_points))) <= (2 * radius /
                                                            dimensions)

    assert np.all(activities >= 0)

    d = np.sqrt(np.sum(np.asarray(eval_points)**2, axis=-1))
    assert np.all(activities[d <= radius] <= max_rate)
Example #14
def test_frozen():
    """Test attributes inherited from FrozenObject"""
    a = Uniform(-0.3, 0.6)
    b = Uniform(-0.3, 0.6)
    c = Uniform(-0.2, 0.6)

    assert hash(a) == hash(a)
    assert hash(b) == hash(b)
    assert hash(c) == hash(c)

    assert a == b
    assert hash(a) == hash(b)
    assert a != c
    assert hash(a) != hash(c)  # not guaranteed, but highly likely
    assert b != c
    assert hash(b) != hash(c)  # not guaranteed, but highly likely
Example #15
def create_model(size, random_tuning_curves):
    names = ["One Neuron", "Two Neurons", "N Neurons"]
    encoders = [[[1]], [[1], [-1]], [[1], [-1]]]
    if size >= len(names):
        index = len(names) - 1
    else:
        index = size - 1

    model = nengo.Network(label=names[index])
    with model:
        if random_tuning_curves:
            neurons = nengo.Ensemble(size, dimensions=1)
        else:
            neurons = nengo.Ensemble(size, dimensions=1, intercepts=Uniform(-0.5, -0.5), max_rates=Uniform(100, 100),
                                     encoders=encoders[index])

        cos = nengo.Node(lambda t: np.cos(8 * t))
        # Connect the input signal to the neuron
        nengo.Connection(cos, neurons)
        # The original input
        cos_probe_ = nengo.Probe(cos)
        # The raw spikes from the neuron
        spikes = nengo.Probe(neurons.neurons)
        # Spikes filtered by a 10ms post-synaptic filter
        filtered = nengo.Probe(neurons, synapse=0.01)

    return model, neurons, cos_probe_, spikes, filtered
Example #16
class SpikingRectifiedLinear(RectifiedLinear):
    """A rectified integrate and fire neuron model.

    Each neuron is modeled as a rectified line. That is, the neuron's activity
    scales linearly with current, unless the current is less than zero, at
    which point the neural activity will stay at zero. This is a spiking
    version of the RectifiedLinear neuron model.

    Parameters
    ----------
    amplitude : float
        Scaling factor on the neuron output. Corresponds to the relative
        amplitude of the output spikes of the neuron.
    initial_state : {str: Distribution or array_like}
        Mapping from state variable names to their desired initial values.
        These values will override the defaults set in the class's state attribute.
    """

    state = {"spikes": Choice([0]), "voltage": Uniform(low=0, high=1)}

    def rates(self, x, gain, bias):
        """Use RectifiedLinear to determine rates."""

        J = self.current(x, gain, bias)
        out = np.zeros_like(J)
        RectifiedLinear.step(self, dt=1.0, J=J, rates=out)
        return out

    def step(self, dt, J, spikes, voltage):
        """Implement the integrate and fire nonlinearity."""

        voltage += np.maximum(J, 0) * dt
        n_spikes = np.floor(voltage)
        spikes[:] = (self.amplitude / dt) * n_spikes
        voltage -= n_spikes
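The step() rule above emits floor(voltage) spikes of height amplitude/dt per timestep. A minimal sanity-check sketch (my own, assuming a standard Nengo installation; the gain/bias values are arbitrary): a neuron driven with constant current J = 100 should produce roughly 100 spikes per second.

import numpy as np
import nengo

with nengo.Network() as net:
    stim = nengo.Node(0.5)
    ens = nengo.Ensemble(1, 1, neuron_type=nengo.SpikingRectifiedLinear(),
                         encoders=[[1]], gain=[200], bias=[0])
    nengo.Connection(stim, ens, synapse=None)
    spikes = nengo.Probe(ens.neurons)

with nengo.Simulator(net) as sim:
    sim.run(1.0)

# Each spike is recorded as amplitude/dt, so summing and scaling by dt gives
# the spike count; J = gain * x = 100, so expect roughly 100 spikes.
print(sim.data[spikes].sum() * sim.dt)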
Example #17
def Thalamus(dimensions,
             n_neurons_per_ensemble=50,
             mutual_inhib=1,
             threshold=0,
             net=None):
    """Inhibits non-selected actions.

    Converts basal ganglia output into a signal with
    (approximately) 1 for the selected action and 0 elsewhere.
    """

    if net is None:
        net = nengo.Network("Thalamus")

    with net:
        net.actions = EnsembleArray(n_neurons_per_ensemble,
                                    dimensions,
                                    intercepts=Uniform(threshold, 1),
                                    encoders=Choice([[1.0]]),
                                    label="actions")
        nengo.Connection(net.actions.output,
                         net.actions.input,
                         transform=(np.eye(dimensions) - 1) * mutual_inhib)
        net.bias = nengo.Node([1], label="thalamus bias")
        nengo.Connection(net.bias,
                         net.actions.input,
                         transform=np.ones((dimensions, 1)))

    net.input = net.actions.input
    net.output = net.actions.output
    return net
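A hedged usage sketch for the Thalamus network above (my own illustration with hypothetical utility values; it assumes nengo.networks.BasalGanglia is available and that the function's own imports are in scope): basal ganglia output feeds net.input, and the cleaned-up selection is read from net.output.

import nengo
from nengo.networks import BasalGanglia

with nengo.Network() as model:
    # Hypothetical fixed utilities; action at index 1 is the strongest.
    utilities = nengo.Node([0.4, 0.8, 0.3])
    bg = BasalGanglia(dimensions=3)
    thal = Thalamus(dimensions=3)
    nengo.Connection(utilities, bg.input, synapse=None)
    nengo.Connection(bg.output, thal.input, synapse=0.008)
    p = nengo.Probe(thal.output, synapse=0.01)

with nengo.Simulator(model) as sim:
    sim.run(0.5)
# The probed output should approach 1 in the selected dimension (index 1)
# while the mutual inhibition holds the other dimensions near 0.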
Example #18
def AuditoryPeriphery(freqs,
                      sound_process,
                      auditory_filter,
                      neurons_per_freq=12,
                      fs=50000.,
                      adaptive_neurons=False,
                      net=None):
    if net is None:
        net = nengo.Network(label="Auditory Periphery")

    net.freqs = freqs
    net.sound_process = sound_process
    net.auditory_filter = auditory_filter
    net.fs = fs

    with net:
        # Inner hair cell activity
        net.fb = AuditoryFilterBank(freqs,
                                    sound_process,
                                    filterbank=auditory_filter,
                                    samplerate=fs)
        net.ihc = nengo.Node(output=net.fb, size_out=freqs.size)

        # Cochlear neurons projecting down auditory nerve
        neuron_type = nengo.AdaptiveLIF() if adaptive_neurons else nengo.LIF()
        net.an = nengo.networks.EnsembleArray(neurons_per_freq,
                                              freqs.size,
                                              intercepts=Uniform(-0.1, 0.5),
                                              encoders=Choice([[1]]),
                                              neuron_type=neuron_type)
        nengo.Connection(net.ihc, net.an.input)
    return net
Example #19
def test_response_curves(Simulator, NonDirectNeuronType, plt, seed):
    max_rate = 400
    model = nengo.Network(seed=seed)
    with model:
        ens = nengo.Ensemble(
            10,
            dimensions=10,
            neuron_type=NonDirectNeuronType(),
            radius=1.5,
            max_rates=Uniform(200, max_rate),
        )

    with Simulator(model) as sim:
        eval_points, activities = response_curves(ens, sim)

    plot_tuning_curves(plt, eval_points, activities)

    assert eval_points.ndim == 1 and eval_points.size > 0
    assert np.all(eval_points >= -1.0) and np.all(eval_points <= 1.0)

    if not NonDirectNeuronType.negative:
        assert np.all(activities >= 0.0)

    assert np.all(activities <= max_rate)
    # Activities along preferred direction must increase monotonically.
    assert np.all(np.diff(activities, axis=0) >= 0.0)
Example #20
    def __init__(self,
                 dimensions,
                 n_neurons_per_ensemble=50,
                 mutual_inhib=1.0,
                 threshold=0.0,
                 **kwargs):
        if "net" in kwargs:
            raise ObsoleteError("The 'net' argument is no longer supported.")
        kwargs.setdefault("label", "Thalamus")
        super().__init__(**kwargs)

        with self:
            self.actions = EnsembleArray(
                n_neurons_per_ensemble,
                dimensions,
                intercepts=Uniform(threshold, 1),
                encoders=Choice([[1.0]]),
                label="actions",
            )
            nengo.Connection(
                self.actions.output,
                self.actions.input,
                transform=(np.eye(dimensions) - 1) * mutual_inhib,
            )
            self.bias = nengo.Node([1], label="thalamus bias")
            nengo.Connection(self.bias,
                             self.actions.input,
                             transform=np.ones((dimensions, 1)))

        self.input = self.actions.input
        self.output = self.actions.output
Example #21
class RegularSpiking(RatesToSpikesNeuronType):
    """Turn a rate neuron type into a spiking one with regular inter-spike intervals.

    Spikes at regular intervals based on the rates of the base neuron type. [1]_

    Parameters
    ----------
    base_type : NeuronType
        A rate-based neuron type to convert to a regularly spiking neuron.
    amplitude : float
        Scaling factor on the neuron output. Corresponds to the relative
        amplitude of the output spikes of the neuron.
    initial_state : {str: Distribution or array_like}
        Mapping from state variable names to their desired initial values.
        These values will override the defaults set in the class's state attribute.

    References
    ----------
    .. [1] Voelker, A. R., Rasmussen, D., & Eliasmith, C. (2020). A Spike in
       Performance: Training Hybrid-Spiking Neural Networks with Quantized Activation
       Functions. arXiv preprint arXiv:2002.03553. (https://arxiv.org/abs/2002.03553)
    """

    state = {"voltage": Uniform(low=0, high=1)}

    def step(self, dt, J, output, voltage):
        # Note: J is the desired output rate, not the input current
        voltage += dt * J
        n_spikes = np.floor(voltage)
        output[...] = (self.amplitude / dt) * n_spikes
        voltage -= n_spikes
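A tiny standalone check of the step() rule above (plain NumPy, not from the source): integrating a constant desired rate of 100 Hz at dt = 1 ms yields about one spike every 10 ms, i.e. roughly 100 spikes over one second.

import numpy as np

dt, rate = 0.001, 100.0
voltage = 0.0
total_spikes = 0
for _ in range(1000):           # one second of simulation
    voltage += dt * rate        # accumulate fractional spikes
    n_spikes = np.floor(voltage)
    voltage -= n_spikes
    total_spikes += int(n_spikes)
print(total_spikes)             # approximately 100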
Example #22
    def make_thresh_ens_net(self,
                            threshold=0.5,
                            thresh_func=lambda x: 1,
                            exp_scale=None,
                            num_ens=1,
                            net=None,
                            **args):
        if net is None:
            label_str = args.get('label', 'Threshold_Ens_Net')
            net = nengo.Network(label=label_str)
        if exp_scale is None:
            exp_scale = (1 - threshold) / 10.0

        with net:
            ens_args = dict(args)
            ens_args['n_neurons'] = args.get('n_neurons', self.n_neurons_ens)
            ens_args['dimensions'] = args.get('dimensions', 1)
            ens_args['intercepts'] = \
                Exponential(scale=exp_scale, shift=threshold,
                            high=1)
            ens_args['encoders'] = Choice([[1]])
            ens_args['eval_points'] = Uniform(min(threshold + 0.1, 1.0), 1.1)
            ens_args['n_eval_points'] = 5000

            net.input = nengo.Node(size_in=num_ens)
            net.output = nengo.Node(size_in=num_ens)

            for i in range(num_ens):
                thresh_ens = nengo.Ensemble(**ens_args)
                nengo.Connection(net.input[i], thresh_ens, synapse=None)
                nengo.Connection(thresh_ens,
                                 net.output[i],
                                 function=thresh_func,
                                 synapse=None)
        return net
Example #23
    def __init__(
        self,
        n_filters,
        input_shape,
        kernel_size=(3, 3),
        strides=(1, 1),
        padding="valid",
        channels_last=True,
        init=Uniform(-1, 1),
        groups=1,
    ):
        super().__init__(
            n_filters=n_filters,
            input_shape=input_shape,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            channels_last=channels_last,
            init=init,
            groups=groups,
        )

        if self.padding == "valid":
            for i in range(self.dimensions):
                if self.kernel_size[i] > self.input_shape.spatial_shape[i]:
                    raise ValidationError(
                        f"Kernel size for spatial dimension {i} "
                        f"({self.kernel_size[i]}) exceeds the spatial size of that "
                        f"dimension ({self.input_shape.spatial_shape[i]}). With the "
                        "requested 'valid' padding, this will result in empty output.",
                        attr="padding",
                        obj=self,
                    )
Example #24
    def get_gate(self, index, target_name):
        """Return the gate for an action

        The gate will be created if it does not already exist.  The gate
        neurons have no activity when the action is selected, but are
        active when the action is not selected.  This makes the gate useful
        for inhibiting ensembles that should only be active when this
        action is active.
        """

        target_module = self.spa.get_module(target_name)

        if index not in self.gates:
            with target_module:
                intercepts = Uniform(self.threshold_gate, 1)
                gate = nengo.Ensemble(self.neurons_gate,
                                      dimensions=1,
                                      intercepts=intercepts,
                                      label='gate[%d]' % index,
                                      encoders=[[1]] * self.neurons_gate)
                if not hasattr(target_module, 'bias'):
                    target_module.bias = nengo.Node([1],
                                                    label=target_name +
                                                    " bias")
                nengo.Connection(target_module.bias, gate, synapse=None)

            with self.spa:
                nengo.Connection(self.actions.ensembles[index],
                                 gate,
                                 synapse=self.synapse_to_gate,
                                 transform=-1)

            with self:
                self.gates[index] = gate
        return self.gates[index]
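For orientation, a sketch of how such a gate is commonly wired downstream (my own illustration with hypothetical sizes, not taken from this project): the gate projects onto the neurons of the ensemble being routed with a strong negative transform, silencing it whenever the gate is active, i.e. whenever the action is not selected.

import numpy as np
import nengo
from nengo.dists import Choice, Uniform

with nengo.Network() as net:
    gate = nengo.Ensemble(40, 1, intercepts=Uniform(0.3, 1),
                          encoders=Choice([[1]]))
    routed = nengo.Ensemble(50, 1)
    # Strong negative weights onto the neurons shut the routed ensemble down
    # while the gate is active.
    nengo.Connection(gate, routed.neurons,
                     transform=-2.0 * np.ones((routed.n_neurons, 1)),
                     synapse=0.008)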
Example #25
def test_get_nengo_params(Simulator, seed):
    with nengo.Network(seed=seed) as net:
        a = nengo.Ensemble(12, 3, label="a")
        b = nengo.Ensemble(10, 4, label="b")
        n = nengo.Node([1])
        c = nengo.Connection(a.neurons[:5],
                             b[:2],
                             transform=Uniform(-1, 1),
                             label="c")
        d = nengo.Connection(a,
                             b.neurons,
                             function=lambda x: np.ones(5),
                             transform=Uniform(-1, 1),
                             label="d")
        e = nengo.Connection(n, b, transform=Uniform(-1, 1), label="e")
        f = nengo.Ensemble(5, 1, label="a")
        p = nengo.Probe(b.neurons)

    with Simulator(net, seed=seed) as sim:
        # check that we get an error for non-ensemble/connection objects
        with pytest.raises(ValueError):
            sim.get_nengo_params(n)

        # check that we get an error for duplicate labels
        with pytest.raises(ValueError):
            sim.get_nengo_params([a, f], as_dict=True)

        # check that single objects are returned as single dicts
        params = sim.get_nengo_params(d)
        assert params["transform"] == 1

        params = sim.get_nengo_params([a.neurons, b, c, d, e], as_dict=True)
        sim.run_steps(100)

    with nengo.Network(seed=seed + 1) as net:
        a2 = nengo.Ensemble(12, 3, **params["a"])
        b2 = nengo.Ensemble(10, 4, **params["b"])
        n2 = nengo.Node([1])
        nengo.Connection(a2.neurons[:5], b2[:2], **params["c"])
        nengo.Connection(a2, b2.neurons, **params["d"])
        nengo.Connection(n2, b2, **params["e"])
        p2 = nengo.Probe(b2.neurons)

    with Simulator(net, seed=seed) as sim2:
        sim2.run_steps(100)

        assert np.allclose(sim.data[p], sim2.data[p2])
Example #26
    def __init__(self, base_type, amplitude=1.0, initial_state=None):
        if "voltage" in base_type.state:
            raise ValidationError(
                "Cannot already have a 'voltage' state variable",
                attr="base_type",
                obj=self,
            )
        self.state = {"spikes": Choice([0]), "voltage": Uniform(low=0, high=1)}
        super().__init__(base_type, amplitude=amplitude, initial_state=initial_state)
Example #27
def test_argreprs():
    def check_init_args(cls, args):
        assert getfullargspec(cls.__init__).args[1:] == args

    def check_repr(obj):
        assert eval(repr(obj)) == obj

    check_init_args(PDF, ["x", "p"])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))

    check_init_args(Uniform, ["low", "high", "integer"])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))

    check_init_args(Gaussian, ["mean", "std"])
    check_repr(Gaussian(0, 2))

    check_init_args(Exponential, ["scale", "shift", "high"])
    check_repr(Exponential(2.0))
    check_repr(Exponential(2.0, shift=0.1))
    check_repr(Exponential(2.0, shift=0.1, high=10.0))

    check_init_args(UniformHypersphere, ["surface", "min_magnitude"])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))

    check_init_args(Choice, ["options", "weights"])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))

    check_init_args(Samples, ["samples"])
    check_repr(Samples([3, 2, 1]))

    check_init_args(SqrtBeta, ["n", "m"])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))

    check_init_args(SubvectorLength, ["dimensions", "subdimensions"])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))

    check_init_args(CosineSimilarity, ["dimensions"])
    check_repr(CosineSimilarity(6))
Example #28
def test_argreprs():
    def check_init_args(cls, args):
        assert getfullargspec(cls.__init__).args[1:] == args

    def check_repr(obj):
        assert eval(repr(obj)) == obj

    check_init_args(PDF, ['x', 'p'])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))

    check_init_args(Uniform, ['low', 'high', 'integer'])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))

    check_init_args(Gaussian, ['mean', 'std'])
    check_repr(Gaussian(0, 2))

    check_init_args(Exponential, ['scale', 'shift', 'high'])
    check_repr(Exponential(2.))
    check_repr(Exponential(2., shift=0.1))
    check_repr(Exponential(2., shift=0.1, high=10.))

    check_init_args(UniformHypersphere, ['surface', 'min_magnitude'])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))

    check_init_args(Choice, ['options', 'weights'])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))

    check_init_args(Samples, ['samples'])
    check_repr(Samples([3, 2, 1]))

    check_init_args(SqrtBeta, ['n', 'm'])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))

    check_init_args(SubvectorLength, ['dimensions', 'subdimensions'])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))

    check_init_args(CosineSimilarity, ['dimensions'])
    check_repr(CosineSimilarity(6))
Example #29
    def __init__(
        self,
        n_filters,
        input_shape,
        output_shape=None,
        kernel_size=(3, 3),
        strides=(1, 1),
        padding="valid",
        channels_last=True,
        init=Uniform(-1, 1),
    ):
        super().__init__(
            n_filters=n_filters,
            input_shape=input_shape,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            channels_last=channels_last,
            init=init,
        )

        self.output_shape = (
            self._reverse_shape(self.input_shape.spatial_shape, self.n_filters)
            if output_shape is None
            else output_shape
        )

        if self.output_shape.dimensions != self.input_shape.dimensions:
            raise ValidationError(
                f"The number of dimensions ({self.output_shape.dimensions}) in the "
                f"provided `output_shape` {self.output_shape} does not match the number"
                f" of dimensions ({self.input_shape.dimensions}) in the input shape.",
                attr="output_shape",
                obj=self,
            )
        if self.output_shape.n_channels != self.n_filters:
            raise ValidationError(
                f"The number of channels in the provided `output_shape` "
                f"{self.output_shape} does not match the requested number "
                f"of filters ({self.n_filters}).",
                attr="output_shape",
                obj=self,
            )

        expected_input_shape = self._forward_shape(
            self.output_shape.spatial_shape, self.input_shape.n_channels
        )
        if self.input_shape != expected_input_shape:
            raise ValidationError(
                f"The provided `output_shape` {self.output_shape} would not produce "
                f"`input_shape` {self.input_shape} in a forward Convolution, "
                f"and is therefore not a valid output shape.",
                attr="output_shape",
                obj=self,
            )
Example #30
    def __init__(
        self,
        n_filters,
        input_shape,
        kernel_size=(3, 3),
        strides=(1, 1),
        padding="valid",
        channels_last=True,
        init=Uniform(-1, 1),
        groups=1,
    ):
        super().__init__()

        self.n_filters = n_filters
        self.channels_last = channels_last  # must be set before input_shape
        self.input_shape = input_shape
        self.kernel_size = kernel_size
        self.strides = strides
        self.padding = padding
        self.init = init
        self.groups = groups

        if len(kernel_size) != self.dimensions:
            raise ValidationError(
                f"Kernel dimensions ({len(kernel_size)}) does not match "
                f"input dimensions ({self.dimensions})",
                attr="kernel_size",
            )
        if len(strides) != self.dimensions:
            raise ValidationError(
                f"Stride dimensions ({len(strides)}) does not match "
                f"input dimensions ({self.dimensions})",
                attr="strides",
            )
        if not isinstance(init, Distribution):
            if init.shape != self.kernel_shape:
                raise ValidationError(
                    f"Kernel shape {init.shape} does not match "
                    f"expected shape {self.kernel_shape}",
                    attr="init",
                )

        in_channels = self.input_shape.n_channels
        if groups > in_channels:
            raise ValidationError(
                f"Groups ({groups}) cannot be greater than "
                f"the number of input channels ({in_channels})",
                attr="groups",
            )
        if in_channels % groups != 0 or self.n_filters % groups != 0:
            raise ValidationError(
                f"Both the number of input channels ({in_channels}) and filters "
                f"({self.n_filters}) must be evenly divisible by ``groups`` ({groups})",
                attr="groups",
            )
Example #31
def plot_tuning_curves(filename, plot_decoding=False, show=False):
    """
    Plot tuning curves for an association population and for a standard
    subpopulation (of the neural extraction network).
    """
    import matplotlib as mpl
    mpl.rcParams['font.size'] = '10'

    if show:
        mpl.use('Qt4Agg')
    else:
        mpl.use('Agg')

    import matplotlib.pyplot as plt

    plt.figure(figsize=(5, 3))

    neurons_per_item = 20
    neurons_per_dim = 50
    intercepts_low = 0.29
    intercepts_range = 0.00108

    intercepts = Uniform(intercepts_low, intercepts_low + intercepts_range)

    tau_rc = 0.034
    tau_ref = 0.0026
    radius = 1.0
    assoc_encoders = np.ones((neurons_per_item, 1))
    standard_encoders = np.ones((neurons_per_dim, 1))

    threshold = 0.3
    threshold_func = lambda x: 1 if x > threshold else 0

    max_rates = Uniform(200, 350)

    model = nengo.Network("Associative Memory")
    with model:
        neuron_type = nengo.LIF(
            tau_rc=tau_rc, tau_ref=tau_ref)

        assoc = nengo.Ensemble(
            n_neurons=neurons_per_item, dimensions=1, intercepts=intercepts,
            encoders=assoc_encoders, label="assoc", radius=radius,
            max_rates=max_rates, neuron_type=neuron_type)

        n_eval_points = 750
        eval_points = np.random.normal(0, 0.06, (n_eval_points, 1))
        eval_points.T[0].sort()
        radius = 5.0 / np.sqrt(512)
        standard = nengo.Ensemble(n_neurons=neurons_per_dim, dimensions=1,
                                  eval_points=eval_points, radius=radius,
                                  encoders=standard_encoders)

        if plot_decoding:
            dummy = nengo.Ensemble(1, 1)
            conn = nengo.Connection(assoc, dummy, function=threshold_func)
            dummy2 = nengo.Ensemble(1, 1)
            conn2 = nengo.Connection(standard, dummy2)

    sim = nengo.Simulator(model)

    if plot_decoding:
        gs = gridspec.GridSpec(3, 2)
    else:
        gs = gridspec.GridSpec(2, 2)

    plt.subplot(gs[0:2, 0])

    assoc_eval_points, assoc_activities = tuning_curves(assoc, sim)

    for neuron in assoc_activities.T:
        plt.plot(assoc_eval_points.T[0], neuron)
    plt.title("Association")
    plt.ylabel("Firing Rate (spikes/s)")
    plt.xlabel(r"$e_ix$")
    plt.ylim((0, 400))
    plt.yticks([0, 100, 200, 300, 400])

    ax = plt.subplot(gs[0:2, 1])

    # We want different eval points for display purposes than for
    # optimization purposes
    eval_points = Uniform(-radius, radius).sample(n_eval_points)
    eval_points.sort()
    eval_points = eval_points.reshape((n_eval_points, 1))

    # have to divide by radius on our own since tuning_curves skips that step
    _, activities = tuning_curves(standard, sim, eval_points/radius)
    for neuron in activities.T:
        plt.plot(eval_points, neuron)

    plt.title("Standard")
    plt.xlabel(r"$e_ix$")
    plt.xlim((-radius, radius))
    plt.ylim((0, 400))
    plt.setp(ax, yticks=[])

    if plot_decoding:
        plt.subplot(gs[2, 0])
        decoders = sim.data[conn].decoders
        plt.plot(assoc_eval_points.T[0],
                 0.001 * np.dot(assoc_activities, decoders.T))
        plt.axhline(y=1.0, ls='--')

        plt.subplot(gs[2, 1])
        x, activities2 = tuning_curves(standard, sim, assoc_eval_points/radius)
        decoders = sim.data[conn2].decoders
        plt.plot(
            assoc_eval_points.T[0],
            0.001 * np.dot(activities2, decoders.T))
        plt.plot([-1.0, 1.0], [-1.0, 1.0], c='k', ls='--')
        plt.axvline(x=radius, c='k', ls='--')
        plt.axvline(x=-radius, c='k', ls='--')

    plt.tight_layout()

    plt.subplots_adjust(right=0.89, left=0.11)

    if filename:
        plt.savefig(filename)
    if show:
        plt.show()