def test_noise_gen(Simulator, nl_nodirect, seed, plt):
    """Ensure that setting Ensemble.noise generates noise."""
    with nengo.Network(seed=seed) as model:
        gain, bias = 1, 2
        neg_noise, pos_noise = -4, 5
        model.config[nengo.Ensemble].neuron_type = nl_nodirect()
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        model.config[nengo.Ensemble].gain = Choice([gain])
        model.config[nengo.Ensemble].bias = Choice([bias])
        pos = nengo.Ensemble(1, 1, noise=WhiteNoise(Gaussian(pos_noise, 0.01)))
        normal = nengo.Ensemble(1, 1)
        neg = nengo.Ensemble(1, 1, noise=WhiteNoise(Gaussian(neg_noise, 0.01)))
        pos_p = nengo.Probe(pos.neurons, synapse=0.1)
        normal_p = nengo.Probe(normal.neurons, synapse=0.1)
        neg_p = nengo.Probe(neg.neurons, synapse=0.1)
    with Simulator(model) as sim:
        sim.run(0.06)

    t = sim.trange()
    plt.title("bias=%d, gain=%d" % (bias, gain))
    plt.plot(t, sim.data[pos_p], c='b', label="noise=%d" % pos_noise)
    plt.plot(t, sim.data[normal_p], c='k', label="no noise")
    plt.plot(t, sim.data[neg_p], c='r', label="noise=%d" % neg_noise)
    plt.legend(loc="best")

    assert np.all(sim.data[pos_p] >= sim.data[normal_p])
    assert np.all(sim.data[normal_p] >= sim.data[neg_p])
    assert not np.allclose(sim.data[normal_p], sim.data[pos_p])
    assert not np.allclose(sim.data[normal_p], sim.data[neg_p])
Example #2
def test_noise(Simulator, nl_nodirect, seed, plt):
    """Ensure that setting Ensemble.noise generates noise."""
    with nengo.Network(seed=seed) as model:
        inp, gain, bias = 1, 5, 2
        neg_noise, pos_noise = -4, 20
        model.config[nengo.Ensemble].neuron_type = nl_nodirect()
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        model.config[nengo.Ensemble].gain = Choice([gain])
        model.config[nengo.Ensemble].bias = Choice([bias])
        const = nengo.Node(output=inp)
        pos = nengo.Ensemble(
            1, 1, noise=StochasticProcess(Choice([pos_noise])))
        normal = nengo.Ensemble(1, 1)
        neg = nengo.Ensemble(
            1, 1, noise=StochasticProcess(Choice([neg_noise])))
        nengo.Connection(const, pos)
        nengo.Connection(const, normal)
        nengo.Connection(const, neg)
        pos_p = nengo.Probe(pos.neurons, synapse=0.1)
        normal_p = nengo.Probe(normal.neurons, synapse=0.1)
        neg_p = nengo.Probe(neg.neurons, synapse=0.1)
    sim = Simulator(model)
    sim.run(0.06)

    t = sim.trange()
    plt.title("input=%d, bias=%d, gain=%d" % (inp, bias, gain))
    plt.plot(t, sim.data[pos_p], c='b', label="noise=%d" % pos_noise)
    plt.plot(t, sim.data[normal_p], c='k', label="no noise")
    plt.plot(t, sim.data[neg_p], c='r', label="noise=%d" % neg_noise)
    plt.legend(loc="best")

    assert np.all(sim.data[pos_p] >= sim.data[normal_p])
    assert np.all(sim.data[normal_p] >= sim.data[neg_p])
    assert not np.all(sim.data[normal_p] == sim.data[pos_p])
    assert not np.all(sim.data[normal_p] == sim.data[neg_p])
Example #3
def test_noise_gen(Simulator, nl_nodirect, seed, plt, allclose):
    """Ensure that setting Ensemble.noise generates noise."""
    with nengo.Network(seed=seed) as model:
        intercepts = -0.5
        neg_noise, pos_noise = -5, 5
        model.config[nengo.Ensemble].neuron_type = nl_nodirect()
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        model.config[nengo.Ensemble].intercepts = Choice([intercepts])
        pos = nengo.Ensemble(1, 1, noise=WhiteNoise(Uniform(0, pos_noise)))
        normal = nengo.Ensemble(1, 1)
        neg = nengo.Ensemble(1, 1, noise=WhiteNoise(Uniform(neg_noise, 0)))
        pos_p = nengo.Probe(pos.neurons, synapse=0.1)
        normal_p = nengo.Probe(normal.neurons, synapse=0.1)
        neg_p = nengo.Probe(neg.neurons, synapse=0.1)
    with Simulator(model) as sim:
        sim.run(0.06)

    t = sim.trange()
    plt.title("intercepts=%d" % intercepts)
    plt.plot(t, sim.data[pos_p], c="b", label="noise=%d" % pos_noise)
    plt.plot(t, sim.data[normal_p], c="k", label="no noise")
    plt.plot(t, sim.data[neg_p], c="r", label="noise=%d" % neg_noise)
    plt.legend(loc="best")

    assert np.sum(sim.data[pos_p], axis=0) >= np.sum(sim.data[normal_p],
                                                     axis=0)
    assert np.sum(sim.data[normal_p], axis=0) >= np.sum(sim.data[neg_p],
                                                        axis=0)
    assert not allclose(sim.data[normal_p], sim.data[pos_p], record_rmse=False)
    assert not allclose(sim.data[normal_p], sim.data[neg_p], record_rmse=False)
Example #4
def motor_cortex(command_threshold, n_neurons_per_command=30):
    with nengo.Network() as motor_cortex:
        ens_args = {'dimensions': 1,
                    'encoders': Choice([[1]]),
                    'intercepts': Choice([command_threshold])}
        motor_cortex.press = nengo.Ensemble(n_neurons_per_command, **ens_args)
        motor_cortex.release = nengo.Ensemble(n_neurons_per_command, **ens_args)
    return motor_cortex
Example #5
def test_distributions():
    check_init_args(PDF, ["x", "p"])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))
    assert (repr(PDF(
        [1, 2], [0.4, 0.6])) == "PDF(x=array([1., 2.]), p=array([0.4, 0.6]))")

    check_init_args(Uniform, ["low", "high", "integer"])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))
    assert repr(Uniform(0, 1)) == "Uniform(low=0, high=1)"
    assert repr(Uniform(
        0, 5, integer=True)) == "Uniform(low=0, high=5, integer=True)"

    check_init_args(Gaussian, ["mean", "std"])
    check_repr(Gaussian(0, 2))
    assert repr(Gaussian(1, 0.1)) == "Gaussian(mean=1, std=0.1)"

    check_init_args(Exponential, ["scale", "shift", "high"])
    check_repr(Exponential(2.0))
    check_repr(Exponential(2.0, shift=0.1))
    check_repr(Exponential(2.0, shift=0.1, high=10.0))
    assert repr(Exponential(2.0)) == "Exponential(scale=2.0)"

    check_init_args(UniformHypersphere, ["surface", "min_magnitude"])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))
    assert repr(UniformHypersphere()) == "UniformHypersphere()"
    assert repr(
        UniformHypersphere(surface=True)) == "UniformHypersphere(surface=True)"

    check_init_args(Choice, ["options", "weights"])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))
    assert repr(Choice([1, 2, 3])) == "Choice(options=array([1., 2., 3.]))"
    assert (repr(
        Choice([1, 2, 3], weights=[0.1, 0.5, 0.4])
    ) == "Choice(options=array([1., 2., 3.]), weights=array([0.1, 0.5, 0.4]))")

    check_init_args(Samples, ["samples"])
    check_repr(Samples([3, 2, 1]))
    assert repr(Samples([3, 2, 1])) == "Samples(samples=array([3., 2., 1.]))"

    check_init_args(SqrtBeta, ["n", "m"])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))
    assert repr(SqrtBeta(3)) == "SqrtBeta(n=3)"
    assert repr(SqrtBeta(3, 2)) == "SqrtBeta(n=3, m=2)"

    check_init_args(SubvectorLength, ["dimensions", "subdimensions"])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))
    assert repr(SubvectorLength(3)) == "SubvectorLength(dimensions=3)"

    check_init_args(CosineSimilarity, ["dimensions"])
    check_repr(CosineSimilarity(6))
    assert repr(CosineSimilarity(6)) == "CosineSimilarity(dimensions=6)"
Example #6
 def __init__(self,
              theta=Uniform(-np.pi, np.pi),
              freq=Uniform(0.2, 2),
              phase=Uniform(-np.pi, np.pi),
              sigma_x=Choice([0.45]),
              sigma_y=Choice([0.45])):
     self.theta = theta
     self.freq = freq
     self.phase = phase
     self.sigma_x = sigma_x
     self.sigma_y = sigma_y
Example #7
def test_choice_errors():
    with pytest.raises(ValidationError, match="Number of weights.*must match.*options"):
        Choice([2], [1, 2, 3])

    with pytest.raises(ValidationError, match="All weights must be non-negative"):
        Choice([2], [-1])

    with pytest.raises(ValidationError, match="Sum of weights must be positive"):
        Choice([1, 2], [0, 0])

    with pytest.raises(ValidationError, match="Options must be of dimensionality 1"):
        Choice([0]).sample(n=2, d=1)
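
For contrast, a minimal sketch of valid Choice usage that satisfies the constraints exercised above (assuming Choice and numpy are imported as in the surrounding snippets):

rng = np.random.RandomState(0)
# Weights match the number of options, are non-negative, and sum to a positive value.
dist = Choice([1, 2, 3], weights=[0.1, 0.3, 0.6])
samples = dist.sample(1000, rng=rng)  # scalar options, so d is left unspecified
# Roughly 10% / 30% / 60% of the samples should be 1, 2, and 3 respectively.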
Example #8
def RhythmicDMP(n_per_d, freq, forcing_f, tau=0.025, net=None):
    if net is None:
        net = nengo.Network(label="Rhythmic DMP")

    out_dims = forcing_f(0.).size
    omega = freq * tau * 2 * np.pi
    with net:
        # --- Decode forcing_f from oscillator
        net.osc = nengo.Ensemble(n_per_d * 2,
                                 dimensions=2,
                                 intercepts=Exponential(0.15, 0.3, 0.6),
                                 label=forcing_f.__name__)
        nengo.Connection(net.osc,
                         net.osc,
                         synapse=tau,
                         transform=[[1, -omega], [omega, 1]])
        net.output = nengo.Node(size_in=out_dims)
        nengo.Connection(net.osc,
                         net.output,
                         function=radial_f(forcing_f),
                         synapse=None)

        # --- Drive the oscillator to a starting position
        net.reset = nengo.Node(size_in=1)
        d_intercepts = Exponential(0.2, -0.5, 0.1)
        net.diff_inhib = nengo.Ensemble(20,
                                        dimensions=1,
                                        intercepts=d_intercepts,
                                        encoders=Choice([[1]]))
        net.diff = nengo.Ensemble(n_per_d, dimensions=2)
        nengo.Connection(net.reset, net.diff_inhib, transform=-1, synapse=None)
        nengo.Connection(net.diff_inhib.neurons,
                         net.diff.neurons,
                         transform=-np.ones((n_per_d, 20)))
        nengo.Connection(net.osc, net.diff)
        reset_goal = np.array([-1, omega * 0])
        nengo.Connection(net.diff, net.osc, function=lambda x: reset_goal - x)

        # --- Inhibit the oscillator by default
        i_intercepts = Exponential(0.15, -0.5, 0.1)
        net.inhibit = nengo.Ensemble(20,
                                     dimensions=1,
                                     intercepts=i_intercepts,
                                     encoders=Choice([[1]]))
        nengo.Connection(net.inhibit.neurons,
                         net.osc.neurons,
                         transform=-np.ones((n_per_d * 2, 20)))

        # --- Disinhibit when appropriate
        net.disinhibit = nengo.Node(size_in=1)
        nengo.Connection(net.disinhibit, net.inhibit, transform=-1)
    return net
Example #9
def _test_rates(Simulator, rates, plt, seed, name=None):
    if name is None:
        name = rates.__name__

    n = 100
    intercepts = np.linspace(-0.99, 0.99, n)

    model = nengo.Network(seed=seed)
    with model:
        model.config[nengo.Ensemble].max_rates = Choice([50])
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        u = nengo.Node(output=WhiteNoise(2., 5).f(
            rng=np.random.RandomState(seed=seed)))
        a = nengo.Ensemble(n, 1,
                           intercepts=intercepts, neuron_type=nengo.LIFRate())
        b = nengo.Ensemble(n, 1,
                           intercepts=intercepts, neuron_type=nengo.LIF())
        nengo.Connection(u, a, synapse=0)
        nengo.Connection(u, b, synapse=0)
        up = nengo.Probe(u)
        ap = nengo.Probe(a.neurons)
        bp = nengo.Probe(b.neurons)

    sim = Simulator(model)
    sim.run(2.)

    t = sim.trange()
    x = sim.data[up]
    a_rates = sim.data[ap]
    spikes = sim.data[bp]
    b_rates = rates(t, spikes)

    ax = plt.subplot(411)
    plt.plot(t, x)
    ax = plt.subplot(412)
    implot(plt, t, intercepts, a_rates.T, ax=ax)
    ax.set_ylabel('intercept')
    ax = plt.subplot(413)
    implot(plt, t, intercepts, b_rates.T, ax=ax)
    ax.set_ylabel('intercept')
    ax = plt.subplot(414)
    implot(plt, t, intercepts, (b_rates - a_rates).T, ax=ax)
    ax.set_xlabel('time [s]')
    ax.set_ylabel('intercept')
    plt.saveas = 'utils.test_neurons.test_rates.%s.pdf' % name

    tmask = (t > 0.1) & (t < 1.9)
    relative_rmse = rms(b_rates[tmask] - a_rates[tmask]) / rms(a_rates[tmask])
    return relative_rmse
Example #10
def _test_rates(Simulator, rates, plt, seed):
    n = 100
    intercepts = np.linspace(-0.99, 0.99, n)

    model = nengo.Network(seed=seed)
    with model:
        model.config[nengo.Ensemble].max_rates = Choice([50])
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        u = nengo.Node(output=WhiteSignal(2, high=5))
        a = nengo.Ensemble(n,
                           1,
                           intercepts=intercepts,
                           neuron_type=nengo.LIFRate())
        b = nengo.Ensemble(n,
                           1,
                           intercepts=intercepts,
                           neuron_type=nengo.LIF())
        nengo.Connection(u, a, synapse=0)
        nengo.Connection(u, b, synapse=0)
        up = nengo.Probe(u)
        ap = nengo.Probe(a.neurons)
        bp = nengo.Probe(b.neurons)

    with Simulator(model, seed=seed + 1) as sim:
        sim.run(2.)

    t = sim.trange()
    x = sim.data[up]
    a_rates = sim.data[ap]
    spikes = sim.data[bp]
    b_rates = rates(t, spikes)

    if plt is not None:
        ax = plt.subplot(411)
        plt.plot(t, x)
        ax = plt.subplot(412)
        implot(plt, t, intercepts, a_rates.T, ax=ax)
        ax.set_ylabel('intercept')
        ax = plt.subplot(413)
        implot(plt, t, intercepts, b_rates.T, ax=ax)
        ax.set_ylabel('intercept')
        ax = plt.subplot(414)
        implot(plt, t, intercepts, (b_rates - a_rates).T, ax=ax)
        ax.set_xlabel('time [s]')
        ax.set_ylabel('intercept')

    tmask = (t > 0.1) & (t < 1.9)
    relative_rmse = rms(b_rates[tmask] - a_rates[tmask]) / rms(a_rates[tmask])
    return relative_rmse
Example #11
def motor_cortex(command_threshold,
                 n_neurons_per_command=30,
                 ens_config=None,
                 net=None):
    if net is None:
        net = nengo.Network()
    if ens_config is None:
        ens_config = nengo.Config(nengo.Ensemble)
        ens_config[nengo.Ensemble].encoders = Choice([[1]])
        ens_config[nengo.Ensemble].intercepts = Choice([command_threshold])
    with net:
        with ens_config:
            net.press = nengo.Ensemble(n_neurons_per_command, dimensions=1)
            net.release = nengo.Ensemble(n_neurons_per_command, dimensions=1)
    return net
Example #12
class LoihiSpikingRectifiedLinear(SpikingRectifiedLinear):
    """Simulate spiking rectified linear neurons as done by Loihi.

    On Loihi, the inter-spike interval has to be an integer. This causes
    aliasing in the firing rates such that a wide variety of inputs produce the
    same output firing rate. This class reproduces this effect. It can be used
    in e.g. ``nengo`` or ``nengo_dl`` to reproduce these unique Loihi effects.
    """

    state = {
        "voltage": Choice([0]),
    }

    def __init__(self, amplitude=1, **kwargs):
        super().__init__(amplitude=amplitude, **kwargs)
        install_dl_builders()

    def rates(self, x, gain, bias, dt=0.001):
        return loihi_spikingrectifiedlinear_rates(self, x, gain, bias, dt)

    def step(self, dt, J, output, voltage):
        voltage += J * dt

        spikes_mask = voltage > 1
        output[:] = spikes_mask * (self.amplitude / dt)

        voltage[voltage < 0] = 0
        voltage[spikes_mask] = 0
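
A minimal usage sketch for the class above, assuming it and its Loihi-specific helpers (install_dl_builders, loihi_spikingrectifiedlinear_rates) are importable from their package and that standard nengo is installed; the network parameters are illustrative only:

import numpy as np
import nengo

with nengo.Network() as model:
    stim = nengo.Node(lambda t: np.sin(2 * np.pi * t))
    # Swap the Loihi-style neuron model into an ordinary nengo ensemble to
    # reproduce the integer inter-spike-interval aliasing described above.
    ens = nengo.Ensemble(50, 1, neuron_type=LoihiSpikingRectifiedLinear())
    nengo.Connection(stim, ens)
    spikes_p = nengo.Probe(ens.neurons)

with nengo.Simulator(model) as sim:
    sim.run(1.0)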
Example #13
def test_all_negative_activities(allclose, plt, seed, Simulator, Solver):
    class CheckActivitiesSolver(Solver):
        def __call__(self, A, Y, rng=np.random):
            assert np.all(A < 0)
            return super().__call__(A, Y, rng=rng)

    val = -0.5

    with nengo.Network(seed=seed) as net:
        input = nengo.Node(output=val, label="input")
        ens = nengo.Ensemble(
            30,
            1,
            neuron_type=nengo.Tanh(),
            encoders=Choice([[1]]),
            intercepts=Uniform(0, 0.95),
            eval_points=Uniform(-1, -0.1),
        )
        nengo.Connection(input, ens)
        in_p = nengo.Probe(input, "output")
        ens_p = nengo.Probe(
            ens, "decoded_output", synapse=0.05, solver=CheckActivitiesSolver()
        )

    with Simulator(net) as sim:
        sim.run(0.3)

    t = sim.trange()
    plt.plot(t, sim.data[in_p], label="Input")
    plt.plot(t, sim.data[ens_p], label="Neuron approximation, pstc=0.05")
    plt.xlim(right=t[-1])
    plt.legend(loc=0)

    assert allclose(sim.data[in_p], val, atol=0.1, rtol=0.01)
    assert allclose(sim.data[ens_p][-10:], val, atol=0.1, rtol=0.01)
Example #14
def Thalamus(dimensions,
             n_neurons_per_ensemble=50,
             mutual_inhib=1,
             threshold=0,
             net=None):
    """Inhibits non-selected actions.

    Converts basal ganglia output into a signal with
    (approximately) 1 for the selected action and 0 elsewhere.
    """

    if net is None:
        net = nengo.Network("Thalamus")

    with net:
        net.actions = EnsembleArray(n_neurons_per_ensemble,
                                    dimensions,
                                    intercepts=Uniform(threshold, 1),
                                    encoders=Choice([[1.0]]),
                                    label="actions")
        nengo.Connection(net.actions.output,
                         net.actions.input,
                         transform=(np.eye(dimensions) - 1) * mutual_inhib)
        net.bias = nengo.Node([1], label="thalamus bias")
        nengo.Connection(net.bias,
                         net.actions.input,
                         transform=np.ones((dimensions, 1)))

    net.input = net.actions.input
    net.output = net.actions.output
    return net
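
A rough usage sketch, assuming the Thalamus helper above (and its EnsembleArray, Uniform, and Choice imports) is available; it pairs the network with nengo's built-in basal ganglia, and the utility values are made up for illustration:

import nengo

with nengo.Network(seed=0) as model:
    # Candidate action utilities; the second action has the highest value.
    utilities = nengo.Node([0.3, 0.8, 0.1])
    bg = nengo.networks.BasalGanglia(dimensions=3)
    thal = Thalamus(dimensions=3)
    nengo.Connection(utilities, bg.input)
    nengo.Connection(bg.output, thal.input)
    out_p = nengo.Probe(thal.output, synapse=0.03)

with nengo.Simulator(model) as sim:
    sim.run(0.5)
# Per the docstring, the probed output should settle near 1 for the selected
# (highest-utility) action and near 0 for the others.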
Example #15
    def __init__(self,
                 n_neurons,
                 dimensions,
                 radius=1.0,
                 encoders=nengo.Default,
                 **ens_kwargs):
        super(Product, self).__init__(self)

        with self:
            self.config[nengo.Ensemble].update(ens_kwargs)
            self.A = nengo.Node(size_in=dimensions, label="A")
            self.B = nengo.Node(size_in=dimensions, label="B")
            self.dimensions = dimensions

            if encoders is nengo.Default:
                encoders = Choice([[1, 1], [1, -1], [-1, 1], [-1, -1]])

            optimizer = SubvectorRadiusOptimizer(n_neurons,
                                                 2,
                                                 ens_kwargs=ens_kwargs)
            scaled_r = radius * optimizer.find_optimal_radius(dimensions, 1)

            self.product = EnsembleArray(n_neurons,
                                         n_ensembles=dimensions,
                                         ens_dimensions=2,
                                         radius=scaled_r,
                                         encoders=encoders,
                                         **ens_kwargs)

            nengo.Connection(self.A, self.product.input[::2], synapse=None)
            nengo.Connection(self.B, self.product.input[1::2], synapse=None)

            self.output = self.product.add_output('product',
                                                  lambda x: x[0] * x[1])
Example #16
def make_mem_network(net, n_neurons, dimensions, make_mem_func, make_mem_args,
                     make_diff_func, make_diff_args, mem_synapse=0.1,
                     fdbk_transform=1.0, input_transform=1.0,
                     difference_gain=1.0, gate_gain=3):
    with net:
        net.input = nengo.Node(size_in=dimensions)

        # integrator to store value
        if np.isscalar(fdbk_transform):
            fdbk_matrix = np.eye(dimensions) * fdbk_transform
        else:
            fdbk_matrix = np.matrix(fdbk_transform)

        net.mem = make_mem_func(n_neurons=n_neurons, dimensions=dimensions,
                                label="mem", **make_mem_args)
        if isinstance(net.mem, nengo.Network):
            mem_output = net.mem.output
            mem_input = net.mem.input
        else:
            mem_output = mem_input = net.mem

        nengo.Connection(mem_output, mem_input,
                         synapse=mem_synapse, transform=fdbk_matrix)

        # calculate difference between stored value and input
        net.diff = make_diff_func(n_neurons=n_neurons, dimensions=dimensions,
                                  label="Diff", **make_diff_args)

        if isinstance(net.diff, nengo.Network):
            net.diff_input = net.diff.input
            diff_output = net.diff.output
        else:
            net.diff_input = diff_output = net.diff

        nengo.Connection(net.input, net.diff_input, synapse=None,
                         transform=input_transform)
        nengo.Connection(mem_output, net.diff_input, transform=-1)

        # feed difference into integrator
        nengo.Connection(diff_output, mem_input,
                         transform=difference_gain, synapse=mem_synapse)

        # gate difference (if gate==0, update stored value,
        # otherwise retain stored value)
        # Note: A node is used for the input to make reset circuit more
        #       straightforward
        net.gate = nengo.Ensemble(n_neurons, 1, encoders=Choice([[1]]),
                                  intercepts=Exponential(0.15, 0.5, 1),
                                  label='Gate')

        if isinstance(net.diff, nengo.Network):
            for e in net.diff.ensembles:
                nengo.Connection(net.gate, e.neurons,
                                 transform=[[-gate_gain]] * e.n_neurons)
        else:
            nengo.Connection(net.gate, net.diff.neurons,
                             transform=[[-gate_gain]] * net.diff.n_neurons)

        # Make output
        net.output = mem_output
Example #17
def AuditoryPeriphery(freqs,
                      sound_process,
                      auditory_filter,
                      neurons_per_freq=12,
                      fs=50000.,
                      adaptive_neurons=False,
                      net=None):
    if net is None:
        net = nengo.Network(label="Auditory Periphery")

    net.freqs = freqs
    net.sound_process = sound_process
    net.auditory_filter = auditory_filter
    net.fs = fs

    with net:
        # Inner hair cell activity
        net.fb = AuditoryFilterBank(freqs,
                                    sound_process,
                                    filterbank=auditory_filter,
                                    samplerate=fs)
        net.ihc = nengo.Node(output=net.fb, size_out=freqs.size)

        # Cochlear neurons projecting down auditory nerve
        neuron_type = nengo.AdaptiveLIF() if adaptive_neurons else nengo.LIF()
        net.an = nengo.networks.EnsembleArray(neurons_per_freq,
                                              freqs.size,
                                              intercepts=Uniform(-0.1, 0.5),
                                              encoders=Choice([[1]]),
                                              neuron_type=neuron_type)
        nengo.Connection(net.ihc, net.an.input)
    return net
Example #18
def Product(neuron_per_dimension, input_magnitude):
    model = nengo.Network(label="Product")
    with model:

        model.A = nengo.Node(output=None, size_in=1)
        model.B = nengo.Node(output=None, size_in=1)

        model.combined = nengo.Ensemble(neuron_per_dimension * 2,
                                        dimensions=2,
                                        radius=np.sqrt(input_magnitude**2 +
                                                       input_magnitude**2),
                                        encoders=Choice([[1, 1], [-1, 1],
                                                         [1, -1], [-1, -1]]))

        model.prod = nengo.Ensemble(neuron_per_dimension,
                                    dimensions=1,
                                    radius=input_magnitude * 2)

        nengo.Connection(model.A, model.combined[0], synapse=None)
        nengo.Connection(model.B, model.combined[1], synapse=None)

        def product(x):
            return x[0] * x[1]

        nengo.Connection(model.combined, model.prod, function=product)

    return model
Example #19
class SpikingRectifiedLinear(RectifiedLinear):
    """A rectified integrate and fire neuron model.

    Each neuron is modeled as a rectified line. That is, the neuron's activity
    scales linearly with current, unless the current is less than zero, at
    which point the neural activity will stay at zero. This is a spiking
    version of the RectifiedLinear neuron model.

    Parameters
    ----------
    amplitude : float
        Scaling factor on the neuron output. Corresponds to the relative
        amplitude of the output spikes of the neuron.
    initial_state : {str: Distribution or array_like}
        Mapping from state variables names to their desired initial value.
        These values will override the defaults set in the class's state attribute.
    """

    state = {"spikes": Choice([0]), "voltage": Uniform(low=0, high=1)}

    def rates(self, x, gain, bias):
        """Use RectifiedLinear to determine rates."""

        J = self.current(x, gain, bias)
        out = np.zeros_like(J)
        RectifiedLinear.step(self, dt=1.0, J=J, rates=out)
        return out

    def step(self, dt, J, spikes, voltage):
        """Implement the integrate and fire nonlinearity."""

        voltage += np.maximum(J, 0) * dt
        n_spikes = np.floor(voltage)
        spikes[:] = (self.amplitude / dt) * n_spikes
        voltage -= n_spikes
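
To make the step rule concrete, here is a plain-numpy trace of a single neuron driven by a constant suprathreshold current (independent of any nengo objects; the numbers are illustrative):

import numpy as np

dt, amplitude = 0.001, 1.0
J = np.array([120.0])            # constant input current
voltage = np.zeros(1)
spike_count = 0
for _ in range(1000):            # one second of simulation
    voltage += np.maximum(J, 0) * dt
    n_spikes = np.floor(voltage)
    spike_count += int(n_spikes[0])
    voltage -= n_spikes
print(spike_count)  # ~120 spikes in 1 s, matching the rectified-linear rate max(J, 0)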
Example #20
 def sample(self, num, d=1, rng=np.random):
     """Samples ``n`` points in ``d`` dimensions."""
     y = self._sample(d, rng)
     r = 1./np.max(np.abs(y), axis=0)
     assert r.shape == (d,)
     rI = r*np.eye(d)
     return Choice(np.vstack([rI, -rI])).sample(num, d=d, rng=rng)
Example #21
    def __init__(self,
                 dimensions,
                 n_neurons_per_ensemble=50,
                 mutual_inhib=1.0,
                 threshold=0.0,
                 **kwargs):
        if "net" in kwargs:
            raise ObsoleteError("The 'net' argument is no longer supported.")
        kwargs.setdefault("label", "Thalamus")
        super().__init__(**kwargs)

        with self:
            self.actions = EnsembleArray(
                n_neurons_per_ensemble,
                dimensions,
                intercepts=Uniform(threshold, 1),
                encoders=Choice([[1.0]]),
                label="actions",
            )
            nengo.Connection(
                self.actions.output,
                self.actions.input,
                transform=(np.eye(dimensions) - 1) * mutual_inhib,
            )
            self.bias = nengo.Node([1], label="thalamus bias")
            nengo.Connection(self.bias,
                             self.actions.input,
                             transform=np.ones((dimensions, 1)))

        self.input = self.actions.input
        self.output = self.actions.output
Example #22
    def make_thresh_ens_net(self,
                            threshold=0.5,
                            thresh_func=lambda x: 1,
                            exp_scale=None,
                            num_ens=1,
                            net=None,
                            **args):
        if net is None:
            label_str = args.get('label', 'Threshold_Ens_Net')
            net = nengo.Network(label=label_str)
        if exp_scale is None:
            exp_scale = (1 - threshold) / 10.0

        with net:
            ens_args = dict(args)
            ens_args['n_neurons'] = args.get('n_neurons', self.n_neurons_ens)
            ens_args['dimensions'] = args.get('dimensions', 1)
            ens_args['intercepts'] = \
                Exponential(scale=exp_scale, shift=threshold,
                            high=1)
            ens_args['encoders'] = Choice([[1]])
            ens_args['eval_points'] = Uniform(min(threshold + 0.1, 1.0), 1.1)
            ens_args['n_eval_points'] = 5000

            net.input = nengo.Node(size_in=num_ens)
            net.output = nengo.Node(size_in=num_ens)

            for i in range(num_ens):
                thresh_ens = nengo.Ensemble(**ens_args)
                nengo.Connection(net.input[i], thresh_ens, synapse=None)
                nengo.Connection(thresh_ens,
                                 net.output[i],
                                 function=thresh_func,
                                 synapse=None)
        return net
Example #23
def Product_2D_ens(n_neurons,
                   dimensions,
                   input_magnitude=1,
                   config=None,
                   net=None):
    """Computes the element-wise product of two equally sized vectors."""
    if net is None:
        net = nengo.Network(label="Product")

    if config is None:
        config = nengo.Config(nengo.Ensemble)
        config[nengo.Ensemble].encoders = Choice([[1, 1], [1, -1], [-1, 1],
                                                  [-1, -1]])

    with net, config:
        net.A = nengo.Node(size_in=dimensions, label="A")
        net.B = nengo.Node(size_in=dimensions, label="B")
        net.output = nengo.Node(size_in=dimensions, label="output")

        net.product = EnsembleArray(n_neurons,
                                    n_ensembles=dimensions,
                                    ens_dimensions=2,
                                    radius=input_magnitude * np.sqrt(2))
        nengo.Connection(net.A, net.product.input[::2], synapse=None)
        nengo.Connection(net.B, net.product.input[1::2], synapse=None)
        net.output = net.product.add_output('product', lambda x: x[0] * x[1])
    return net
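
A minimal usage sketch for Product_2D_ens, assuming the imports used above (nengo, numpy, EnsembleArray, Choice) are available; the input vectors are arbitrary:

with nengo.Network(seed=0) as model:
    a_in = nengo.Node([0.5, -0.3, 0.8])
    b_in = nengo.Node([0.4, 0.9, -0.2])
    prod = Product_2D_ens(n_neurons=100, dimensions=3)
    nengo.Connection(a_in, prod.A)
    nengo.Connection(b_in, prod.B)
    out_p = nengo.Probe(prod.output, synapse=0.03)

with nengo.Simulator(model) as sim:
    sim.run(0.5)
# The probed output should approach the element-wise product
# [0.2, -0.27, -0.16], up to the usual neural approximation error.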
Example #24
def Product(neuron_per_dimension, input_magnitude):
    # Create the model object
    model = nengo.Network(label='Product')
    with model:
        # Create passthrough nodes to redirect both inputs
        model.A = nengo.Node(output=None, size_in=1)
        model.B = nengo.Node(output=None, size_in=1)

        model.combined = nengo.Ensemble(neuron_per_dimension * 2,
                                        dimensions=2,
                                        radius=np.sqrt(input_magnitude**2 +
                                                       input_magnitude**2),
                                        encoders=Choice([[1, 1], [-1, 1],
                                                         [1, -1], [-1, -1]]))

        model.prod = nengo.Ensemble(neuron_per_dimension,
                                    dimensions=1,
                                    radius=input_magnitude * 2)

        # Connect everything up
        nengo.Connection(model.A, model.combined[0], synapse=None)
        nengo.Connection(model.B, model.combined[1], synapse=None)

        def product(x):
            return x[0] * x[1]

        nengo.Connection(model.combined, model.prod, function=product)
    return model
Example #25
def test_noise_copies_ok(Simulator, NonDirectNeuronType, seed, plt, allclose):
    """Make sure the same noise process works in multiple ensembles.

    We test this both with the default system and without.
    """

    process = FilteredNoise(synapse=nengo.Alpha(1.0), dist=Choice([[0.5]]))
    with nengo.Network(seed=seed) as model:
        if (
            NonDirectNeuronType.spiking
            or RegularSpiking in NonDirectNeuronType.__bases__
        ):
            neuron_type = NonDirectNeuronType(initial_state={"voltage": Choice([0])})
        else:
            neuron_type = NonDirectNeuronType()
        model.config[nengo.Ensemble].neuron_type = neuron_type
        model.config[nengo.Ensemble].encoders = Choice([[1]])
        model.config[nengo.Ensemble].gain = Choice([5])
        model.config[nengo.Ensemble].bias = Choice([2])
        model.config[nengo.Ensemble].noise = process
        const = nengo.Node(output=1)
        a = nengo.Ensemble(1, 1, noise=process)
        b = nengo.Ensemble(1, 1, noise=process)
        c = nengo.Ensemble(1, 1)  # defaults to noise=process
        nengo.Connection(const, a)
        nengo.Connection(const, b)
        nengo.Connection(const, c)
        ap = nengo.Probe(a.neurons, synapse=0.01)
        bp = nengo.Probe(b.neurons, synapse=0.01)
        cp = nengo.Probe(c.neurons, synapse=0.01)
    with Simulator(model) as sim:
        sim.run(0.06)
    t = sim.trange()

    plt.subplot(2, 1, 1)
    plt.plot(t, sim.data[ap], lw=3)
    plt.plot(t, sim.data[bp], lw=2)
    plt.plot(t, sim.data[cp])
    plt.subplot(2, 1, 2)
    plt.plot(*nengo.utils.ensemble.tuning_curves(a, sim), lw=3)
    plt.plot(*nengo.utils.ensemble.tuning_curves(b, sim), lw=2)
    plt.plot(*nengo.utils.ensemble.tuning_curves(c, sim))

    assert allclose(sim.data[ap], sim.data[bp])
    assert allclose(sim.data[bp], sim.data[cp])
Example #26
 def __init__(self, base_type, amplitude=1.0, initial_state=None):
     if "voltage" in base_type.state:
         raise ValidationError(
             "Cannot already have a 'voltage' state variable",
             attr="base_type",
             obj=self,
         )
     self.state = {"spikes": Choice([0]), "voltage": Uniform(low=0, high=1)}
     super().__init__(base_type, amplitude=amplitude, initial_state=initial_state)
Example #27
def test_argreprs():
    def check_init_args(cls, args):
        assert getfullargspec(cls.__init__).args[1:] == args

    def check_repr(obj):
        assert eval(repr(obj)) == obj

    check_init_args(PDF, ["x", "p"])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))

    check_init_args(Uniform, ["low", "high", "integer"])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))

    check_init_args(Gaussian, ["mean", "std"])
    check_repr(Gaussian(0, 2))

    check_init_args(Exponential, ["scale", "shift", "high"])
    check_repr(Exponential(2.0))
    check_repr(Exponential(2.0, shift=0.1))
    check_repr(Exponential(2.0, shift=0.1, high=10.0))

    check_init_args(UniformHypersphere, ["surface", "min_magnitude"])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))

    check_init_args(Choice, ["options", "weights"])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))

    check_init_args(Samples, ["samples"])
    check_repr(Samples([3, 2, 1]))

    check_init_args(SqrtBeta, ["n", "m"])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))

    check_init_args(SubvectorLength, ["dimensions", "subdimensions"])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))

    check_init_args(CosineSimilarity, ["dimensions"])
    check_repr(CosineSimilarity(6))
Example #28
def test_argreprs():
    def check_init_args(cls, args):
        assert getfullargspec(cls.__init__).args[1:] == args

    def check_repr(obj):
        assert eval(repr(obj)) == obj

    check_init_args(PDF, ['x', 'p'])
    check_repr(PDF([1, 2, 3], [0.1, 0.8, 0.1]))

    check_init_args(Uniform, ['low', 'high', 'integer'])
    check_repr(Uniform(1, 3))
    check_repr(Uniform(1, 4, integer=True))

    check_init_args(Gaussian, ['mean', 'std'])
    check_repr(Gaussian(0, 2))

    check_init_args(Exponential, ['scale', 'shift', 'high'])
    check_repr(Exponential(2.))
    check_repr(Exponential(2., shift=0.1))
    check_repr(Exponential(2., shift=0.1, high=10.))

    check_init_args(UniformHypersphere, ['surface', 'min_magnitude'])
    check_repr(UniformHypersphere())
    check_repr(UniformHypersphere(surface=True))
    check_repr(UniformHypersphere(min_magnitude=0.3))

    check_init_args(Choice, ['options', 'weights'])
    check_repr(Choice([3, 2, 1]))
    check_repr(Choice([3, 2, 1], weights=[0.1, 0.2, 0.7]))

    check_init_args(Samples, ['samples'])
    check_repr(Samples([3, 2, 1]))

    check_init_args(SqrtBeta, ['n', 'm'])
    check_repr(SqrtBeta(3))
    check_repr(SqrtBeta(3, m=2))

    check_init_args(SubvectorLength, ['dimensions', 'subdimensions'])
    check_repr(SubvectorLength(6))
    check_repr(SubvectorLength(6, 2))

    check_init_args(CosineSimilarity, ['dimensions'])
    check_repr(CosineSimilarity(6))
Example #29
def test_choice(weights, rng):
    n = 1000
    choices = [[1, 1], [1, -1], [-1, 1], [-1, -1]]
    N = len(choices)

    dist = Choice(choices, weights=weights)
    # If d is passed, it has to match
    with pytest.raises(ValueError):
        dist.sample(n, d=4, rng=rng)
    sample = dist.sample(n, rng=rng)
    tsample, tchoices = list(map(tuple, sample)), list(map(tuple, choices))

    # check that frequency of choices matches weights
    inds = [tchoices.index(s) for s in tsample]
    hist, bins = np.histogram(inds, bins=np.linspace(-0.5, N - 0.5, N + 1))
    p_empirical = hist / float(hist.sum())
    p = np.ones(N) / N if dist.p is None else dist.p
    sterr = 1. / np.sqrt(n)  # expected maximum standard error
    assert np.allclose(p, p_empirical, atol=2 * sterr)
Example #30
 def thresh_ens_config(self):
     cfg = nengo.Config(nengo.Ensemble)
     cfg[nengo.Ensemble].update({
         'radius': 1,
         'intercepts': Uniform(0.5, 1.0),
         'encoders': Choice([[1]]),
         'eval_points': Uniform(0.75, 1.1),
         'n_eval_points': self.n_eval_points,
     })
     return cfg