Example #1
def test_relu_response_curves(Simulator, plt, allclose):
    n = 256
    encoders = np.ones((n, 1))
    gain = np.zeros(n)
    bias = np.linspace(0, 50, n)

    with nengo.Network() as model:
        a = nengo.Ensemble(n,
                           1,
                           neuron_type=nengo.SpikingRectifiedLinear(),
                           encoders=encoders,
                           gain=gain,
                           bias=bias)
        ap = nengo.Probe(a.neurons)

    dt = 0.001
    t_final = 1.0
    with Simulator(model, dt=dt) as sim:
        sim.run(t_final)

    scount = np.sum(sim.data[ap] > 0, axis=0)
    actual = nengo.SpikingRectifiedLinear().rates(0., gain, bias)
    plt.plot(bias, actual, "b", label="Ideal")
    plt.plot(bias, scount, "g", label="Loihi")
    plt.xlabel("Bias current")
    plt.ylabel("Firing rate (Hz)")
    plt.legend(loc="best")

    assert allclose(actual, scount, atol=5)
Example #2
def test_regular_spiking(Simulator, inference_only, seed):
    with nengo.Network() as net:
        config.configure_settings(inference_only=inference_only)

        inp = nengo.Node([1])
        ens0 = nengo.Ensemble(
            100,
            1,
            neuron_type=nengo.SpikingRectifiedLinear(amplitude=2),
            seed=seed)
        ens1 = nengo.Ensemble(
            100,
            1,
            neuron_type=nengo.RegularSpiking(nengo.RectifiedLinear(),
                                             amplitude=2),
            seed=seed,
        )

        nengo.Connection(inp, ens0)
        nengo.Connection(inp, ens1)

        p0 = nengo.Probe(ens0.neurons)
        p1 = nengo.Probe(ens1.neurons)

    with pytest.warns(None) as recwarns:
        with Simulator(net) as sim:
            sim.run_steps(50)

    assert np.allclose(sim.data[p0], sim.data[p1])
    # check that it is actually using the tensorflow implementation
    assert not any("native TensorFlow implementation" in str(w.message)
                   for w in recwarns)
Example #3
def test_neuron_to_neuron(Simulator, factor, seed, allclose, plt):
    # note: we use these weird factor values so that voltages don't line up
    # exactly with the firing threshold.  since loihi neurons fire when
    # voltage > threshold (rather than >=), if the voltages line up
    # exactly then we need an extra spike each time to push `b` over threshold
    dt = 5e-4

    with nengo.Network(seed=seed) as net:
        n = 10
        stim = nengo.Node(lambda t: [np.sin(t * 2 * np.pi)])
        a = nengo.Ensemble(n, 1)
        nengo.Connection(stim, a)

        b = nengo.Ensemble(n, 1, neuron_type=nengo.SpikingRectifiedLinear(),
                           gain=np.ones(n), bias=np.zeros(n))
        nengo.Connection(a.neurons, b.neurons, synapse=None,
                         transform=np.eye(n) * factor)

        p_a = nengo.Probe(a.neurons)
        p_b = nengo.Probe(b.neurons)

    with Simulator(net, dt=dt) as sim:
        sim.run(1.0)

    y_ref = np.floor(np.sum(sim.data[p_a] > 0, axis=0) * factor)
    y_sim = np.sum(sim.data[p_b] > 0, axis=0)
    plt.plot(y_ref, c='k')
    plt.plot(y_sim)

    assert allclose(y_sim, y_ref, atol=1)
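The comment at the top of this example refers to Loihi's strict firing rule (voltage > threshold rather than >=). A minimal standalone sketch of that effect, with illustrative weights that are assumed rather than taken from the test:

def spikes_to_fire(weight, threshold=1.0, strict=True):
    # count presynaptic spikes needed before an accumulating voltage
    # crosses the threshold under the given comparison rule
    v = 0.0
    for n in range(1, 100):
        v += weight
        if (v > threshold) if strict else (v >= threshold):
            return n

# weight 0.5 lands exactly on the threshold, so the strict rule needs one
# extra presynaptic spike; a slightly off-integer weight (0.51) does not
print(spikes_to_fire(0.5), spikes_to_fire(0.51))   # -> 3 2
print(spikes_to_fire(0.5, strict=False))           # -> 2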
Example #4
def test_conv_onchip(Simulator, plt):
    """Tests a fully on-chip conv connection. """
    from nengo._vendor.npconv2d.conv2d import conv2d

    kernel = np.array([[-1, 2, -1], [-1, 2, -1], [-1, 2, -1]], dtype=float)
    kernel /= kernel.max()

    image = np.array([[1, 2, 1, 2, 0], [2, 3, 2, 1, 1], [1, 2, 1, 2, 3],
                      [2, 3, 2, 1, 1], [1, 2, 1, 2, 0]],
                     dtype=float)
    image /= image.max()

    input_scale = 119.
    bias = input_scale * image.ravel()

    neuron_type = nengo.SpikingRectifiedLinear()

    y_ref = LoihiSpikingRectifiedLinear().rates(image.ravel(), input_scale, 0)
    y_ref = conv2d(y_ref.reshape(1, 5, 5, 1),
                   kernel.reshape(3, 3, 1, 1),
                   pad='VALID')
    y_ref = LoihiSpikingRectifiedLinear().rates(y_ref.ravel(), 1.,
                                                0.).reshape(3, 3)

    with nengo.Network() as net:
        a = nengo.Ensemble(bias.size,
                           1,
                           neuron_type=neuron_type,
                           gain=nengo.dists.Choice([0]),
                           bias=bias)

        transform = nengo_transforms.Convolution(n_filters=1,
                                                 input_shape=(5, 5, 1),
                                                 init=kernel.reshape(
                                                     3, 3, 1, 1))

        b = nengo.Ensemble(transform.output_shape.size,
                           1,
                           neuron_type=neuron_type,
                           gain=nengo.dists.Choice([1]),
                           bias=nengo.dists.Choice([0]))

        nengo.Connection(a.neurons, b.neurons, transform=transform)
        bp = nengo.Probe(b.neurons, synapse=nengo.Alpha(0.02))

    with Simulator(net) as sim:
        sim.run(0.3)

    y_ref = y_ref / input_scale
    y = sim.data[bp][-1].reshape(3, -1) / input_scale

    plt.subplot(121)
    plt.imshow(y_ref)
    plt.colorbar()
    plt.subplot(122)
    plt.imshow(y)
    plt.colorbar()

    assert np.allclose(y, y_ref, atol=0.02, rtol=0.1)
Example #5
def test_amplitude(Simulator, amplitude, neuron_type, seed, plt, allclose):
    with nengo.Network(seed=seed) as net:
        a = nengo.Node([0.5])
        n = 100
        ens = nengo.Ensemble(n,
                             1,
                             neuron_type=neuron_type(amplitude=amplitude))
        ens2 = nengo.Ensemble(
            n,
            1,
            gain=np.ones(n),
            bias=np.zeros(n),
            neuron_type=nengo.SpikingRectifiedLinear(),
        )
        nengo.Connection(a, ens)

        # note: slight boost on transform so that the post neurons are pushed
        # over threshold, rather than ==threshold
        nengo.Connection(ens.neurons,
                         ens2.neurons,
                         synapse=None,
                         transform=np.eye(n) * 1.02)

        node = nengo.Node(size_in=n)
        nengo.Connection(ens.neurons, node, synapse=None)

        ens_p = nengo.Probe(ens, synapse=0.1)
        neuron_p = nengo.Probe(ens.neurons)
        indirect_p = nengo.Probe(node)
        neuron2_p = nengo.Probe(ens2.neurons)

    with Simulator(net, precompute=True) as sim:
        sim.run(1)

    spikemean1 = np.mean(sim.data[neuron_p], axis=0)
    spikemean2 = np.mean(sim.data[neuron2_p], axis=0)

    plt.subplot(211)
    plt.plot(sim.trange(), sim.data[ens_p])
    plt.subplot(212)
    i = np.argsort(spikemean1)
    plt.plot(spikemean1[i])
    plt.plot(spikemean2[i], linestyle="--")

    assert allclose(sim.data[ens_p][sim.trange() > 0.9], 0.5, atol=0.05)
    assert np.max(sim.data[neuron_p]) == amplitude / sim.dt

    # the identity neuron-to-neuron connection causes `ens2` to fire at
    # `amplitude` * the firing rate of `ens` (i.e., the same overall firing
    # rate as `ens`)
    assert allclose(spikemean1, spikemean2, atol=1)

    # note: one-timestep delay, despite synapse=None
    assert allclose(sim.data[neuron_p][:-1], sim.data[indirect_p][1:])
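A small worked-arithmetic sketch of the amplitude bookkeeping checked above; the dt, rate, and amplitude values here are assumed for illustration. A probed spike is recorded as amplitude / dt, so the time-averaged probe value equals rate * amplitude, and equal averages imply `ens2` fires at amplitude times the rate of `ens`:

dt = 0.001
amplitude = 2.0
rate_ens = 40.0                     # assumed firing rate of `ens` (Hz)
spike_height = amplitude / dt       # value recorded on each spike step: 2000.0
mean_ens = rate_ens * amplitude     # time-average of the `ens` neuron probe
rate_ens2 = amplitude * rate_ens    # `ens2` (default amplitude 1) fires faster
mean_ens2 = rate_ens2 * 1.0         # but its time-averaged probe value matches
print(spike_height, mean_ens, mean_ens2)   # -> 2000.0 80.0 80.0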
Example #6
def test_loihi_rates(dt, neuron_type, Simulator, plt, allclose):
    n = 256
    x = np.linspace(-0.1, 1, n)

    encoders = np.ones((n, 1))
    max_rates = 400 * np.ones(n)
    intercepts = 0 * np.ones(n)
    gain, bias = neuron_type.gain_bias(max_rates, intercepts)
    j = x * gain + bias

    with nengo.Network() as model:
        a = nengo.Ensemble(n,
                           1,
                           neuron_type=neuron_type,
                           encoders=encoders,
                           gain=gain,
                           bias=j)
        ap = nengo.Probe(a.neurons)

    with Simulator(model, dt=dt) as sim:
        sim.run(1.0)

    est_rates = sim.data[ap].mean(axis=0)
    ref_rates = loihi_rates(neuron_type, x[np.newaxis, :], gain, bias,
                            dt=dt).squeeze(axis=0)

    ref_rates2 = None
    if isinstance(neuron_type, nengo.RegularSpiking):
        if isinstance(neuron_type.base_type, nengo.LIFRate):
            neuron_type2 = nengo.LIF(
                tau_rc=neuron_type.base_type.tau_rc,
                tau_ref=neuron_type.base_type.tau_ref,
                amplitude=neuron_type.amplitude,
            )
        elif isinstance(neuron_type.base_type, nengo.RectifiedLinear):
            neuron_type2 = nengo.SpikingRectifiedLinear(
                amplitude=neuron_type.amplitude, )

        ref_rates2 = loihi_rates(neuron_type2,
                                 x[np.newaxis, :],
                                 gain,
                                 bias,
                                 dt=dt).squeeze(axis=0)

    plt.plot(x, ref_rates, "k", label="predicted")
    if ref_rates2 is not None:
        plt.plot(x, ref_rates2, "b", label="predicted-base")
    plt.plot(x, est_rates, "g", label="measured")
    plt.legend(loc="best")

    assert ref_rates.shape == est_rates.shape
    assert allclose(est_rates, ref_rates, atol=1, rtol=0, xtol=1)
    if ref_rates2 is not None:
        assert allclose(ref_rates2, ref_rates)
Example #7
def test_conv_input(channels_last, Simulator, plt, allclose):
    input_shape = ImageShape(4, 4, 1, channels_last=channels_last)
    seed = 3  # fix seed to do the same computation for both channel positions
    rng = np.random.RandomState(seed + 1)

    with nengo.Network(seed=seed) as net:
        nengo_loihi.add_params(net)

        a = nengo.Node(rng.uniform(0, 1, size=input_shape.size))

        nc = 2
        kernel = np.array([1., -1.]).reshape((1, 1, 1, nc))
        transform = nengo_loihi.Conv2D.from_kernel(kernel, input_shape)
        b = nengo.Ensemble(transform.output_shape.size,
                           1,
                           neuron_type=nengo.SpikingRectifiedLinear(),
                           max_rates=nengo.dists.Choice([50]),
                           intercepts=nengo.dists.Choice([0]))
        net.config[b].on_chip = False
        nengo.Connection(a, b.neurons, transform=transform)
        output_shape = transform.output_shape

        nf = 4
        kernel = rng.uniform(-0.005, 0.005, size=(nc, 3, 3, nf))
        transform = nengo_loihi.Conv2D.from_kernel(kernel, output_shape)
        c = nengo.Ensemble(transform.output_shape.size,
                           1,
                           neuron_type=nengo.LIF(),
                           max_rates=nengo.dists.Choice([100]),
                           intercepts=nengo.dists.Choice([0]))
        nengo.Connection(b.neurons, c.neurons, transform=transform)
        output_shape = transform.output_shape

        p = nengo.Probe(c.neurons)

    with nengo.Simulator(net, optimize=False) as sim:
        sim.run(1.0)

    with Simulator(net, seed=seed) as sim_loihi:
        sim_loihi.run(1.0)

    p0 = np.sum(sim.data[p] > 0, axis=0).reshape(output_shape.shape())
    p1 = np.sum(sim_loihi.data[p] > 0, axis=0).reshape(output_shape.shape())
    if not output_shape.channels_last:
        p0 = np.transpose(p0, (1, 2, 0))
        p1 = np.transpose(p1, (1, 2, 0))

    plt.plot(p0.ravel(), 'k')
    plt.plot(p1.ravel(), 'b--')

    # loihi spikes are not exactly the same, but should be close-ish
    assert allclose(p0, p1, rtol=0.15, atol=1)
Example #8
def run_snn(model,
            x_test,
            y_test,
            params_load_path,
            iteration,
            timesteps=50,
            scale_firing_rates=1000,
            synapse=0.01,
            batch_size=16):
    """
    Run model in spiking setting
    :param batch_size: batch size
    :param model: model reference
    :param x_test: testing features
    :param y_test: testing labels
    :param params_load_path: path to load parameters
    :param iteration: number of current iteration
    :param timesteps: number of timesteps
    :param scale_firing_rates: firing rate scaling
    :param synapse: synaptic smoothing
    :return: accuracy, precision, recall, f1 and confusion matrix from the testing data
    """
    converter = nengo_dl.Converter(
        model,
        swap_activations={tf.nn.relu: nengo.SpikingRectifiedLinear()},
        scale_firing_rates=scale_firing_rates,
        synapse=synapse
    )  # create a Nengo converter object and swap all relu activations with spiking relu

    with converter.net:
        nengo_dl.configure_settings(stateful=False)

    output_layer = converter.outputs[model.get_layer(
        'output_layer')]  # output layer for simulator

    x_test_tiled = np.tile(x_test,
                           (1, timesteps, 1))  # tile test data to timesteps

    with nengo_dl.Simulator(converter.net) as simulator:
        simulator.load_params(params_load_path)

        # Get the statistics
        accuracy, precision, recall, f1, confusion_matrix = get_metrics(
            simulator, output_layer, x_test_tiled, y_test, batch_size,
            f'{iteration}. CNN (SNN conversion)')
        return {
            'accuracy': accuracy,
            'precision': precision,
            'recall': recall,
            'f1': f1,
            'confusion_matrix': confusion_matrix
        }
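A hypothetical call of the helper above, assuming a trained Keras model `model`, test arrays, and parameters previously saved by a nengo_dl simulator; the path, shapes, and variable names are placeholders rather than values from the source:

results = run_snn(
    model,                                 # trained Keras model (assumed)
    x_test,                                # assumed shape (n_examples, 1, n_features)
    y_test,
    params_load_path="./trained_params",   # placeholder path
    iteration=0,
    timesteps=50,                          # each example is tiled to 50 timesteps
    scale_firing_rates=1000,               # higher rates -> closer to the ReLU response
    synapse=0.01,                          # low-pass filtering of the spike trains
)
print(results["accuracy"], results["f1"])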
Example #9
def test_drop_trace_spikes(Simulator, seed):
    with nengo.Network(seed=seed) as net:
        a = nengo.Ensemble(10, 1, gain=nengo.dists.Choice([1]),
                           bias=nengo.dists.Choice([2000]),
                           neuron_type=nengo.SpikingRectifiedLinear())
        b = nengo.Node(size_in=1)

        conn = nengo.Connection(a, b, learning_rule_type=nengo.PES(1))

        nengo.Connection(b, conn.learning_rule)

    with Simulator(net, target="sim") as sim:
        with pytest.raises(SimulationError):
            sim.run(1.0)
Example #10
def test_swap_activations_key_never_used():
    """
    Ensure warnings are thrown properly when there is an unused swap activations key.
    """

    def relu(x):
        return tf.maximum(x, 0)

    def relu2(x):
        return tf.maximum(x, 0)

    inp = tf.keras.Input((1,))
    out = tf.keras.layers.Dense(units=10, activation=relu)(inp)

    # Test that swap_activations are throwing warnings when not used
    with pytest.warns(UserWarning, match="no layers in the model with that activation"):
        conv = converter.Converter(
            tf.keras.Model(inp, out),
            allow_fallback=False,
            swap_activations={
                relu: nengo.RectifiedLinear(),
                relu2: nengo.RectifiedLinear(),
            },
        )
    assert conv.swap_activations.unused_keys() == {relu2}

    # Test that there is no warning if all keys are used
    inp = tf.keras.Input((1,))
    out = tf.keras.layers.Dense(units=10, activation=relu)(inp)
    out = tf.keras.layers.Dense(units=10, activation=relu2)(out)
    with pytest.warns(None) as recwarns:
        conv = converter.Converter(
            tf.keras.Model(inp, out),
            allow_fallback=False,
            swap_activations={
                relu: nengo.RectifiedLinear(),
                relu2: nengo.RectifiedLinear(),
                nengo.RectifiedLinear(): nengo.SpikingRectifiedLinear(),
            },
        )
    assert not any(
        "no layers in the model with that activation" in str(w.message)
        for w in recwarns
    )
    assert len(conv.swap_activations.unused_keys()) == 0

    # check swap_activations dict functions
    assert len(conv.swap_activations) == 3
    assert set(conv.swap_activations.keys()) == {relu, relu2, nengo.RectifiedLinear()}
Example #11
def test_scale_firing_rates_cases(Simulator, scale_firing_rates, expected_rates):
    input_val = 100
    bias_val = 50
    n_steps = 100

    inp = tf.keras.Input(shape=(1,))
    x0 = tf.keras.layers.ReLU()(inp)
    x1 = tf.keras.layers.Dense(
        units=1,
        activation=tf.nn.relu,
        kernel_initializer=tf.initializers.constant([[1]]),
        bias_initializer=tf.initializers.constant([[bias_val]]),
    )(inp)
    model = tf.keras.Model(inp, [x0, x1])

    # convert indices to layers
    scale_firing_rates = (
        {model.layers[k]: v for k, v in scale_firing_rates.items()}
        if isinstance(scale_firing_rates, dict)
        else scale_firing_rates
    )

    conv = converter.Converter(
        model,
        swap_activations={tf.nn.relu: nengo.SpikingRectifiedLinear()},
        scale_firing_rates=scale_firing_rates,
    )

    with Simulator(conv.net) as sim:
        sim.run_steps(
            n_steps, data={conv.inputs[inp]: np.ones((1, n_steps, 1)) * input_val}
        )

        for i, p in enumerate(conv.net.probes):
            # spike heights are scaled down
            assert np.allclose(np.max(sim.data[p]), 1 / sim.dt / expected_rates[i])

            # number of spikes is scaled up
            assert np.allclose(
                np.count_nonzero(sim.data[p]),
                (input_val if i == 0 else input_val + bias_val)
                * expected_rates[i]
                * n_steps
                * sim.dt,
                atol=1,
            )
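The two assertions above encode the scale_firing_rates trade-off: spike heights shrink by the scale factor while spike counts grow by it, so the integrated output is unchanged. A short arithmetic sketch with assumed numbers:

dt = 0.001
rate = 150.0              # unscaled firing rate in Hz (assumed)
n_steps = 100
for s in (1, 10, 100):    # candidate scale_firing_rates values (assumed)
    spike_height = 1 / (dt * s)            # value recorded per spike
    n_spikes = rate * s * n_steps * dt     # expected spikes in the run window
    total = spike_height * n_spikes * dt   # integrated output over the run
    print(s, spike_height, n_spikes, total)  # total ~ rate * n_steps * dt for every s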
Example #12
def test_learning_phase(Simulator):
    with nengo.Network() as net:
        inp = nengo.Node([0])
        ens = nengo.Ensemble(1,
                             1,
                             gain=[0],
                             bias=[1],
                             neuron_type=nengo.SpikingRectifiedLinear())
        nengo.Connection(inp, ens, synapse=None)
        p = nengo.Probe(ens.neurons)

    with tf.keras.backend.learning_phase_scope(1):
        with Simulator(net) as sim:
            sim.run_steps(10)
            assert np.allclose(sim.data[p], 1)

    with Simulator(net) as sim:
        sim.run_steps(10)
        assert np.allclose(sim.data[p], 0)
Example #13
    def get_ensemble(self, dim):
        if self.pairs_per_dim != 1:
            # To support this, we need to figure out how to deal with the
            # `post_inds` that map neurons to axons. Either we can do this
            # on the host, in which case we'd have inputs going to the chip
            # where we can have multiple spikes per axon per timestep, or we
            # need to do it on the chip with one input axon per neuron.
            raise NotImplementedError(
                "Input neurons with more than one neuron per dimension")

        n_neurons = 2 * dim * self.pairs_per_dim
        encoders = np.vstack([np.eye(dim), -np.eye(dim)] * self.pairs_per_dim)
        ens = nengo.Ensemble(n_neurons,
                             dim,
                             neuron_type=nengo.SpikingRectifiedLinear(),
                             encoders=encoders,
                             gain=self.gain.repeat(dim),
                             bias=self.bias.repeat(dim),
                             add_to_container=False)
        return ens
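A quick look at the encoder layout built by get_ensemble above, using dim = 2 and pairs_per_dim = 1 as assumed example values: each dimension gets one "on" neuron (+1 encoder) and one "off" neuron (-1 encoder), giving n_neurons = 2 * dim * pairs_per_dim:

import numpy as np

dim, pairs_per_dim = 2, 1
encoders = np.vstack([np.eye(dim), -np.eye(dim)] * pairs_per_dim)
print(encoders)
# [[ 1.  0.]
#  [ 0.  1.]
#  [-1.  0.]
#  [ 0. -1.]]
assert encoders.shape == (2 * dim * pairs_per_dim, dim)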
Example #14
def test_neuron_to_neuron(Simulator, factor, do_pre_slice,
                          seed, allclose, plt):
    # note: we use these weird factor values so that voltages don't line up
    # exactly with the firing threshold.  since loihi neurons fire when
    # voltage > threshold (rather than >=), if the voltages line up
    # exactly then we need an extra spike each time to push `b` over threshold
    dt = 5e-4
    simtime = 0.2

    na = 500  # test big to ensure full weight matrices are not being used

    if do_pre_slice:
        nb = int(np.ceil(na / 2.))
        pre_slice = slice(None, None, 2)
    else:
        nb = na
        pre_slice = slice(None)

    with nengo.Network(seed=seed) as net:

        stim = nengo.Node(lambda t: [np.sin(t * 2 * np.pi / simtime)])
        a = nengo.Ensemble(na, 1)
        nengo.Connection(stim, a)

        b = nengo.Ensemble(nb, 1, neuron_type=nengo.SpikingRectifiedLinear(),
                           gain=np.ones(nb), bias=np.zeros(nb))
        nengo.Connection(a.neurons[pre_slice], b.neurons, synapse=None,
                         transform=factor)

        p_a = nengo.Probe(a.neurons)
        p_b = nengo.Probe(b.neurons)

    with Simulator(net, dt=dt) as sim:
        sim.run(simtime)

    y_ref = np.floor(np.sum(sim.data[p_a][:, pre_slice] > 0, axis=0) * factor)
    y_sim = np.sum(sim.data[p_b] > 0, axis=0)
    plt.plot(y_ref, c='k')
    plt.plot(y_sim)

    assert allclose(y_sim, y_ref, atol=1)
Example #15
def build_SNN_simple(image_size, config):
    with nengo.Network(seed=config["seed"]) as net:
        # remove some unnecessary features to speed up the training
        nengo_dl.configure_settings(stateful=False)
        n_ensembles = 10000
        # input node

        inp = nengo.Node(np.zeros(image_size[-1]))  #
        u = nengo.networks.EnsembleArray(
            n_neurons=50,
            n_ensembles=n_ensembles,
            neuron_type=nengo.SpikingRectifiedLinear(),
        )
        nengo.Connection(inp, u.input, transform=np.zeros((n_ensembles, 512)))
        out = nengo.Node(size_in=101)
        nengo.Connection(u.output,
                         out,
                         transform=nengo_dl.dists.Glorot(),
                         synapse=None)
        p = nengo.Probe(out)
    return net
Example #16
def test_run_profile(network, train, pytestconfig, monkeypatch, tmpdir):
    monkeypatch.chdir(tmpdir)

    if network == "integrator":
        net = benchmarks.integrator(3, 2, nengo.SpikingRectifiedLinear())
    elif network == "cconv":
        net = benchmarks.cconv(3, 10, nengo.LIF())
    elif network == "test":
        with nengo.Network() as net:
            ens = nengo.Ensemble(10, 1)
            net.p = nengo.Probe(ens)

    benchmarks.run_profile(
        net,
        train=train,
        n_steps=10,
        do_profile=True,
        device=pytestconfig.getoption("--device"),
        unroll_simulation=pytestconfig.getoption("--unroll-simulation"),
        dtype=pytestconfig.getoption("dtype"),
    )

    assert net.config[net].inference_only == (not train)
Example #17
def test_neuron_to_neuron(Simulator, factor, do_pre_slice, sparse, seed,
                          allclose, plt):
    # note: we use these weird factor values so that voltages don't line up
    # exactly with the firing threshold.  since loihi neurons fire when
    # voltage > threshold (rather than >=), if the voltages line up
    # exactly then we need an extra spike each time to push `b` over threshold
    dt = 5e-4
    simtime = 0.2

    na = 500  # test big to ensure full weight matrices are not being used

    if do_pre_slice:
        nb = int(np.ceil(na / 2.0))
        pre_slice = slice(None, None, 2)
    else:
        nb = na
        pre_slice = slice(None)

    if sparse != "dense":
        if nengo_transforms is None:
            pytest.skip("Sparse matrices require nengo transforms")

        shape = (nb, nb)
        data = factor * np.ones(nb)
        rowi = coli = np.arange(nb)
        if sparse == "nengo":
            transform = nengo_transforms.Sparse(shape,
                                                indices=np.array(
                                                    (rowi, coli)).T,
                                                init=data)
        elif sparse == "scipy":
            transform = nengo_transforms.Sparse(shape,
                                                init=scipy.sparse.coo_matrix(
                                                    (data, (rowi, coli)),
                                                    shape=shape))
    else:
        transform = factor

    with nengo.Network(seed=seed) as net:

        stim = nengo.Node(lambda t: [np.sin(t * 2 * np.pi / simtime)])
        a = nengo.Ensemble(na, 1)
        nengo.Connection(stim, a)

        b = nengo.Ensemble(
            nb,
            1,
            neuron_type=nengo.SpikingRectifiedLinear(),
            gain=np.ones(nb),
            bias=np.zeros(nb),
        )
        nengo.Connection(a.neurons[pre_slice],
                         b.neurons,
                         synapse=None,
                         transform=transform)

        p_a = nengo.Probe(a.neurons)
        p_b = nengo.Probe(b.neurons)

    with Simulator(net, dt=dt) as sim:
        sim.run(simtime)

    y_ref = np.floor(np.sum(sim.data[p_a][:, pre_slice] > 0, axis=0) * factor)
    y_sim = np.sum(sim.data[p_b] > 0, axis=0)
    plt.plot(y_ref, c="k")
    plt.plot(y_sim)

    assert allclose(y_sim, y_ref, atol=1)
Example #18
    LoihiSpikingRectifiedLinear,
    install_dl_builders,
    loihi_rates,
    nengo_rates,
)

v0_arg = dict(initial_state={"voltage": nengo.dists.Choice([0])})


@pytest.mark.parametrize("dt", [3e-4, 1e-3])
@pytest.mark.parametrize(
    "neuron_type",
    [
        nengo.LIF(**v0_arg),
        nengo.LIF(tau_ref=0.001, tau_rc=0.07, amplitude=0.34, **v0_arg),
        nengo.SpikingRectifiedLinear(**v0_arg),
        nengo.SpikingRectifiedLinear(amplitude=0.23, **v0_arg),
        nengo.RegularSpiking(nengo.LIFRate(), **v0_arg),
        nengo.RegularSpiking(nengo.LIFRate(tau_ref=0.001, tau_rc=0.03),
                             amplitude=0.31,
                             **v0_arg),
        nengo.RegularSpiking(nengo.RectifiedLinear(), **v0_arg),
        nengo.RegularSpiking(nengo.RectifiedLinear(), amplitude=0.46, **
                             v0_arg),
    ],
)
def test_loihi_rates(dt, neuron_type, Simulator, plt, allclose):
    n = 256
    x = np.linspace(-0.1, 1, n)

    encoders = np.ones((n, 1))
Example #19
    model.add(tf.keras.layers.Dense(32))

    conv = converter.Converter(model, allow_fallback=False)
    assert conv.verify(training=False)
    assert conv.verify(training=True)


@pytest.mark.parametrize(
    "keras_activation, nengo_activation, swap",
    [
        (tf.nn.sigmoid, nengo.RectifiedLinear(), {
            tf.nn.sigmoid: tf.nn.relu
        }),
        (
            tf.nn.relu,
            nengo.SpikingRectifiedLinear(),
            {
                tf.nn.relu: nengo.SpikingRectifiedLinear()
            },
        ),
        (tf.nn.relu, nengo.LIF(), {
            nengo.RectifiedLinear(): nengo.LIF()
        }),
    ],
)
def test_activation_swap(Simulator, keras_activation, nengo_activation, swap,
                         rng):
    inp = x = tf.keras.Input(shape=(100, ))
    x = tf.keras.layers.Activation(activation=keras_activation)(x)
    x = tf.keras.layers.Dense(
        units=100,
Example #20
def test_conv_preslice(Simulator, plt):
    from nengo._vendor.npconv2d.conv2d import conv2d

    kernel = np.array([[-1, 2, -1], [-1, 2, -1], [-1, 2, -1]], dtype=float)
    kernel /= kernel.max()

    image = np.array([[1, 2, 1, 2, 0], [2, 3, 2, 1, 1], [1, 2, 1, 2, 3],
                      [2, 3, 2, 1, 1], [1, 2, 1, 2, 0]],
                     dtype=float)
    image /= image.max()

    image2 = np.column_stack([c * x for c in image.T for x in (1, -1)])

    input_gain = 149.

    neuron_type = nengo.SpikingRectifiedLinear()

    y_ref = LoihiSpikingRectifiedLinear().rates(image.ravel(), input_gain, 0)
    y_ref = conv2d(y_ref.reshape(1, 5, 5, 1),
                   kernel.reshape(3, 3, 1, 1),
                   pad='VALID')
    y_ref = LoihiSpikingRectifiedLinear().rates(y_ref.ravel(), 1.,
                                                0.).reshape(3, 3)

    with nengo.Network() as net:
        u = nengo.Node(image2.ravel())
        a = nengo.Ensemble(50,
                           1,
                           neuron_type=neuron_type,
                           gain=nengo.dists.Choice([input_gain]),
                           bias=nengo.dists.Choice([0]))

        transform = nengo_transforms.Convolution(n_filters=1,
                                                 input_shape=(5, 5, 1),
                                                 init=kernel.reshape(
                                                     3, 3, 1, 1))

        b = nengo.Ensemble(transform.output_shape.size,
                           1,
                           neuron_type=neuron_type,
                           gain=nengo.dists.Choice([1]),
                           bias=nengo.dists.Choice([0]))

        nengo.Connection(u, a.neurons, synapse=None)
        nengo.Connection(a.neurons[::2], b.neurons, transform=transform)
        bp = nengo.Probe(b.neurons, synapse=nengo.Alpha(0.02))

    hw_opts = dict(snip_max_spikes_per_step=100)
    with Simulator(net, hardware_options=hw_opts) as sim:
        sim.run(0.3)

    y_ref = y_ref / input_gain
    y = sim.data[bp][-1].reshape(3, -1) / input_gain

    plt.subplot(121)
    plt.imshow(y_ref)
    plt.colorbar()
    plt.subplot(122)
    plt.imshow(y)
    plt.colorbar()

    assert np.allclose(y, y_ref, atol=0.02, rtol=0.1)
Example #21
def test_conv_overlap_input(Simulator, plt):
    """Tests a fully on-chip conv connection. """
    conv2d = pytest.importorskip("nengo._vendor.npconv2d.conv2d")

    kernel = np.array([[-1, 2, -1], [-1, 2, -1], [-1, 2, -1]], dtype=float)
    kernel /= kernel.max()

    image = np.array(
        [
            [1, 2, 1, 2, 0],
            [2, 3, 2, 1, 1],
            [1, 2, 1, 2, 3],
            [2, 3, 2, 1, 1],
            [1, 2, 1, 2, 0],
        ],
        dtype=float,
    )
    image /= image.max()

    input_scale = 119.0
    bias = input_scale * image.ravel()

    neuron_type = nengo.SpikingRectifiedLinear()

    y_ref = LoihiSpikingRectifiedLinear().rates(image.ravel(), input_scale, 0)
    y_ref = conv2d.conv2d(y_ref.reshape((1, 5, 5, 1)),
                          kernel.reshape((3, 3, 1, 1)),
                          pad="VALID")
    y_ref = LoihiSpikingRectifiedLinear().rates(y_ref.ravel(), 1.0,
                                                0.0).reshape((3, 3))

    with nengo.Network() as net:
        a = nengo.Ensemble(
            bias.size,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([0]),
            bias=bias,
        )

        transform = nengo_transforms.Convolution(n_filters=1,
                                                 input_shape=(4, 5, 1),
                                                 init=kernel.reshape(
                                                     (3, 3, 1, 1)))

        b0 = nengo.Ensemble(
            transform.output_shape.size,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([1]),
            bias=nengo.dists.Choice([0]),
        )
        b1 = nengo.Ensemble(
            transform.output_shape.size,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([1]),
            bias=nengo.dists.Choice([0]),
        )

        nengo.Connection(a.neurons[:20], b0.neurons, transform=transform)
        nengo.Connection(a.neurons[5:], b1.neurons, transform=transform)
        b0p = nengo.Probe(b0.neurons, synapse=nengo.Alpha(0.02))
        b1p = nengo.Probe(b1.neurons, synapse=nengo.Alpha(0.02))

    with Simulator(net) as sim:
        sim.run(0.3)

    y_ref = y_ref / input_scale
    y0 = sim.data[b0p][-1].reshape((2, -1)) / input_scale
    y1 = sim.data[b1p][-1].reshape((2, -1)) / input_scale

    plt.subplot(131)
    plt.imshow(y_ref)
    plt.colorbar()
    plt.subplot(132)
    plt.imshow(y0)
    plt.colorbar()
    plt.subplot(133)
    plt.imshow(y1)
    plt.colorbar()

    assert np.allclose(y0, y_ref[:2], atol=0.02, rtol=0.1)
    assert np.allclose(y1, y_ref[1:], atol=0.02, rtol=0.1)
Example #22
def test_conv2d_weights(channels_last, hw_opts, request, plt, seed, rng,
                        allclose):
    def loihi_rates_n(neuron_type, x, gain, bias, dt):
        """Compute Loihi rates on higher dimensional inputs"""
        y = x.reshape(-1, x.shape[-1])
        gain = np.asarray(gain)
        bias = np.asarray(bias)
        if gain.ndim == 0:
            gain = gain * np.ones(x.shape[-1])
        if bias.ndim == 0:
            bias = bias * np.ones(x.shape[-1])
        rates = loihi_rates(neuron_type, y, gain, bias, dt)
        return rates.reshape(*x.shape)

    if channels_last:
        plt.saveas = None
        pytest.xfail("Blocked by CxBase cannot be > 256 bug")

    target = request.config.getoption("--target")
    if target != 'loihi' and len(hw_opts) > 0:
        pytest.skip("Hardware options only available on hardware")

    pop_type = 32

    # load data
    with open(os.path.join(test_dir, 'mnist10.pkl'), 'rb') as f:
        test10 = pickle.load(f)

    test_x = test10[0][0].reshape(28, 28)
    test_x = test_x[3:24, 3:24]
    test_x = 1.999 * test_x - 0.999

    filters = Gabor(freq=Uniform(0.5, 1)).generate(8, (7, 7), rng=rng)
    sti, stj = 2, 2
    tau_rc = 0.02
    tau_ref = 0.002
    tau_s = 0.005
    dt = 0.001

    encode_type = nengo.SpikingRectifiedLinear()
    encode_gain = 1. / dt
    encode_bias = 0.
    neuron_type = nengo.LIF(tau_rc=tau_rc, tau_ref=tau_ref)
    neuron_gain = 1.
    neuron_bias = 1.

    pres_time = 0.2

    # --- compute ideal outputs
    def conv_pm(x, kernel):
        y0 = scipy.signal.correlate2d(x[0], kernel, mode='valid')[::sti, ::stj]
        y1 = scipy.signal.correlate2d(x[1], kernel, mode='valid')[::sti, ::stj]
        return [y0, -y1]

    ref_out = np.array([test_x, -test_x])
    ref_out = loihi_rates_n(encode_type, ref_out, encode_gain, encode_bias, dt)
    ref_out = ref_out / encode_gain
    ref_out = np.array([conv_pm(ref_out, kernel) for kernel in filters])
    ref_out = ref_out.sum(axis=1)  # sum positive and negative parts
    ref_out = loihi_rates_n(neuron_type, ref_out, neuron_gain, neuron_bias, dt)

    # --- compute nengo_loihi outputs
    inp_biases = np.stack([test_x, -test_x], axis=-1 if channels_last else 0)
    inp_shape = nengo_transforms.ChannelShape(inp_biases.shape,
                                              channels_last=channels_last)

    kernel = np.array([filters, -filters])  # two channels, pos and neg
    kernel = np.transpose(kernel, (2, 3, 0, 1))
    conv2d_transform = nengo_transforms.Convolution(
        8,
        inp_shape,
        strides=(sti, stj),
        channels_last=channels_last,
        kernel_size=(7, 7),
        init=kernel)

    out_size = ref_out.size
    nf, nyi, nyj = ref_out.shape
    assert out_size <= 1024

    model = Model()

    # input block
    inp = LoihiBlock(inp_shape.size, label='inp')
    assert inp.n_neurons <= 1024
    inp.compartment.configure_relu()
    inp.compartment.bias[:] = inp_biases.ravel()

    inp_ax = Axon(np.prod(inp_shape.spatial_shape), label='inp_ax')
    inp_ax.set_compartment_axon_map(target_axons=conv.pixel_idxs(inp_shape),
                                    atoms=conv.channel_idxs(inp_shape))
    inp.add_axon(inp_ax)

    model.add_block(inp)

    # conv block
    neurons = LoihiBlock(out_size, label='neurons')
    assert neurons.n_neurons <= 1024
    neurons.compartment.configure_lif(tau_rc=tau_rc, tau_ref=tau_ref, dt=dt)
    neurons.compartment.configure_filter(tau_s, dt=dt)
    neurons.compartment.bias[:] = neuron_bias

    synapse = Synapse(np.prod(inp_shape.spatial_shape), label='synapse')
    weights, indices, axon_to_weight_map, bases = conv.conv2d_loihi_weights(
        conv2d_transform)
    synapse.set_population_weights(weights,
                                   indices,
                                   axon_to_weight_map,
                                   bases,
                                   pop_type=pop_type)

    neurons.add_synapse(synapse)

    out_probe = Probe(target=neurons, key='spiked')
    neurons.add_probe(out_probe)

    inp_ax.target = synapse
    model.add_block(neurons)

    # simulation
    discretize_model(model)

    n_steps = int(pres_time / dt)
    if target == 'loihi':
        with HardwareInterface(model, use_snips=False, seed=seed,
                               **hw_opts) as sim:
            sim.run_steps(n_steps)
            sim_out = sim.get_probe_output(out_probe)
    else:
        with EmulatorInterface(model, seed=seed) as sim:
            sim.run_steps(n_steps)
            sim_out = sim.get_probe_output(out_probe)

    sim_out = np.sum(sim_out, axis=0) / pres_time
    if channels_last:
        sim_out.shape = (nyi, nyj, nf)
        sim_out = np.transpose(sim_out, (2, 0, 1))
    else:
        sim_out.shape = (nf, nyi, nyj)

    out_max = max(ref_out.max(), sim_out.max())

    # --- plot results
    rows = 2
    cols = 2

    ax = plt.subplot(rows, cols, 1)
    tile(filters, cols=8, ax=ax)

    ax = plt.subplot(rows, cols, 2)
    tile(ref_out, vmin=0, vmax=out_max, cols=8, ax=ax)

    ax = plt.subplot(rows, cols, 3)
    plt.hist(ref_out.ravel(), bins=31)
    plt.hist(sim_out.ravel(), bins=31)

    ax = plt.subplot(rows, cols, 4)
    # tile(sim_out, vmin=0, vmax=1, cols=8, ax=ax)
    tile(sim_out, vmin=0, vmax=out_max, cols=8, ax=ax)

    assert allclose(sim_out, ref_out, atol=10, rtol=1e-3)
Example #23
def test_conv_split(Simulator, rng, plt, allclose):
    channels_last = False

    # load data
    with open(os.path.join(test_dir, 'mnist10.pkl'), 'rb') as f:
        test10 = pickle.load(f)

    input_shape = nengo_transforms.ChannelShape((1, 28, 28),
                                                channels_last=channels_last)

    n_filters = 8
    kernel_size = (7, 7)
    kernel = Gabor(freq=Uniform(0.5, 1)).generate(n_filters,
                                                  kernel_size,
                                                  rng=rng)
    kernel = kernel[None, :, :, :]  # single channel
    kernel = np.transpose(kernel, (2, 3, 0, 1))
    strides = (2, 2)

    seed = 3  # fix seed to do the same computation for both channel positions

    with nengo.Network(seed=seed) as net:
        nengo_loihi.add_params(net)

        a = nengo.Node(test10[0][0].ravel())

        # --- make population to turn image into spikes
        nc = 1
        in_kernel = np.array([1.]).reshape((1, 1, 1, nc))
        transform = nengo_transforms.Convolution(1,
                                                 input_shape,
                                                 kernel_size=(1, 1),
                                                 init=in_kernel,
                                                 channels_last=channels_last)
        b = nengo.Ensemble(transform.output_shape.size,
                           1,
                           neuron_type=nengo.SpikingRectifiedLinear(),
                           max_rates=nengo.dists.Choice([50]),
                           intercepts=nengo.dists.Choice([0]))
        net.config[b].on_chip = False
        nengo.Connection(a, b.neurons, transform=transform)
        in_shape = transform.output_shape

        transform = nengo_transforms.Convolution(n_filters,
                                                 in_shape,
                                                 kernel_size=kernel_size,
                                                 strides=strides,
                                                 init=kernel,
                                                 channels_last=channels_last)
        out_shape = transform.output_shape
        split_slices = conv.split_channels(out_shape,
                                           max_size=1024,
                                           max_channels=4)

        # --- make convolution population, split across ensembles
        cc = []
        cp = []
        out_shapes = []
        xslice = conv.ImageSlice(in_shape)
        for yslice in split_slices:
            transform_xy = conv.split_transform(transform, xslice, yslice)
            out_shapes.append(transform_xy.output_shape)
            c = nengo.Ensemble(transform_xy.output_shape.size,
                               1,
                               neuron_type=nengo.LIF(),
                               max_rates=nengo.dists.Choice([15]),
                               intercepts=nengo.dists.Choice([0]))
            nengo.Connection(b.neurons, c.neurons, transform=transform_xy)
            cc.append(c)
            cp.append(nengo.Probe(c.neurons))

    simtime = 0.3

    with nengo.Simulator(net, optimize=False) as sim_nengo:
        sim_nengo.run(simtime)

    hw_opts = dict(snip_max_spikes_per_step=100)
    with Simulator(net, seed=seed, hardware_options=hw_opts) as sim_loihi:
        sim_loihi.run(simtime)

    nengo_out = []
    loihi_out = []
    for p, out_shape_i in zip(cp, out_shapes):
        nengo_out.append(
            (sim_nengo.data[p] > 0).sum(axis=0).reshape(out_shape_i.shape))
        loihi_out.append(
            (sim_loihi.data[p] > 0).sum(axis=0).reshape(out_shape_i.shape))

    if channels_last:
        nengo_out = np.concatenate(nengo_out, axis=2)
        loihi_out = np.concatenate(loihi_out, axis=2)

        # put channels first to display them separately
        nengo_out = np.transpose(nengo_out, (2, 0, 1))
        loihi_out = np.transpose(loihi_out, (2, 0, 1))
    else:
        nengo_out = np.concatenate(nengo_out, axis=0)
        loihi_out = np.concatenate(loihi_out, axis=0)

    out_max = np.maximum(nengo_out.max(), loihi_out.max())

    # --- plot results
    rows = 2
    cols = 3

    ax = plt.subplot(rows, cols, 1)
    imshow(test10[0][0].reshape((28, 28)), vmin=0, vmax=1, ax=ax)

    ax = plt.subplot(rows, cols, 2)
    tile(np.transpose(kernel[0], (2, 0, 1)), cols=8, ax=ax)

    ax = plt.subplot(rows, cols, 3)
    plt.hist(nengo_out.ravel(), bins=31)
    plt.hist(loihi_out.ravel(), bins=31)

    ax = plt.subplot(rows, cols, 4)
    tile(nengo_out, vmin=0, vmax=out_max, cols=8, ax=ax)

    ax = plt.subplot(rows, cols, 6)
    tile(loihi_out, vmin=0, vmax=out_max, cols=8, ax=ax)

    assert allclose(loihi_out, nengo_out, atol=0.05 * out_max, rtol=0.15)
Example #24
def test_chip_population_axons(on_chip, precompute, pop_type, channels_last,
                               Simulator, rng):
    """Check that all types of population axons work as inputs or between cores.

    Also, on the chip, dummy axons were still having an effect. Check this is fixed.
    """
    def conv_layer(input=None, label=None, **kwargs):
        conv = nengo.Convolution(**kwargs)
        layer = nengo.Ensemble(conv.output_shape.size, 1, label=label)
        conn = (nengo.Connection(input, layer.neurons, transform=conv)
                if input is not None else None)
        return layer, conv, conn

    if pop_type == 16 and not channels_last:
        pytest.skip(
            "pop16 axons not compatible with single-compartment shifts")

    max_rate = 100
    amp = 1 / max_rate

    n_filters0 = 4
    n_filters1 = 4
    # 6 x 6 input will have one unused pixel at edge with 3 x 3 kernel and stride 2
    input_shape = (6, 6, 1) if channels_last else (1, 6, 6)
    input_shape = nengo_transforms.ChannelShape(input_shape,
                                                channels_last=channels_last)
    X = rng.uniform(0.2, 1, size=input_shape.shape)
    kernel0 = rng.uniform(0.2, 1, size=(1, 1, 1, n_filters0))
    kernel1 = rng.uniform(0.1, 0.5, size=(3, 3, n_filters0, n_filters1))

    with nengo.Network(seed=0) as net:
        nengo_loihi.add_params(net)
        net.config[nengo.Ensemble].neuron_type = nengo.SpikingRectifiedLinear(
            amplitude=amp)
        net.config[nengo.Ensemble].max_rates = nengo.dists.Choice([max_rate])
        net.config[nengo.Ensemble].intercepts = nengo.dists.Choice([0])
        net.config[nengo.Connection].synapse = 0.005

        inp = nengo.Node(X.ravel()) if not on_chip else None

        # first layer is off-chip to translate the inputs into spikes
        layer0, conv0, _ = conv_layer(
            input=inp,
            n_filters=n_filters0,
            input_shape=input_shape,
            channels_last=channels_last,
            kernel_size=(1, 1),
            init=kernel0,
            label="layer0",
        )

        net.config[layer0].on_chip = on_chip
        if on_chip:
            assert kernel0.shape[:2] == (1, 1)
            w = kernel0[0, 0]
            Y = X.dot(w) if channels_last else np.tensordot(w.T, X, axes=1)
            layer0.gain = nengo.dists.Choice([0.0])
            layer0.bias = Y.ravel() * max_rate

        layer1, conv1, conn1 = conv_layer(
            input=layer0.neurons,
            n_filters=n_filters1,
            input_shape=conv0.output_shape,
            channels_last=channels_last,
            kernel_size=(3, 3),
            strides=(2, 2),
            init=kernel1,
            label="layer1",
        )
        net.config[conn1].pop_type = pop_type

        probe = nengo.Probe(layer1.neurons)

    sim_time = 0.1
    with Simulator(net, target="sim") as emulator:
        emulator.run(sim_time)

    with Simulator(net, target="loihi", precompute=precompute) as loihi:
        loihi.run(sim_time)

    assert np.all(emulator.data[probe].sum(axis=0) > 0)
    assert np.array_equal(loihi.data[probe], emulator.data[probe])
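A quick check of the "6 x 6 input will have one unused pixel at edge" comment in the example above, using plain arithmetic for a valid 3 x 3 convolution with stride 2 (no Nengo objects involved):

in_size, kernel, stride = 6, 3, 2
out_size = (in_size - kernel) // stride + 1           # -> 2
last_used = (out_size - 1) * stride + (kernel - 1)    # last input index touched: 4
print(out_size, in_size - 1 - last_used)              # -> 2 1 (one unused pixel)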
Example #25
    return layer, conv


dt = 0.001  # simulation timestep
presentation_time = 0.1  # input presentation time
max_rate = 100  # neuron firing rates
# neuron spike amplitude (scaled so that the overall output is ~1)
amp = 1 / max_rate
# input image shape
input_shape = inp_shape

with nengo.Network(seed=0) as net:
    # set up the default parameters for ensembles/connections
    nengo_loihi.add_params(net)
    net.config[nengo.Ensemble].neuron_type = (nengo.SpikingRectifiedLinear(
        amplitude=amp))
    net.config[nengo.Ensemble].max_rates = nengo.dists.Choice([max_rate])
    net.config[nengo.Ensemble].intercepts = nengo.dists.Choice([0])
    net.config[nengo.Connection].synapse = None

    # the input node that will be used to feed in input images
    inp = nengo.Node(nengo.processes.PresentInput(test_data[0],
                                                  presentation_time),
                     size_out=out_size)

    # the output node provides the 10-dimensional classification
    out = nengo.Node(size_in=10)
    for _ in range(n_parallel):
        # build parallel copies of the network
        layer, conv = conv_layer(inp,
                                 1,
Example #26
        def close(self):
            """Dummy close function"""

        def accept(self):
            """Dummy accept function"""

        def sendto(self, *args):
            """Dummy sendto function"""

        def recv_into(self, *args):
            """Dummy recv_into function"""

    return DummyCom


@pytest.fixture(params=[nengo.RectifiedLinear(), nengo.SpikingRectifiedLinear()])
def params(request):  # pragma: no cover
    """Create a dummy network and extract params for fullstack tests

    Fixture itself is parametrized for neuron type
    """

    # Tell Nengo to use 32b, like the FPGA
    nengo.rc.set("precision", "bits", "32")

    # Arbitrary params (keep `my_func` in mind if changing dims)
    neuron = request.param
    n_neurons = 200
    dims_out = 2
    dims_in = 2 * dims_out
    seed = 10
Example #27
def test_conv_preslice(on_chip, Simulator, plt):
    conv2d = pytest.importorskip("nengo._vendor.npconv2d.conv2d")

    kernel = np.array([[-1, 2, -1], [-1, 2, -1], [-1, 2, -1]], dtype=float)
    kernel /= kernel.max()

    image = np.array(
        [
            [1, 2, 1, 2, 0],
            [2, 3, 2, 1, 1],
            [1, 2, 1, 2, 3],
            [2, 3, 2, 1, 1],
            [1, 2, 1, 2, 0],
        ],
        dtype=float,
    )
    image /= image.max()

    image2 = np.column_stack([c * x for c in image.T for x in (1, -1)])

    input_gain = 149.0

    neuron_type = nengo.SpikingRectifiedLinear()
    loihi_neuron = LoihiSpikingRectifiedLinear()
    layer0_neuron = loihi_neuron if on_chip else neuron_type

    y_ref = layer0_neuron.rates(image.ravel(), input_gain, 0)
    y_ref = conv2d.conv2d(y_ref.reshape((1, 5, 5, 1)),
                          kernel.reshape((3, 3, 1, 1)),
                          pad="VALID")
    y_ref = loihi_neuron.rates(y_ref.ravel(), 1.0, 0.0).reshape((3, 3))

    with nengo.Network() as net:
        nengo_loihi.add_params(net)

        u = nengo.Node(image2.ravel())
        a = nengo.Ensemble(
            50,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([input_gain]),
            bias=nengo.dists.Choice([0]),
        )
        net.config[a].on_chip = on_chip

        transform = nengo_transforms.Convolution(n_filters=1,
                                                 input_shape=(5, 5, 1),
                                                 init=kernel.reshape(
                                                     (3, 3, 1, 1)))

        b = nengo.Ensemble(
            transform.output_shape.size,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([1]),
            bias=nengo.dists.Choice([0]),
        )

        nengo.Connection(u, a.neurons, synapse=None)
        nengo.Connection(a.neurons[::2], b.neurons, transform=transform)
        bp = nengo.Probe(b.neurons, synapse=nengo.Alpha(0.02))

    with Simulator(net) as sim:
        assert sim.precompute is True
        sim.run(0.3)

    y_ref = y_ref / input_gain
    y = sim.data[bp][-1].reshape((3, -1)) / input_gain

    plt.subplot(121)
    plt.imshow(y_ref)
    plt.colorbar()
    plt.subplot(122)
    plt.imshow(y)
    plt.colorbar()

    assert np.allclose(y, y_ref, atol=0.02, rtol=0.1)
Example #28
def go(NPre=100, N=30, t=10, m=Uniform(30, 30), i=Uniform(-0.8, 0.8), seed=0, dt=0.001, f=DoubleExp(1e-3, 1e-1), fS=DoubleExp(1e-3, 1e-1), neuron_type=LIF(), d1=None, d2=None, f1=None, f2=None, e1=None, e2=None, l1=False, l2=False, test=False, freq=1, phase=0, tDrive=0.2):

    A = [[1, 1e-1*2*np.pi*freq], [-1e-1*2*np.pi*freq, 1]]  # tau*A + I
    if isinstance(neuron_type, Bio) and not f1: f1=DoubleExp(1e-3, 1e-1)
    if isinstance(neuron_type, Bio) and not f2: f2=DoubleExp(1e-3, 1e-1)
    stim = lambda t: [np.sin(2*np.pi*freq*t+phase), np.cos(2*np.pi*freq*t+phase)]

    with nengo.Network(seed=seed) as model:          
        inpt = nengo.Node(stim)
        tar = nengo.Ensemble(1, 2, neuron_type=nengo.Direct())
        pre = nengo.Ensemble(NPre, 2, max_rates=m, neuron_type=nengo.SpikingRectifiedLinear(), radius=2, seed=seed)
        ens = nengo.Ensemble(N, 2, max_rates=m, intercepts=i, neuron_type=neuron_type, radius=2, seed=seed)
        nengo.Connection(inpt, tar, synapse=None, transform=A, seed=seed)
        nengo.Connection(inpt, pre, synapse=None, seed=seed)
        c1 = nengo.Connection(pre, ens, synapse=f1, seed=seed, solver=NoSolver(d1))
        pInpt = nengo.Probe(inpt, synapse=None)
        pTar = nengo.Probe(tar, synapse=None)
        pPre = nengo.Probe(pre.neurons, synapse=None)
        pEns = nengo.Probe(ens.neurons, synapse=None)
        # Encoder Learning (Bio)
        if l1:
            tarEns = nengo.Ensemble(N, 2, max_rates=m, intercepts=i, neuron_type=nengo.LIF(), seed=seed)
            nengo.Connection(inpt, tarEns, synapse=None, seed=seed)
            learnEncoders(c1, tarEns, fS)
            pTarEns = nengo.Probe(tarEns.neurons, synapse=None)
        if l2:
            pre2 = nengo.Ensemble(NPre, 2, max_rates=m, neuron_type=nengo.LIF(), seed=seed, radius=2)
            tarEns2 = nengo.Ensemble(N, 2, max_rates=m, intercepts=i, neuron_type=nengo.LIF(), seed=seed)
            ens2 = nengo.Ensemble(N, 2, max_rates=m, intercepts=i, neuron_type=neuron_type, seed=seed, radius=2)
            
#             ens3 = nengo.Ensemble(N, 2, max_rates=m, intercepts=i, neuron_type=neuron_type, seed=seed, radius=2)
#             nengo.Connection(tar, pre2, synapse=f)
#             c3 = nengo.Connection(ens, ens2, synapse=f2, seed=seed)
#             c4 = nengo.Connection(pre2, ens3, synapse=f1, seed=seed)
#             learnEncoders(c3, ens3, fS)
#             pTarEns2 = nengo.Probe(ens3.neurons, synapse=None)
#             pEns2 = nengo.Probe(ens2.neurons, synapse=None)

            nengo.Connection(inpt, pre2, synapse=f)
            nengo.Connection(pre2, tarEns2, synapse=f, seed=seed)
            c3 = nengo.Connection(ens, ens2, synapse=f2, seed=seed)
            learnEncoders(c3, tarEns2, fS, alpha=3e-7)
            pTarEns2 = nengo.Probe(tarEns2.neurons, synapse=None)
            pEns2 = nengo.Probe(ens2.neurons, synapse=None)
        if test:
            c2 = nengo.Connection(ens, ens, synapse=f2, seed=seed, solver=NoSolver(d2))
            off = nengo.Node(lambda t: 1 if t>tDrive else 0)
            nengo.Connection(off, pre.neurons, synapse=None, transform=-1e4*np.ones((NPre, 1)))

    with nengo.Simulator(model, seed=seed, dt=dt, progress_bar=False) as sim:
        if isinstance(neuron_type, Bio):
            setWeights(c1, d1, e1)
            if l2: setWeights(c3, d2, e2)
#             if l2: setWeights(c4, d1, e1)
            if test: setWeights(c2, d2, e2)
            neuron.h.init()
            sim.run(t, progress_bar=True)
            reset_neuron(sim, model) 
        else:
            sim.run(t, progress_bar=True)
      
    e1 = c1.e if l1 else e1
    e2 = c3.e if l2 else e2

    return dict(
        times=sim.trange(),
        inpt=sim.data[pInpt],
        tar=sim.data[pTar],
        pre=sim.data[pPre],
        ens=sim.data[pEns],
        tarEns=sim.data[pTarEns] if l1 else None,
        tarEns2=sim.data[pTarEns2] if l2 else None,
        ens2=sim.data[pEns2] if l2 else None,
        e1=e1,
        e2=e2,
    )
Example #29
from nengo_loihi.neurons import (
    AlphaRCNoise,
    discretize_tau_rc,
    discretize_tau_ref,
    loihi_rates,
    LoihiLIF,
    LoihiSpikingRectifiedLinear,
    LowpassRCNoise,
)


@pytest.mark.parametrize('dt', [3e-4, 1e-3])
@pytest.mark.parametrize('neuron_type', [
    nengo.LIF(),
    nengo.LIF(tau_ref=0.001, tau_rc=0.07, amplitude=0.34),
    nengo.SpikingRectifiedLinear(),
    nengo.SpikingRectifiedLinear(amplitude=0.23),
])
def test_loihi_rates(dt, neuron_type, Simulator, plt, allclose):
    n = 256
    x = np.linspace(-0.1, 1, n)

    encoders = np.ones((n, 1))
    max_rates = 400 * np.ones(n)
    intercepts = 0 * np.ones(n)
    gain, bias = neuron_type.gain_bias(max_rates, intercepts)
    j = x * gain + bias

    with nengo.Network() as model:
        a = nengo.Ensemble(n,
                           1,
Example #30
    return mean_rates


def is_spiking_type(neuron_type):
    return isinstance(neuron_type, (nengo.LIF, nengo.SpikingRectifiedLinear))


# test the trained networks on test set
mean_rates = run_network(activation=nengo.RectifiedLinear(),
                         n_steps=10,
                         plot_idx=plot_no)
plt.savefig(outdir + f'/{plot_no}.jpg')
plot_no += 1

# test the trained networks using spiking neurons
run_network(activation=nengo.SpikingRectifiedLinear(),
            scale_firing_rates=100,
            synapse=0.005,
            plot_idx=plot_no)
plt.savefig(outdir + f'/{plot_no}.jpg')
plot_no += 1

# test the trained networks using spiking neurons
run_network(activation=nengo_loihi.neurons.LoihiSpikingRectifiedLinear(),
            scale_firing_rates=100,
            synapse=0.005,
            plot_idx=plot_no)
plt.savefig(outdir + f'/{plot_no}.jpg')
plot_no += 1