Example #1
def test_train_ff(Simulator, neurons, seed):
    minibatch_size = 4
    n_hidden = 20

    with nengo.Network(seed=seed) as net:
        net.config[nengo.Ensemble].gain = nengo.dists.Choice([1])
        net.config[nengo.Ensemble].bias = nengo.dists.Choice([0])
        net.config[nengo.Connection].synapse = None

        # note: we use this weird input setup just so that we can test
        # training with two distinct inputs
        inp_a = nengo.Node([0])
        inp_b = nengo.Node([0])
        inp = nengo.Node(size_in=2)
        nengo.Connection(inp_a, inp[0])
        nengo.Connection(inp_b, inp[1])

        ens = nengo.Ensemble(n_hidden + 1,
                             n_hidden,
                             neuron_type=nengo.Sigmoid(tau_ref=1))
        out = nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(tau_ref=1))
        nengo.Connection(inp,
                         ens.neurons if neurons else ens,
                         transform=dists.Glorot())
        nengo.Connection(ens.neurons if neurons else ens,
                         out.neurons,
                         transform=dists.Glorot())

        p = nengo.Probe(out.neurons)

    with Simulator(net,
                   minibatch_size=minibatch_size,
                   unroll_simulation=1,
                   seed=seed) as sim:
        x = np.asarray([[[0, 0]], [[0, 1]], [[1, 0]], [[1, 1]]])
        y = np.asarray([[[0.1]], [[0.9]], [[0.9]], [[0.1]]])

        sim.train({
            inp_a: x[..., [0]],
            inp_b: x[..., [1]]
        }, {p: y},
                  tf.train.AdamOptimizer(0.01),
                  n_epochs=500)

        sim.check_gradients(atol=5e-5)

        sim.step(input_feeds={inp_a: x[..., [0]], inp_b: x[..., [1]]})

        assert np.allclose(sim.data[p], y, atol=1e-3)
Example #2
def test_regularize_train(Simulator, mode, seed):
    with nengo.Network(seed=seed) as net:
        a = nengo.Node([1])
        b = nengo.Ensemble(30,
                           1,
                           neuron_type=nengo.Sigmoid(tau_ref=1),
                           gain=nengo.dists.Choice([1]),
                           bias=nengo.dists.Choice([0]))
        c = nengo.Connection(a,
                             b.neurons,
                             synapse=None,
                             transform=nengo.dists.Uniform(-0.1, 0.1))

        if mode == "weights":
            p = nengo.Probe(c, "weights")
        else:
            p = nengo.Probe(b.neurons)

    with Simulator(net) as sim:
        sim.train(
            5,
            tf.train.RMSPropOptimizer(0.01 if mode == "weights" else 0.1),
            objective={p: objectives.Regularize()},
            n_epochs=100)

        sim.step()
        assert np.allclose(sim.data[p], 0, atol=1e-2)
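
A side note on the objective used above: Regularize() penalizes the probed values themselves, so minimizing it drives sim.data[p] toward zero, which the final assertion checks. A minimal sketch of the idea (an illustrative assumption, not nengo_dl's actual implementation):

import tensorflow as tf

# Illustrative stand-in for objectives.Regularize(): an L2-style penalty on
# the probed output itself; training against it pushes the output to zero.
def l2_regularize(outputs):
    return tf.reduce_mean(tf.square(outputs))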
Example #3
def test_sigmoid_invalid(Simulator, max_rate, intercept):
    """Check that invalid sigmoid ensembles raise an error."""
    with nengo.Network() as m:
        nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(),
                       max_rates=[max_rate], intercepts=[intercept])
    with pytest.raises(BuildError):
        with Simulator(m):
            pass
Example #4
def test_train_ff(Simulator, neurons, seed):
    minibatch_size = 4
    n_hidden = 20

    np.random.seed(seed)

    with nengo.Network() as net:
        net.config[nengo.Ensemble].gain = nengo.dists.Choice([1])
        net.config[nengo.Ensemble].bias = nengo.dists.Choice([0])
        net.config[nengo.Connection].synapse = None

        inp = nengo.Node([0, 0])
        ens = nengo.Ensemble(n_hidden + 1,
                             n_hidden,
                             neuron_type=nengo.Sigmoid(tau_ref=1))
        out = nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(tau_ref=1))
        nengo.Connection(inp,
                         ens.neurons if neurons else ens,
                         transform=dists.Glorot())
        nengo.Connection(ens.neurons if neurons else ens,
                         out.neurons,
                         transform=dists.Glorot())

        # TODO: why does training fail if we probe out instead of out.neurons?
        p = nengo.Probe(out.neurons)

    with Simulator(net,
                   minibatch_size=minibatch_size,
                   unroll_simulation=1,
                   seed=seed) as sim:
        x = np.asarray([[[0, 0]], [[0, 1]], [[1, 0]], [[1, 1]]])
        y = np.asarray([[[0.1]], [[0.9]], [[0.9]], [[0.1]]])

        sim.train({inp: x}, {p: y},
                  tf.train.MomentumOptimizer(1, 0.9),
                  n_epochs=500)

        sim.check_gradients(atol=5e-5)

        sim.step(input_feeds={inp: x})

        assert np.allclose(sim.data[p], y, atol=1e-3)
Example #5
def test_train_sparse(Simulator, seed):
    minibatch_size = 4
    n_hidden = 5

    np.random.seed(seed)

    with nengo.Network(seed=seed) as net:
        net.config[nengo.Ensemble].gain = nengo.dists.Choice([1])
        net.config[nengo.Ensemble].bias = nengo.dists.Choice([0])
        net.config[nengo.Connection].synapse = None

        inp = nengo.Node([0, 0, 0, 0, 0])
        ens = nengo.Ensemble(n_hidden,
                             n_hidden,
                             neuron_type=nengo.Sigmoid(tau_ref=1))
        out = nengo.Ensemble(2, 2, neuron_type=nengo.Sigmoid(tau_ref=1))
        nengo.Connection(inp[[0, 2, 3]], ens, transform=dists.Glorot())
        nengo.Connection(ens, out, transform=dists.Glorot())

        p = nengo.Probe(out.neurons)

    with Simulator(net,
                   minibatch_size=minibatch_size,
                   unroll_simulation=1,
                   seed=seed) as sim:
        x = np.asarray([[[0, 0, 0, 0, 0]], [[0, 0, 1, 0, 0]],
                        [[1, 0, 0, 0, 0]], [[1, 0, 1, 0, 0]]])
        y = np.asarray([[[0.1, 0]], [[0.9, 0]], [[0.9, 0]], [[0.1, 0]]])

        sim.train({inp: x}, {p: y},
                  tf.train.MomentumOptimizer(1, 0.9),
                  n_epochs=500)

        sim.step(input_feeds={inp: x})

        assert np.allclose(sim.data[p], y, atol=1e-3)
Example #6
    def _add_neuron_layer(self, layer):
        inputs = [self._get_input(layer)]
        neuron = layer["neuron"]
        ntype = neuron["type"]
        n = layer["outputs"]

        gain = 1.0
        bias = 0.0
        amplitude = 1.0
        if ntype == "ident":
            neuron_type = nengo.Direct()
        elif ntype == "relu":
            neuron_type = nengo.RectifiedLinear()
        elif ntype == "logistic":
            neuron_type = nengo.Sigmoid()
        elif ntype == "softlif":

            tau_ref, tau_rc, alpha, amp, sigma = [
                neuron["params"][k] for k in ["t", "r", "a", "m", "g"]
            ]
            lif_type = self.lif_type.lower()
            if lif_type == "lif":
                neuron_type = nengo.LIF(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == "lifrate":
                neuron_type = nengo.LIFRate(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == "softlifrate":
                neuron_type = SoftLIFRate(sigma=sigma,
                                          tau_rc=tau_rc,
                                          tau_ref=tau_ref)
            else:
                raise KeyError("Unrecognized LIF type %r" % self.lif_type)
            gain = alpha
            bias = 1.0
            amplitude = amp
        else:
            raise NotImplementedError("Neuron type %r" % ntype)

        return self.add_neuron_layer(
            n,
            inputs=inputs,
            neuron_type=neuron_type,
            synapse=self.synapse,
            gain=gain,
            bias=bias,
            amplitude=amplitude,
            name=layer["name"],
        )
Example #7
def test_neuron_build_errors(Simulator):
    # unsupported neuron type
    with nengo.Network() as net:
        nengo.Ensemble(5, 1, neuron_type=nengo.neurons.Sigmoid(tau_ref=0.005))

    with pytest.raises(BuildError, match="type 'Sigmoid' cannot be simulated"):
        with Simulator(net):
            pass

    # unsupported RegularSpiking type
    with nengo.Network() as net:
        nengo.Ensemble(5,
                       1,
                       neuron_type=nengo.RegularSpiking(
                           nengo.Sigmoid(tau_ref=0.005)))

    with pytest.raises(BuildError,
                       match="RegularSpiking.*'Sigmoid'.*cannot be simu"):
        with Simulator(net):
            pass

    # amplitude with RegularSpiking base type
    with nengo.Network() as net:
        nengo.Ensemble(5,
                       1,
                       neuron_type=nengo.RegularSpiking(
                           nengo.LIFRate(amplitude=0.5)))

    with pytest.raises(BuildError,
                       match="Amplitude is not supported on RegularSpikin"):
        with Simulator(net):
            pass

    # non-zero initial voltage warning
    with nengo.Network() as net:
        nengo.Ensemble(
            5,
            1,
            neuron_type=nengo.LIF(
                initial_state={"voltage": nengo.dists.Uniform(0, 1)}),
        )

    with pytest.warns(Warning,
                      match="initial values for 'voltage' being non-zero"):
        with Simulator(net):
            pass
Example #8
def test_sigmoid_response_curves(Simulator, max_rate, intercept):
    """Check the sigmoid response curve monotonically increases.

    The sigmoid response curve should work fine:

    - if max rate > rate at inflection point and intercept < 1
    - if max rate < rate at inflection point and intercept > 1
    """
    with nengo.Network() as m:
        e = nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(),
                           max_rates=[max_rate], intercepts=[intercept])

    with Simulator(m) as sim:
        pass
    x, y = nengo.utils.ensemble.response_curves(e, sim)
    assert np.allclose(np.max(y), max_rate)
    assert np.all(y > 0.)
    assert np.all(np.diff(y) > 0.)  # monotonically increasing
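
The two conditions above also pin down when the build should fail. A hypothetical parametrization for the companion test_sigmoid_invalid (Example #3), assuming the rate at the sigmoid's inflection point is 1 / (2 * tau_ref); the values are illustrative, not from the original source:

import pytest
import nengo
from nengo.exceptions import BuildError

# Assumption: the sigmoid's firing rate at its inflection point is
# 1 / (2 * tau_ref). Both cases below sit outside the two valid regions
# described in the docstring above, so each should fail to build.
inflection_rate = 1.0 / (2 * nengo.Sigmoid().tau_ref)


@pytest.mark.parametrize("max_rate, intercept", [
    (1.5 * inflection_rate, 2.0),  # max rate above inflection, intercept > 1
    (0.5 * inflection_rate, 0.5),  # max rate below inflection, intercept < 1
])
def test_sigmoid_invalid_cases(max_rate, intercept):
    with nengo.Network() as m:
        nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(),
                       max_rates=[max_rate], intercepts=[intercept])
    with pytest.raises(BuildError):
        with nengo.Simulator(m):
            pass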
Example #9
def test_regularize_train(Simulator, mode, seed):
    with nengo.Network(seed=seed) as net:
        a = nengo.Node([1])
        b = nengo.Ensemble(
            30,
            1,
            neuron_type=nengo.Sigmoid(tau_ref=1),
            gain=nengo.dists.Choice([1]),
            bias=nengo.dists.Choice([0]),
        )
        c = nengo.Connection(a,
                             b.neurons,
                             synapse=None,
                             transform=nengo.dists.Uniform(-0.1, 0.1))

        if mode == "weights":
            p = nengo.Probe(c, "weights")
        else:
            p = nengo.Probe(b.neurons)

        # default output required so that there is a defined gradient for all
        # parameters
        default_p = nengo.Probe(b)

    with Simulator(net) as sim:
        sim.compile(
            tf.optimizers.RMSprop(0.01 if mode == "weights" else 0.1),
            loss={
                p: losses.Regularize(),
                default_p: lambda y_true, y_pred: 0 * y_pred
            },
        )
        sim.fit(
            n_steps=5,
            y={
                p: np.zeros((1, 5, p.size_in)),
                default_p: np.zeros((1, 5, default_p.size_in)),
            },
            epochs=100,
        )

        sim.step()
        assert np.allclose(sim.data[p], 0, atol=1e-2)
Example #10
    def _add_neuron_layer(self, layer):
        neuron = layer['neuron']
        ntype = neuron['type']
        n = layer['outputs']

        e = nengo.Ensemble(n, 1, label='%s_neurons' % layer['name'])
        e.gain = np.ones(n)
        e.bias = np.zeros(n)

        transform = 1.
        if ntype == 'ident':
            e.neuron_type = nengo.Direct()
        elif ntype == 'relu':
            e.neuron_type = nengo.RectifiedLinear()
        elif ntype == 'logistic':
            e.neuron_type = nengo.Sigmoid()
        elif ntype == 'softlif':
            from .neurons import SoftLIFRate
            tau_ref, tau_rc, alpha, amp, sigma, noise = [
                neuron['params'][k] for k in ['t', 'r', 'a', 'm', 'g', 'n']]
            lif_type = self.lif_type.lower()
            if lif_type == 'lif':
                e.neuron_type = nengo.LIF(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == 'lifrate':
                e.neuron_type = nengo.LIFRate(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == 'softlifrate':
                e.neuron_type = SoftLIFRate(
                    sigma=sigma, tau_rc=tau_rc, tau_ref=tau_ref)
            else:
                raise KeyError("Unrecognized LIF type %r" % self.lif_type)
            e.gain = alpha * np.ones(n)
            e.bias = np.ones(n)
            transform = amp
        else:
            raise NotImplementedError("Neuron type %r" % ntype)

        node = nengo.Node(size_in=n, label=layer['name'])
        nengo.Connection(self._get_input(layer), e.neurons, synapse=None)
        nengo.Connection(
            e.neurons, node, transform=transform, synapse=self.synapse)
        return node
Example #11
def test_gradients(Simulator, unroll, seed):
    minibatch_size = 4

    with nengo.Network(seed=seed) as net:
        net.config[nengo.Ensemble].gain = nengo.dists.Choice([1])
        net.config[nengo.Ensemble].bias = nengo.dists.Uniform(-1, 1)

        inp = nengo.Node([0], label="inp")

        # sigmoid neurons
        ens = nengo.Ensemble(10, 1, neuron_type=nengo.Sigmoid())

        # normal decoded connection
        nengo.Connection(inp, ens)

        # recurrent connection
        nengo.Connection(ens, ens, transform=0.1)

        # rectified neurons
        ens2 = nengo.Ensemble(10, 2, neuron_type=nengo.RectifiedLinear())

        # neuron--neuron connection
        nengo.Connection(ens,
                         ens2,
                         transform=[[1], [1]],
                         solver=nengo.solvers.LstsqL2(weights=True))

        # sliced output, no synapse
        nengo.Connection(inp, ens2[0], synapse=None, transform=0.5)

        # sliced input, sliced output
        inp2 = nengo.Node([0, 0], label="inp2")
        nengo.Connection(inp2[0], ens2[1])

        nengo.Probe(ens)
        nengo.Probe(ens2)

    with Simulator(net,
                   unroll_simulation=unroll,
                   minibatch_size=minibatch_size) as sim:
        sim.check_gradients(atol=1e-4)
Example #12
def test_build_optimizer(Simulator):
    with nengo.Network() as net:
        inp = nengo.Node([0])
        ens = nengo.Ensemble(10, 1, neuron_type=nengo.Sigmoid())
        nengo.Connection(inp, ens)
        p = nengo.Probe(ens)

    # check optimizer caching
    with Simulator(net) as sim:
        opt = tf.train.GradientDescentOptimizer(0)
        assert (sim.tensor_graph.build_optimizer(opt, (p, ), "mse") is
                sim.tensor_graph.build_optimizer(opt, (p, ), "mse"))

    # error when no trainable elements
    with nengo.Network() as net:
        inp = nengo.Node([0])
        p = nengo.Probe(inp)

    with Simulator(net) as sim:
        with pytest.raises(SimulationError):
            sim.tensor_graph.build_optimizer(opt, (p, ), "mse")
Example #13
    def _add_neuron_layer(self, layer):
        inputs = [self._get_input(layer)]
        neuron = layer['neuron']
        ntype = neuron['type']
        n = layer['outputs']

        gain = 1.
        bias = 0.
        amplitude = 1.
        if ntype == 'ident':
            neuron_type = nengo.Direct()
        elif ntype == 'relu':
            neuron_type = nengo.RectifiedLinear()
        elif ntype == 'logistic':
            neuron_type = nengo.Sigmoid()
        elif ntype == 'softlif':
            from .neurons import SoftLIFRate
            tau_ref, tau_rc, alpha, amp, sigma, noise = [
                neuron['params'][k] for k in ['t', 'r', 'a', 'm', 'g', 'n']]
            lif_type = self.lif_type.lower()
            if lif_type == 'lif':
                neuron_type = nengo.LIF(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == 'lifrate':
                neuron_type = nengo.LIFRate(tau_rc=tau_rc, tau_ref=tau_ref)
            elif lif_type == 'softlifrate':
                neuron_type = SoftLIFRate(
                    sigma=sigma, tau_rc=tau_rc, tau_ref=tau_ref)
            else:
                raise KeyError("Unrecognized LIF type %r" % self.lif_type)
            gain = alpha
            bias = 1.
            amplitude = amp
        else:
            raise NotImplementedError("Neuron type %r" % ntype)

        return self.add_neuron_layer(
            n, inputs=inputs, neuron_type=neuron_type, synapse=self.synapse,
            gain=gain, bias=bias, amplitude=amplitude, name=layer['name'])
Example #14
def test_build_optimizer(Simulator):
    with nengo.Network() as net:
        inp = nengo.Node([0])
        ens = nengo.Ensemble(10, 1, neuron_type=nengo.Sigmoid())
        nengo.Connection(inp, ens)
        p = nengo.Probe(ens)

    # check optimizer caching
    with Simulator(net) as sim:
        opt = tf.train.GradientDescentOptimizer(0)
        assert (
            sim.tensor_graph.build_optimizer_func(opt, {p: objectives.mse}) is
            sim.tensor_graph.build_optimizer_func(opt, {p: objectives.mse}))

    # error when no trainable elements
    with nengo.Network() as net:
        inp = nengo.Node([0])
        p = nengo.Probe(inp)

    with Simulator(net) as sim:
        with pytest.raises(ValueError):
            sim.tensor_graph.build_outputs(
                {p: sim.tensor_graph.build_optimizer_func(
                    opt, {p: objectives.mse})})

    # capturing variables from nested loss function
    def loss(x):
        return abs(
            tf.get_variable("two", initializer=tf.constant_initializer(2.0),
                            shape=(), dtype=x.dtype) - x)

    net, _, p = dummies.linear_net()

    with Simulator(net) as sim:
        sim.train(5, tf.train.GradientDescentOptimizer(0.1),
                  objective={p: loss}, n_epochs=10)
        sim.step()
        assert np.allclose(sim.data[p], 2)
Example #15
def test_fit(Simulator, seed):
    minibatch_size = 4
    n_hidden = 20

    with nengo.Network(seed=seed) as net:
        net.config[nengo.Ensemble].gain = nengo.dists.Choice([1])
        net.config[nengo.Ensemble].bias = nengo.dists.Choice([0])
        net.config[nengo.Connection].synapse = None

        # note: we use this weird input setup just so that we can test
        # training with two distinct inputs
        inp_a = nengo.Node([0])
        inp_b = nengo.Node([0])
        inp = nengo.Node(size_in=2)
        nengo.Connection(inp_a, inp[0], transform=1)
        nengo.Connection(inp_b, inp[1], transform=1)

        ens = nengo.Ensemble(n_hidden + 1,
                             n_hidden,
                             neuron_type=nengo.Sigmoid(tau_ref=1))
        out = nengo.Ensemble(1, 1, neuron_type=nengo.Sigmoid(tau_ref=1))
        nengo.Connection(inp, ens.neurons, transform=dists.Glorot())
        nengo.Connection(ens.neurons, out.neurons, transform=dists.Glorot())

        nengo.Probe(out.neurons)

    with Simulator(net,
                   minibatch_size=minibatch_size,
                   unroll_simulation=1,
                   seed=seed) as sim:
        x = np.asarray([[[0.0, 0.0]], [[0.0, 1.0]],
                        [[1.0, 0.0]], [[1.0, 1.0]]])
        y = np.asarray([[[0.1]], [[0.9]], [[0.9]], [[0.1]]])

        sim.compile(optimizer=tf.optimizers.Adam(0.01), loss=tf.losses.mse)
        # note: batch_size should be ignored
        with pytest.warns(UserWarning,
                          match="Batch size is determined statically"):
            history = sim.fit(
                [x[..., [0]], x[..., [1]]],
                y,
                validation_data=([x[..., [0]], x[..., [1]]], y),
                epochs=200,
                verbose=0,
                batch_size=-1,
            )
        assert history.history["loss"][-1] < 5e-4
        assert history.history["val_loss"][-1] < 5e-4

        # check that validation_sample_weights work correctly
        history = sim.fit(
            [x[..., [0]], x[..., [1]]],
            y,
            validation_data=([x[..., [0]], x[..., [1]]], y,
                             np.zeros(y.shape[0])),
            epochs=1,
            verbose=0,
        )
        assert np.allclose(history.history["val_loss"][-1], 0)

        # TODO: this will work in eager mode
        # sim.reset()
        # history = sim.fit(
        #     [tf.constant(x[..., [0]]), tf.constant(x[..., [1]])],
        #     tf.constant(y),
        #     epochs=200,
        #     verbose=0,
        # )
        # assert history.history["loss"][-1] < 5e-4

        sim.reset()
        history = sim.fit(
            (((x[..., [0]], x[..., [1]], np.ones((4, 1), dtype=np.int32)), y)
             for _ in range(200)),
            epochs=20,
            steps_per_epoch=10,
            verbose=0,
        )
        assert history.history["loss"][-1] < 5e-4

    # TODO: this crashes if placed on GPU (but not in eager mode)
    with Simulator(
            net,
            minibatch_size=minibatch_size,
            unroll_simulation=1,
            seed=seed,
            device="/cpu:0",
    ) as sim:
        sim.compile(optimizer=tf.optimizers.Adam(0.01), loss=tf.losses.mse)

        history = sim.fit(
            tf.data.Dataset.from_tensors(
                ((x[..., [0]], x[..., [1]], np.ones((4, 1),
                                                    dtype=np.int32)), y)),
            validation_data=tf.data.Dataset.from_tensors(
                ((x[..., [0]], x[..., [1]], np.ones((4, 1),
                                                    dtype=np.int32)), y)),
            epochs=200,
            verbose=0,
        )
        assert history.history["loss"][-1] < 5e-4
        assert history.history["val_loss"][-1] < 5e-4
Example #16
    plt.plot(x, ref_rates, "k", label="predicted")
    if ref_rates2 is not None:
        plt.plot(x, ref_rates2, "b", label="predicted-base")
    plt.plot(x, est_rates, "g", label="measured")
    plt.legend(loc="best")

    assert ref_rates.shape == est_rates.shape
    assert allclose(est_rates, ref_rates, atol=1, rtol=0, xtol=1)
    if ref_rates2 is not None:
        assert allclose(ref_rates2, ref_rates)


@pytest.mark.parametrize(
    "neuron_type",
    [
        nengo.Sigmoid(),
        nengo.RegularSpiking(nengo.Sigmoid()),
    ],
)
def test_loihi_rates_other_type(neuron_type, allclose):
    """Test using a neuron type that has no Loihi-specific implementation"""
    x = np.linspace(-7, 10)
    gain, bias = 0.2, 0.4
    dt = 0.002
    ref_rates = nengo_rates(neuron_type, x, gain, bias)
    rates = loihi_rates(neuron_type, x, gain, bias, dt)
    assert ref_rates.shape == rates.shape
    assert allclose(rates, ref_rates)


Example #17
    def __init__(self,
                 t_rc,
                 t_ref,
                 t_psc,
                 c_spacing,
                 j,
                 i,
                 m,
                 x_j_hat_node,
                 sf_node,
                 s_node,
                 valid_sf=None,
                 n_dendrites=50):
        """

        :param t_rc: membrane RC time constant (passed to nengo.LIF as tau_rc)
        :param t_ref: refractory period (passed to nengo.LIF as tau_ref)
        :param t_psc: post-synaptic time constant
        :param c_spacing: spacing between columns
        :param j: position of connected column in previous layer
        :param i: parent column position in c
        :param m: max shift allowed in the column

        :param x_j_hat_node: Nengo node/ensemble of connected column in previous layer
        :param sf_node: Nengo node/ensemble of current layer subsampling factor
        :param s_node: Nengo node/ensemble of relative shift at current level

        Optimization parameters
        :param valid_sf: list of valid subsampling factors. Default = [1, 1.5, 2]
        :param n_dendrites: Number of dendrites for each neuron in L4 soma. Default=50

        :return:
        """

        self.j = j
        self.n_dendrites = n_dendrites

        if valid_sf is None:
            valid_sf = [1, 1.5, 2]

        # Populations --------------------------------------------------------------

        # Dendrites Routing
        # d[0] = mu_i = sf * i + s
        # d[1] = sigma_att = sf * column_spacing / 2.35
        max_sf = max(valid_sf)
        max_mu = max_sf * i + m
        max_sigma = max_sf * c_spacing / 2.35

        self.dend_rout = nengo.Ensemble(
            2000,  # population size
            2,  # dimensionality
            max_rates=nengo.dists.Uniform(100, 200),
            neuron_type=nengo.LIF(tau_ref=t_ref, tau_rc=t_rc),
            # neuron_type=nengo.Direct(),
            label='L4 dend rout',
            radius=np.sqrt(max_mu**2 + max_sigma**2))

        # Dendrites
        # d[0] = scaling factor
        # d[1] = input from the previous level's connected column
        self.dend = nengo.Ensemble(
            300,
            2,
            max_rates=nengo.dists.Uniform(100, 200),
            neuron_type=nengo.Sigmoid(),  # Dendrites do not spike
            radius=np.sqrt(2),
            label='L4 dendrites')

        # Soma
        self.soma = nengo.Ensemble(
            self.n_dendrites,  # population size
            1,  # dimensionality
            max_rates=nengo.dists.Uniform(100, 200),
            neuron_type=nengo.LIF(tau_ref=t_ref, tau_rc=t_rc),
            label='L4 soma',
        )

        # The output of the routing function can take on only a specific set of
        # values. When solving for decoders, evaluate the function at points
        # surrounding these values to increase representation accuracy there.
        pnts_d0 = np.random.choice(valid_sf,
                                   size=1000)  # Valid subsampling values
        pnts_d0 = pnts_d0 * i
        pnts_d0 = pnts_d0 + np.random.choice([-m, 0, m],
                                             size=1000)  # Valid shifts
        pnts_d0 = pnts_d0 + np.random.normal(loc=0, scale=0.1, size=1000)

        pnts_d1 = np.random.choice(valid_sf, size=1000) * c_spacing / 2.35 + \
            np.random.normal(loc=0, scale=0.1, size=1000)

        eval_points = np.vstack([pnts_d0, pnts_d1]).T

        # Scale factors are also between 0 and 1, so use another set of eval
        # points to improve the soma representation.
        # TODO: Figure out why these eval points don't improve the representation
        # pnts_d0 = np.random.uniform(0, 1, size=1000)
        # pnts_d1 = np.random.uniform(-1, 1, size=1000)
        # eval_points2 = np.vstack([pnts_d0, pnts_d1]).T

        # Connections ------------------------------------------------
        nengo.Connection(sf_node,
                         self.dend_rout[0],
                         transform=i,
                         synapse=t_psc)
        nengo.Connection(s_node, self.dend_rout[0], synapse=t_psc)
        nengo.Connection(sf_node,
                         self.dend_rout[1],
                         transform=(c_spacing / 2.35),
                         synapse=t_psc)

        nengo.Connection(self.dend_rout,
                         self.dend[0],
                         eval_points=eval_points,
                         function=self.routing_function,
                         synapse=t_psc)
        nengo.Connection(x_j_hat_node, self.dend[1], synapse=t_psc)

        nengo.Connection(
            self.dend,
            self.soma,
            synapse=t_psc,  # eval_points=eval_points2,
            function=self.scaled_dend_out)

        # Probes --------------------------------------------------------------
        self.dend_rout_p = nengo.Probe(self.dend_rout, synapse=0.1)
        self.dend_p = nengo.Probe(self.dend, synapse=0.1)
        self.soma_p = nengo.Probe(self.soma, synapse=0.1)
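
The class above calls self.routing_function without showing it. A plausible reconstruction consistent with the comments (mu = sf * i + s, sigma = sf * c_spacing / 2.35, i.e. sigma derived from a full width at half maximum of sf * c_spacing, since FWHM ≈ 2.35 * sigma for a Gaussian); this is an assumption, not the original code:

import numpy as np

# Hypothetical sketch: a Gaussian attention weight centred on mu = d[0]
# with width sigma = d[1], evaluated at the connected column position j
# (self.j in the class above).
def routing_function(d, j):
    mu, sigma = d
    return np.exp(-((j - mu) ** 2) / (2.0 * sigma ** 2))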
Example #18
model = nengo.Network()
with model:

    stim = nengo.Node(0)

    a = nengo.Ensemble(n_neurons=50,
                       dimensions=1,
                       neuron_type=nengo.LIF(tau_rc=0.02, tau_ref=0.002))

    b = nengo.Ensemble(n_neurons=50,
                       dimensions=1,
                       neuron_type=nengo.LIFRate(tau_rc=0.02, tau_ref=0.002))

    c = nengo.Ensemble(n_neurons=50,
                       dimensions=1,
                       neuron_type=nengo.Sigmoid(tau_ref=0.002))

    d = nengo.Ensemble(n_neurons=50,
                       dimensions=1,
                       neuron_type=nengo.RectifiedLinear())

    e = nengo.Ensemble(n_neurons=50,
                       dimensions=1,
                       neuron_type=nengo.Izhikevich(tau_recovery=0.02,
                                                    coupling=0.2,
                                                    reset_voltage=-65,
                                                    reset_recovery=8))

    nengo.Connection(stim, a)
    nengo.Connection(stim, b)
    nengo.Connection(stim, c)
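
The model above is built but never simulated. A minimal sketch of probing the Sigmoid ensemble and running the network, using only standard nengo API (added for illustration, not part of the original snippet):

with model:
    p = nengo.Probe(c, synapse=0.01)  # decoded output of the Sigmoid ensemble

with nengo.Simulator(model) as sim:
    sim.run(1.0)
print(sim.data[p][-10:])  # should settle near the stimulus value (0 here)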
Example #19
    def __init__(self,
                 t_rc,
                 t_ref,
                 t_psc,
                 c_spacing,
                 j,
                 i,
                 m,
                 x_j_hat_node,
                 sf_node,
                 s_node,
                 valid_sf=None,
                 n_dendrites=50):
        """

        :param t_rc: membrane RC time constant (passed to nengo.LIF as tau_rc)
        :param t_ref: refractory period (passed to nengo.LIF as tau_ref)
        :param t_psc: post-synaptic time constant
        :param c_spacing: spacing between columns
        :param j: position of connected column in previous layer
        :param i: parent column position in c
        :param m: max shift allowed in the column

        :param x_j_hat_node: Nengo node/ensemble of connected column in previous layer
        :param sf_node: Nengo node/ensemble of current layer subsampling factor
        :param s_node: Nengo node/ensemble of relative shift at current level

        Optimization parameters
        :param valid_sf: list of valid subsampling factors. Default = [1, 1.5, 2]
        :param n_dendrites: Number of dendrites for each neuron in L4 soma. Default=50

        :return:
        """
        self.j = j
        self.n_dendrites = n_dendrites

        if valid_sf is None:
            valid_sf = [1, 1.5, 2]

        # Populations --------------------------------------------------------------

        # Dendrites Routing
        # d[0] = mu_ix = sf * ix + sx
        # d[1] = mu_iy = sf * iy + sy
        # d[2] = sigma_att = sf * column_spacing / 2.35
        max_sf = max(valid_sf)
        max_mu_x = max_sf * i[0] + m
        max_mu_y = max_sf * i[1] + m
        max_sigma = max_sf * c_spacing

        self.dend_rout = nengo.Ensemble(
            4000,  # population size
            3,  # dimensionality
            max_rates=nengo.dists.Uniform(100, 200),
            neuron_type=nengo.LIF(tau_ref=t_ref, tau_rc=t_rc),
            # neuron_type=nengo.Direct(),
            label='L4 dend rout',
            radius=np.sqrt(max_mu_x**2 + max_mu_y**2 + max_sigma**2))

        # Dendrites
        # d[0] = scaling factor (internal)
        # d[1] = input from the previous level's connected column
        self.dend = nengo.networks.EnsembleArray(
            n_neurons=300,
            ens_dimensions=2,
            n_ensembles=self.n_dendrites,
            neuron_type=nengo.Sigmoid(),  # Dendrites do not spike
            radius=np.sqrt(max_sf**2 + 1),
            label='L4 dendrites')

        # Soma
        self.soma = nengo.Ensemble(
            self.n_dendrites,  # population size
            1,  # dimensionality
            max_rates=nengo.dists.Uniform(100, 200),
            neuron_type=nengo.LIF(tau_ref=t_ref, tau_rc=t_rc),
            label='L4 soma',
        )

        encoders = self.soma.encoders.sample(n_dendrites, d=1)
        self.soma.encoders = encoders

        # Connections ------------------------------------------------
        nengo.Connection(sf_node,
                         self.dend_rout[0],
                         transform=i[0],
                         synapse=t_psc)
        nengo.Connection(sf_node,
                         self.dend_rout[1],
                         transform=i[1],
                         synapse=t_psc)
        nengo.Connection(sf_node,
                         self.dend_rout[2],
                         transform=(c_spacing / 2.35),
                         synapse=t_psc)

        nengo.Connection(s_node[0], self.dend_rout[0], synapse=t_psc)
        nengo.Connection(s_node[1], self.dend_rout[1], synapse=t_psc)

        # The output of the routing function can take on only a specific set of
        # values. When solving for decoders, evaluate the function at points
        # surrounding these values to increase representation accuracy there.
        sample_sf = np.random.choice(valid_sf, size=2000)
        sample_m = np.random.choice([-m, 0, m], size=2000)

        pnts_d0 = sample_sf[0: 1000] * i[0] + sample_m[0: 1000] + \
            np.random.normal(loc=0, scale=0.1, size=1000)
        pnts_d1 = sample_sf[0: 1000] * i[1] + sample_m[0: 1000] + \
            np.random.normal(loc=0, scale=0.1, size=1000)
        pnts_d2 = np.random.choice(valid_sf, size=1000) * c_spacing / 2.35 + \
            np.random.normal(loc=0, scale=0.1, size=1000)

        eval_points = np.vstack([pnts_d0, pnts_d1, pnts_d2]).T

        for e_idx, ens in enumerate(self.dend.ensembles):

            # previous column out ---> dendrites
            nengo.Connection(x_j_hat_node, ens[1], synapse=t_psc)

            # Dendrites routing ---> dendrites
            nengo.Connection(self.dend_rout,
                             ens[0],
                             eval_points=eval_points,
                             function=self.routing_function,
                             synapse=t_psc)

            # Dendrites ---> soma
            nengo.Connection(
                ens,
                self.soma.neurons[e_idx],
                synapse=t_psc,  # eval_points=eval_points2,
                function=self.scaled_dend_out,
                transform=encoders[e_idx])

        # Probes --------------------------------------------------------------
        self.dend_rout_p = nengo.Probe(self.dend_rout, synapse=0.1)
        self.dend_p = nengo.Probe(self.dend.ensembles[0], synapse=0.1)
        self.soma_p = nengo.Probe(self.soma, synapse=0.1)
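
As in Example #17, routing_function is referenced but not shown. A plausible 2-D analogue of the sketch given there (an assumption, not the original code):

import numpy as np

# Hypothetical sketch: a separable 2-D Gaussian centred on
# (mu_x, mu_y) = (d[0], d[1]) with width sigma = d[2], evaluated at the
# connected column position j = (jx, jy).
def routing_function_2d(d, j):
    mu_x, mu_y, sigma = d
    return np.exp(-((j[0] - mu_x) ** 2 + (j[1] - mu_y) ** 2)
                  / (2.0 * sigma ** 2))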