Example #1
0
    def _rate_step(self, J, dt):
        """Compute firing rates for input current ``J`` over timestep ``dt``.

        Forward pass yields the (discretized) Loihi rates; the backward pass
        instead differentiates a smooth LIF rate curve, via the
        ``stop_gradient`` identity at the return statement.

        Parameters
        ----------
        J : tf.Tensor
            Input current; the firing threshold ``self.one`` is subtracted
            below, so positive values of ``J - 1`` produce spikes.
        dt : float
            Simulator timestep used to discretize ``tau_ref``/``tau_rc``.

        Returns
        -------
        tf.Tensor
            Loihi rates on the forward pass; gradients of the LIF curve on
            the backward pass.
        """
        tau_ref = discretize_tau_ref(self.tau_ref, dt)
        tau_rc = discretize_tau_rc(self.tau_rc, dt)
        # Since LoihiLIF takes `ceil(period/dt)` the firing rate is
        # always below the LIF rate. Using `tau_ref1` in LIF curve makes
        # it the average of the LoihiLIF curve (rather than upper bound).
        tau_ref1 = tau_ref + 0.5 * dt

        # shift so that J > 0 means "above firing threshold"
        J -= self.one

        # --- compute Loihi rates (for forward pass)
        # `epsilon` guards the reciprocal/log against J <= 0; those entries
        # are masked to zero rate by the `tf.where` below anyway.
        period = tau_ref + tau_rc * tf.math.log1p(
            tf.math.reciprocal(tf.maximum(J, self.epsilon))
        )
        # quantize the interspike period to whole timesteps, as Loihi does
        period = dt * tf.math.ceil(period / dt)
        loihi_rates = self.spike_noise.generate(period, tau_rc=tau_rc)
        loihi_rates = tf.where(J > self.zero, self.amplitude * loihi_rates, self.zeros)

        # --- compute LIF rates (for backward pass)
        if self.config.lif_smoothing:
            # smoothed (SoftLIF-style) curve: replace J with
            # sigma*softplus(J/sigma) so the rate is differentiable at J = 0
            js = J / self.sigma
            # below js ~ -20, softplus underflows; handle that branch
            # analytically instead
            j_valid = js > -20
            js_safe = tf.where(j_valid, js, self.zeros)

            # softplus(js) = log(1 + e^js)
            z = tf.nn.softplus(js_safe) * self.sigma

            # as z->0
            #   z = s*log(1 + e^js) = s*e^js
            #   log(1 + 1/z) = log(1/z) = -log(s*e^js) = -js - log(s)
            q = tf.where(
                j_valid,
                tf.math.log1p(tf.math.reciprocal(z)),
                -js - tf.math.log(self.sigma),
            )

            rates = self.amplitude / (tau_ref1 + tau_rc * q)
        else:
            # hard LIF curve (with the tau_ref1 averaging correction)
            rates = self.amplitude / (
                tau_ref1
                + tau_rc
                * tf.math.log1p(tf.math.reciprocal(tf.maximum(J, self.epsilon)))
            )
            rates = tf.where(J > self.zero, rates, self.zeros)

        # rates + stop_gradient(loihi_rates - rates) =
        #     loihi_rates on forward pass, rates on backwards
        return rates + tf.stop_gradient(loihi_rates - rates)
Example #2
0
    def _step(self, J, voltage, refractory, dt):
        """Advance the spiking LIF state one timestep of length ``dt``.

        Returns ``(spikes, voltage, refractory)`` with gradients stopped on
        every output.
        """
        ref_time = discretize_tau_ref(self.tau_ref, dt)
        rc_time = discretize_tau_rc(self.tau_rc, dt)

        # integrate only over the part of the step outside the refractory
        # window
        integ_time = tf.clip_by_value(dt - refractory, self.zero, dt)
        voltage = voltage - (J - voltage) * tf.expm1(-integ_time / rc_time)

        over_thresh = voltage > self.one
        spike_out = tf.cast(over_thresh, J.dtype) * self.alpha

        # spiking neurons reset and enter refractory; the rest count down
        refractory = tf.where(over_thresh, ref_time + self.zeros,
                              refractory - dt)
        voltage = tf.where(over_thresh, self.zeros,
                           tf.maximum(voltage, self.min_voltage))

        # stop_gradient keeps NaNs from propagating through the surrounding
        # cond even when this spiking branch is not the one being used
        spike_out = tf.stop_gradient(spike_out)
        voltage = tf.stop_gradient(voltage)
        refractory = tf.stop_gradient(refractory)
        return spike_out, voltage, refractory
Example #3
0
def rate_nengo_dl_net(neuron_type,
                      discretize=True,
                      dt=0.001,
                      nx=256,
                      gain=1.,
                      bias=0.):
    """Build a network for measuring rate curves with Nengo DL.

    Parameters
    ----------
    neuron_type : NeuronType
        The neuron type used in the network's ensemble.
    discretize : bool, optional (Default: True)
        Whether ``tau_ref`` and ``tau_rc`` should be discretized before
        the rate curves are generated.
    dt : float, optional (Default: 0.001)
        Simulator timestep.
    nx : int, optional (Default: 256)
        Number of x points in the rate curve.
    gain : float, optional (Default: 1.)
        Gain of all neurons.
    bias : float, optional (Default: 0.)
        Bias of all neurons.
    """
    net = nengo.Network()
    net.dt = dt
    net.bias = bias
    net.gain = gain

    is_loihi_lif = isinstance(neuron_type, LoihiLIF)
    is_loihi_relu = isinstance(neuron_type, LoihiSpikingRectifiedLinear)

    lif_kw = {'amplitude': neuron_type.amplitude}
    if is_loihi_lif:
        net.x = np.linspace(-1, 30, nx)
        net.sigma = 0.02

        tau_rc = neuron_type.tau_rc
        tau_ref = neuron_type.tau_ref
        if discretize:
            tau_ref = discretize_tau_ref(tau_ref, dt)
            tau_rc = discretize_tau_rc(tau_rc, dt)
        # tau_ref + dt/2 makes the LIF curve track the average of the
        # discretized Loihi curve
        lif_kw['tau_rc'] = tau_rc
        lif_kw['tau_ref'] = tau_ref + 0.5 * dt

    elif is_loihi_relu:
        net.x = np.linspace(-1, 999, nx)

        net.tau_ref1 = 0.5 * dt
        net.j = neuron_type.current(net.x, gain, bias) - 1

    with net:
        if is_loihi_lif and discretize:
            nengo_dl.configure_settings(lif_smoothing=net.sigma)

        net.stim = nengo.Node(np.zeros(nx))
        net.ens = nengo.Ensemble(
            nx,
            1,
            neuron_type=neuron_type,
            gain=nengo.dists.Choice([gain]),
            bias=nengo.dists.Choice([bias]),
        )
        nengo.Connection(net.stim, net.ens.neurons, synapse=None)
        net.probe = nengo.Probe(net.ens.neurons)

    rates = {'ref': loihi_rates(neuron_type, net.x, gain, bias, dt=dt)}
    # rates['med'] approximates the smoothed Loihi tuning curve
    if is_loihi_lif:
        rates['med'] = nengo.LIF(**lif_kw).rates(net.x, gain, bias)
    elif is_loihi_relu:
        active = net.j > 0
        rates['med'] = np.zeros_like(net.j)
        rates['med'][active] = (neuron_type.amplitude /
                                (net.tau_ref1 + 1. / net.j[active]))

    return net, rates, lif_kw