Exemplo n.º 1
0
    def weights(self, new_w):
        """
        Set the recurrent weight matrix for this layer.

        :param new_w:   weight values with exactly size**2 elements; if the
                        recurrent synapses already exist, the values are also
                        assigned to them in flattened form
        """
        expected = self.size ** 2
        assert np.size(new_w) == expected, ("`new_w` must have [" +
                                            str(expected) + "] elements.")

        self._weights = new_w

        # Mirror the new weights onto the Brian2 synapse object, if it has
        # been created already
        if hasattr(self, "_rec_synapses"):
            flat_weights = np.asarray(new_w).reshape(self.size, -1).flatten()
            self._rec_synapses.weight = flat_weights
Exemplo n.º 2
0
def get_new_assignments(result_monitor, input_numbers):
    """
    Assign each excitatory neuron to the digit class it responded to most strongly.

    :param result_monitor:  2D array [num_samples x n_e] of recorded responses
    :param input_numbers:   sequence of the digit label (0-9) shown for each sample
    :return:                array of length n_e with the winning class per neuron
    """
    assignments = np.zeros(n_e)
    input_nums = np.asarray(input_numbers)
    maximum_rate = np.zeros(n_e)
    for label in range(10):
        num_assignments = np.count_nonzero(input_nums == label)
        # Fix: skip absent classes entirely. The original ran the comparison
        # loop even when num_assignments == 0, reusing a stale `rate` from the
        # previous class (and raising NameError if class 0 was absent).
        if num_assignments == 0:
            continue
        # Mean response of every neuron to this digit class
        rate = np.sum(result_monitor[input_nums == label],
                      axis=0) / num_assignments
        stronger = rate > maximum_rate
        maximum_rate[stronger] = rate[stronger]
        assignments[stronger] = label
    return assignments
Exemplo n.º 3
0
    def __init__(
        self,
        weights: np.ndarray,
        weights_in: np.ndarray,
        dt: float = 0.1 * ms,
        noise_std: float = 0 * mV,
        refractory=0 * ms,
        neuron_params=None,
        syn_params=None,
        integrator_name: str = "rk4",
        name: str = "unnamed",
    ):
        """
        Construct a spiking recurrent layer with IAF neurons, with a Brian2 back-end

        :param weights:           np.array NxN recurrent weight matrix
        :param weights_in:        np.array 1xN input weight matrix
        :param dt:                float Time step for state evolution. Default: 0.1 ms
        :param noise_std:         float Std. dev. of noise. NOTE: noise is currently
                                  not implemented in this layer
        :param refractory:        float Refractory period after each spike. Default: 0 ms
        :param neuron_params:     dict Parameters overriding the neuron defaults
        :param syn_params:        dict Parameters overriding the synapse defaults
        :param integrator_name:   str Integrator to use for simulation. Default: 'rk4'
        :param name:              str Name for the layer. Default: 'unnamed'
        """
        warn("RecDynapseBrian: This layer is deprecated.")

        # - Call super constructor
        super().__init__(
            weights=weights,
            dt=np.asarray(dt),
            noise_std=np.asarray(noise_std),
            name=name,
        )

        # - Input weights must be provided
        assert weights_in is not None, "weights_in must be provided."

        # - Warn that noise is not implemented
        if noise_std != 0:
            print("WARNING: Noise is currently not implemented in this layer.")

        # - Set up spike source to receive spiking input
        self._input_generator = b2.SpikeGeneratorGroup(
            self.size, [0], [0 * second], dt=np.asarray(dt) * second)

        # - Handle unit of dt: if no unit provided, assume it is in seconds
        #   (np.asscalar was removed from numpy; .item() is the replacement)
        dt = np.asarray(dt).item() * second

        ### --- Neurons

        # - Set up reservoir neurons
        self._neuron_group = teiliNG(
            N=self.size,
            equation_builder=teiliDPIEqts(num_inputs=2),
            name="reservoir_neurons",
            refractory=refractory,
            method=integrator_name,
            dt=dt,
        )

        # - Overwrite default neuron parameters
        if neuron_params is not None:
            self._neuron_group.set_params(
                dict(dTeiliNeuronParam, **neuron_params))
        else:
            self._neuron_group.set_params(dTeiliNeuronParam)

        ### --- Synapses

        # - Add recurrent synapses (all-to-all)
        self._rec_synapses = teiliSyn(
            self._neuron_group,
            self._neuron_group,
            equation_builder=teiliDPISynEqts,
            method=integrator_name,
            dt=dt,
            name="reservoir_recurrent_synapses",
        )
        self._rec_synapses.connect()

        # - Add source -> reservoir synapses (one-to-one)
        self._inp_synapses = teiliSyn(
            self._input_generator,
            self._neuron_group,
            equation_builder=teiliDPISynEqts,
            method=integrator_name,
            dt=np.asarray(dt) * second,
            name="receiver_synapses",
        )
        # Each spike generator neuron corresponds to one reservoir neuron
        self._inp_synapses.connect("i==j")

        # - Overwrite default synapse parameters
        #   (fix: this previously passed `neuron_params` by mistake)
        if syn_params is not None:
            self._rec_synapses.set_params(syn_params)
            self._inp_synapses.set_params(syn_params)

        # - Add spike monitor to record layer outputs
        self._spike_monitor = b2.SpikeMonitor(self._neuron_group,
                                              record=True,
                                              name="layer_spikes")

        # - Call Network constructor
        self._net = b2.Network(
            self._neuron_group,
            self._rec_synapses,
            self._input_generator,
            self._inp_synapses,
            self._spike_monitor,
            name="recurrent_spiking_layer",
        )

        # - Record neuron / synapse parameters
        # automatically sets weights  via setters
        self.weights = weights
        self.weights_in = weights_in

        # - Store "reset" state
        self._net.store("reset")
Exemplo n.º 4
0
 def state(self, new_state):
     """
     Set the membrane currents of the reservoir neurons.

     :param new_state:   scalar or array of values, expanded to the layer size
     """
     # NOTE(review): assigns `volt` units to a quantity named i_mem — confirm units
     expanded = self._expand_to_net_size(new_state, "new_state")
     self._neuron_group.i_mem = np.asarray(expanded) * volt
Exemplo n.º 5
0
    def __init__(
        self,
        weights: Union[np.ndarray, int] = None,
        dt: float = 0.1 * ms,
        noise_std: float = 0 * mV,
        tau_syn: float = 5 * ms,
        synapse_eq=eqSynapseExp,
        integrator_name: str = "rk4",
        name: str = "unnamed",
    ):
        """
        Construct an exponential synapse layer (spiking input), with a Brian2 backend

        :param weights:         np.array MxN weight matrix,
                                or int Size of layer -> creates one-to-one conversion layer
        :param dt:              float Time step for state evolution. Default: 0.1 ms
        :param noise_std:       float Std. dev. of noise added to this layer. Default: 0
        :param tau_syn:         float Output synaptic time constants. Default: 5 ms
        :param synapse_eq:      Brian2.Equations set of synapse equations for receiver. Default: exponential
        :param integrator_name: str Integrator to use for simulation. Default: 'rk4'
        :param name:            str Name for the layer. Default: 'unnamed'
        """
        warn(
            "FFExpSynBrian - This layer is deprecated. You can use FFExpSyn or FFExpSynTorch instead."
        )

        # - Provide default dt
        if dt is None:
            dt = 0.1 * ms

        # - Provide default weight matrix for one-to-one conversion
        if isinstance(weights, int):
            weights = np.identity(weights, "float")

        # - Call super constructor
        super().__init__(weights=weights, dt=dt, noise_std=noise_std, name=name)

        # - Set up spike source to receive spiking input
        self._input_generator = b2.SpikeGeneratorGroup(
            self.size_in, [0], [0 * second], dt=np.asarray(dt) * second
        )

        # - Set up layer receiver nodes
        # NOTE(review): refractory=False is passed where Brian2 documents a time
        # quantity — presumably this disables refractoriness; confirm
        self._neuron_group = b2.NeuronGroup(
            self.size,
            synapse_eq,
            refractory=False,
            method=integrator_name,
            dt=np.asarray(dt) * second,
            name="receiver_neurons",
        )

        # - Add source -> receiver synapses; each presynaptic spike adds its
        #   weight (in amperes) to the receiver's synaptic current
        self._inp_synapses = b2.Synapses(
            self._input_generator,
            self._neuron_group,
            model="w : 1",
            on_pre="I_syn_post += w*amp",
            method=integrator_name,
            dt=np.asarray(dt) * second,
            name="receiver_synapses",
        )
        self._inp_synapses.connect()

        # - Add current monitors to record reservoir outputs
        self._state_monitor = b2.StateMonitor(
            self._neuron_group, "I_syn", True, name="receiver_synaptic_currents"
        )

        # - Call Network constructor
        self._net = b2.Network(
            self._input_generator,
            self._neuron_group,
            self._inp_synapses,
            self._state_monitor,
            name="ff_spiking_to_exp_layer",
        )

        # - Record layer parameters, set weights (via property setters)
        self.weights = weights
        self.tau_syn = tau_syn

        # - Store "reset" state so the network can be restored later
        self._net.store("reset")
Exemplo n.º 6
0
 def tau_syn(self, new_tau_syn):
     """
     Set the synaptic time constant(s) of the receiver neurons.

     :param new_tau_syn:   time constant value(s), interpreted in seconds
     """
     tau_with_units = np.asarray(new_tau_syn) * second
     self._neuron_group.tau_s = tau_with_units
Exemplo n.º 7
0
 def state(self, new_state):
     """
     Set the output synaptic currents of the receiver neurons.

     :param new_state:   scalar or array of currents, expanded to the layer size
     """
     expanded = self._expand_to_net_size(new_state, "new_state")
     self._neuron_group.I_syn = np.asarray(expanded) * amp
Exemplo n.º 8
0
    def evolve(
        self,
        ts_input: Optional[TSEvent] = None,
        duration: Optional[float] = None,
        num_timesteps: Optional[int] = None,
        verbose: bool = False,
    ) -> TSContinuous:
        """
        Function to evolve the states of this layer given an input

        :param Optional[TSEvent] ts_input:    Input spike train
        :param Optional[float] duration:      Simulation/Evolution time
        :param Optional[int] num_timesteps:   Number of evolution time steps
        :param bool verbose:                  Currently no effect, just for conformity
        :return TSContinuous:                 Output time series of synaptic currents
        """

        # - Prepare time base
        time_base, __, num_timesteps = self._prepare_input(
            ts_input, duration, num_timesteps
        )

        # - Set spikes for spike generator
        if ts_input is not None:
            event_times, event_channels, _ = ts_input(
                t_start=time_base[0], t_stop=time_base[-1] + self.dt
            )
            self._input_generator.set_spikes(
                event_channels, event_times * second, sorted=False
            )
        else:
            # No input: clear any pending spikes from the generator
            self._input_generator.set_spikes([], [] * second)

        # - Generate a noise trace, scaled so its power matches the synaptic
        #   time constant relative to the time step
        noise_step = (
            np.random.randn(np.size(time_base), self.size)
            * self.noise_std
            * np.sqrt(2 * self.tau_syn / self.dt)
        )
        # noise_step = np.zeros((np.size(time_base), self.size))
        # noise_step[0,:] = self.noise_std

        # - Specify noise input currents, construct TimedArray shifted to the
        #   current simulation time
        inp_noise = TAShift(
            np.asarray(noise_step) * amp,
            self.dt * second,
            tOffset=self.t * second,
            name="noise_input",
        )

        # - Perform simulation, injecting the noise as I_inp
        self._net.run(
            num_timesteps * self.dt * second, namespace={"I_inp": inp_noise}, level=0
        )
        self._timestep += num_timesteps

        # - Build response TimeSeries: keep only monitor samples from this
        #   evolution (at or after the start of the current time base)
        time_base_out = self._state_monitor.t_
        use_time = self._state_monitor.t_ >= time_base[0]
        time_base_out = time_base_out[use_time]
        a = self._state_monitor.I_syn_.T
        a = a[use_time, :]

        # - Append the current state as final time point, if the monitor did
        #   not record a sample exactly at self.t
        if time_base_out[-1] != self.t:
            time_base_out = np.concatenate((time_base_out, [self.t]))
            a = np.concatenate((a, np.reshape(self.state, (1, self.size))))

        return TSContinuous(time_base_out, a, name="Receiver current")