def __init__(self, data_types, global_data_types=None):
    """ Create the neuron model component.

    :param data_types:\
        A list of data types in the neuron structure, in the order that\
        they appear
    :param global_data_types:\
        A list of data types in the neuron global structure, in the order\
        that they appear (defaults to an empty structure)
    """
    super(AbstractNeuronModel, self).__init__(data_types)
    # An absent global structure is modelled as an empty one
    self.__global_struct = Struct(
        [] if global_data_types is None else global_data_types)
PARAMS_BASE_WORDS = 14 # start_scaled, end_scaled, is_fast_source, exp_minus_lambda, isi_val, # time_to_spike PARAMS_WORDS_PER_NEURON = 6 START_OF_POISSON_GENERATOR_PARAMETERS = PARAMS_BASE_WORDS * 4 MICROSECONDS_PER_SECOND = 1000000.0 MICROSECONDS_PER_MILLISECOND = 1000.0 SLOW_RATE_PER_TICK_CUTOFF = 1.0 _REGIONS = SpikeSourcePoissonMachineVertex.POISSON_SPIKE_SOURCE_REGIONS _PoissonStruct = Struct([ DataType.UINT32, # Start Scaled DataType.UINT32, # End Scaled DataType.UINT32, # is_fast_source DataType.U032, # exp^(-rate) DataType.S1615, # inter-spike-interval DataType.S1615 ]) # timesteps to next spike class SpikeSourcePoissonVertex( ApplicationVertex, AbstractGeneratesDataSpecification, AbstractHasAssociatedBinary, AbstractSpikeRecordable, AbstractProvidesOutgoingPartitionConstraints, AbstractChangableAfterRun, AbstractReadParametersBeforeSet, AbstractRewritesDataSpecification, SimplePopulationSettable, ProvidesKeyToAtomMappingImpl): """ A Poisson Spike source object """
class AbstractNeuronModel(AbstractStandardNeuronComponent):
    """ Represents a neuron model.
    """

    __slots__ = ["__global_struct"]

    def __init__(self, data_types, global_data_types=None):
        """ Create the neuron model component.

        :param data_types:\
            A list of data types in the neuron structure, in the order that\
            they appear
        :param global_data_types:\
            A list of data types in the neuron global structure, in the order\
            that they appear (defaults to an empty structure)
        """
        super(AbstractNeuronModel, self).__init__(data_types)
        # An absent global structure is modelled as an empty one
        self.__global_struct = Struct(
            [] if global_data_types is None else global_data_types)

    @property
    def global_struct(self):
        """ Get the global parameters structure
        """
        return self.__global_struct

    def __global_size_bytes(self):
        # Size of the global structure in bytes (whole words * 4 bytes)
        return self.__global_struct.get_size_in_whole_words() * 4

    @overrides(AbstractStandardNeuronComponent.get_dtcm_usage_in_bytes)
    def get_dtcm_usage_in_bytes(self, n_neurons):
        # Per-neuron usage from the base class, plus the one shared
        # global structure
        base_usage = super(AbstractNeuronModel, self).get_dtcm_usage_in_bytes(
            n_neurons)
        return base_usage + self.__global_size_bytes()

    @overrides(AbstractStandardNeuronComponent.get_sdram_usage_in_bytes)
    def get_sdram_usage_in_bytes(self, n_neurons):
        # Per-neuron usage from the base class, plus the one shared
        # global structure
        base_usage = super(AbstractNeuronModel, self).get_sdram_usage_in_bytes(
            n_neurons)
        return base_usage + self.__global_size_bytes()

    def get_global_values(self):
        """ Get the global values to be written to the machine for this model.

        The default is an empty array (no global values); subclasses with a
        non-empty global structure override this to supply one value per
        field in self.global_struct.

        :return: A list with the same length as self.global_struct.field_types
        :rtype: A list of single values
        """
        return numpy.zeros(0, dtype="uint32")

    @overrides(AbstractStandardNeuronComponent.get_data)
    def get_data(self, parameters, state_variables, vertex_slice):
        # Global data is written first, followed by the per-neuron data
        # produced by the base class
        neuron_data = super(AbstractNeuronModel, self).get_data(
            parameters, state_variables, vertex_slice)
        global_data = self.__global_struct.get_data(self.get_global_values())
        return numpy.concatenate([global_data, neuron_data])

    @overrides(AbstractStandardNeuronComponent.read_data)
    def read_data(self, data, offset, vertex_slice, parameters,
                  state_variables):
        # Assume that the global data doesn't change; skip past it and let
        # the base class read the per-neuron data
        return super(AbstractNeuronModel, self).read_data(
            data, offset + self.__global_size_bytes(), vertex_slice,
            parameters, state_variables)
# Time unit conversion factors
MICROSECONDS_PER_SECOND = 1000000.0
MICROSECONDS_PER_MILLISECOND = 1000.0

# Rate (spikes per timestep) cutoffs between random-number algorithms
SLOW_RATE_PER_TICK_CUTOFF = 0.01  # as suggested by MH (between Exp and Knuth)
FAST_RATE_PER_TICK_CUTOFF = 10  # between Knuth algorithm and Gaussian approx.

# Shorthand for the machine vertex's memory region enumeration
_REGIONS = SpikeSourcePoissonMachineVertex.POISSON_SPIKE_SOURCE_REGIONS

# Number of timesteps of headroom allowed for in SDRAM buffering
# (NOTE(review): semantics inferred from name — confirm against users)
OVERFLOW_TIMESTEPS_FOR_SDRAM = 5

# The microseconds per timestep will be divided by this to get the max offset
_MAX_OFFSET_DENOMINATOR = 10

# On-machine layout of one Poisson source's parameters
_PoissonStruct = Struct([
    DataType.UINT32,  # Start Scaled
    DataType.UINT32,  # End Scaled
    DataType.UINT32,  # Next Scaled
    DataType.UINT32,  # is_fast_source
    DataType.U032,    # exp^(-spikes_per_tick)
    DataType.S1615,   # sqrt(spikes_per_tick)
    DataType.UINT32,  # inter-spike-interval
    DataType.UINT32])  # timesteps to next spike


def _flatten(alist):
    """ Recursively yield the leaf items of an arbitrarily nested iterable.

    Strings and bytes are treated as leaves: although they are iterable
    (on Python 3), iterating a string yields 1-character strings that are
    themselves iterable, so recursing into them would never terminate.

    :param alist: an iterable, possibly containing nested iterables
    :return: a generator over the non-iterable leaf items, in order
    """
    for item in alist:
        if hasattr(item, "__iter__") and not isinstance(item, (str, bytes)):
            for subitem in _flatten(item):
                yield subitem
        else:
            yield item