def _write_common_data_spec(self, spec, rec_regions): """ Write the data specification for the common regions :param ~data_specification.DataSpecificationGenerator spec: The data specification to write to :param list(int) rec_regions: A list of sizes of each recording region (including empty ones) """ # Write the setup region spec.reserve_memory_region(region=self.__regions.system, size=SIMULATION_N_BYTES, label='System') spec.switch_write_focus(self.__regions.system) spec.write_array(get_simulation_header_array(self.__binary_file_name)) # Reserve memory for provenance self.reserve_provenance_data_region(spec) # Write profile data reserve_profile_region(spec, self.__regions.profile, self._app_vertex.n_profile_samples) write_profile_region_data(spec, self.__regions.profile, self._app_vertex.n_profile_samples) # Set up for recording spec.reserve_memory_region(region=self.__regions.recording, size=get_recording_header_size( len(rec_regions)), label="Recording") spec.switch_write_focus(self.__regions.recording) spec.write_array(get_recording_header_array(rec_regions))
def generate_data_specification(
        self, spec, placement, machine_time_step, time_scale_factor,
        graph_mapper, application_graph, machine_graph, routing_info,
        tags, n_machine_time_steps):
    """ Generate the data specification for a population machine vertex.

    Writes, in order: the system (setup) region, the buffered-recording
    header, the neuron parameters, the profiling data and the synaptic
    data, then ends the specification.

    :param spec: the data specification to write to
    :param placement: the placement of the machine vertex being written
    :param machine_time_step: the machine time step of the simulation
    :param time_scale_factor: the time scale factor of the simulation
    :param graph_mapper: mapping between application and machine graphs
    :param application_graph: the application graph
    :param machine_graph: the machine graph
    :param routing_info: routing information, used to get the spike key
    :param tags: tags assigned to vertices (used for buffering IP tags)
    :param n_machine_time_steps: number of machine time steps of the run
    """
    # pylint: disable=too-many-arguments, arguments-differ
    vertex = placement.vertex
    spec.comment("\n*** Spec for block of {} neurons ***\n".format(
        self._model_name))
    vertex_slice = graph_mapper.get_slice(vertex)

    # Reserve memory regions
    self._reserve_memory_regions(spec, vertex_slice, vertex)

    # Declare random number generators and distributions:
    # TODO add random distribution stuff
    # self.write_random_distribution_declarations(spec)

    # Get the key for sending spikes from this vertex
    key = routing_info.get_first_key_from_pre_vertex(
        vertex, constants.SPIKE_PARTITION_ID)

    # Write the setup region
    spec.switch_write_focus(
        constants.POPULATION_BASED_REGIONS.SYSTEM.value)
    spec.write_array(
        simulation_utilities.get_simulation_header_array(
            self.get_binary_file_name(), machine_time_step,
            time_scale_factor))

    # Write the recording region header; sizes are bounded by the
    # maximum SDRAM allowed for buffering
    spec.switch_write_focus(
        constants.POPULATION_BASED_REGIONS.RECORDING.value)
    ip_tags = tags.get_ip_tags_for_vertex(vertex)
    recorded_region_sizes = recording_utilities.get_recorded_region_sizes(
        self._get_buffered_sdram(vertex_slice, n_machine_time_steps),
        self._maximum_sdram_for_buffering)
    spec.write_array(
        recording_utilities.get_recording_header_array(
            recorded_region_sizes, self._time_between_requests,
            self._buffer_size_before_receive, ip_tags))

    # Write the neuron parameters
    self._write_neuron_parameters(spec, key, vertex_slice,
                                  machine_time_step, time_scale_factor)

    # write profile data
    profile_utils.write_profile_region_data(
        spec, constants.POPULATION_BASED_REGIONS.PROFILING.value,
        self._n_profile_samples)

    # allow the synaptic matrix to write its data spec-able data
    self._synapse_manager.write_data_spec(
        spec, self, vertex_slice, vertex, placement, machine_graph,
        application_graph, routing_info, graph_mapper, self._input_type,
        machine_time_step)

    # End the writing of this specification:
    spec.end_specification()
def generate_data_specification(self, spec, placement, machine_time_step, time_scale_factor, routing_info, data_n_time_steps, graph, first_machine_time_step): """ :param int machine_time_step: :param int time_scale_factor: :param ~pacman.model.routing_info.RoutingInfo routing_info: :param int data_n_time_steps: :param ~pacman.model.graphs.machine.MachineGraph graph: :param int first_machine_time_step: """ # pylint: disable=too-many-arguments, arguments-differ self.__machine_time_step = machine_time_step vertex_slice = placement.vertex.vertex_slice spec.comment("\n*** Spec for SpikeSourcePoisson Instance ***\n\n") # Reserve SDRAM space for memory areas: self.reserve_memory_regions(spec, placement) # write setup data spec.switch_write_focus(_REGIONS.SYSTEM_REGION.value) spec.write_array( simulation_utilities.get_simulation_header_array( placement.vertex.get_binary_file_name(), machine_time_step, time_scale_factor)) # write recording data spec.switch_write_focus(_REGIONS.SPIKE_HISTORY_REGION.value) sdram = self.get_recording_sdram_usage(vertex_slice, machine_time_step) recorded_region_sizes = [sdram.get_total_sdram(data_n_time_steps)] spec.write_array( recording_utilities.get_recording_header_array( recorded_region_sizes)) # write parameters self._write_poisson_parameters(spec, graph, placement, routing_info, vertex_slice, machine_time_step) # write rates self._write_poisson_rates(spec, vertex_slice, machine_time_step, first_machine_time_step) # write profile data profile_utils.write_profile_region_data(spec, _REGIONS.PROFILER_REGION.value, self.__n_profile_samples) # write tdma params spec.switch_write_focus(_REGIONS.TDMA_REGION.value) spec.write_array( self.generate_tdma_data_specification_data( self.vertex_slices.index(vertex_slice))) # End-of-Spec: spec.end_specification()
def generate_data_specification(
        self, spec, placement, machine_time_step, time_scale_factor,
        graph_mapper, application_graph, machine_graph, routing_info,
        data_n_time_steps):
    """ Generate the data specification for a population machine vertex.

    Writes, in order: the system (setup) region, the neuron recording
    region, the neuron parameters, the profiling data and the synaptic
    data, then ends the specification.

    :param spec: the data specification to write to
    :param placement: the placement of the machine vertex being written
    :param machine_time_step: the machine time step of the simulation
    :param time_scale_factor: the time scale factor of the simulation
    :param graph_mapper: mapping between application and machine graphs
    :param application_graph: the application graph
    :param machine_graph: the machine graph
    :param routing_info: routing information, used to get the spike key
    :param data_n_time_steps: number of time steps of data to record for
    """
    # pylint: disable=too-many-arguments, arguments-differ
    vertex = placement.vertex
    spec.comment("\n*** Spec for block of {} neurons ***\n".format(
        self.__neuron_impl.model_name))
    vertex_slice = graph_mapper.get_slice(vertex)

    # Reserve memory regions
    self._reserve_memory_regions(spec, vertex_slice, vertex)

    # Declare random number generators and distributions:
    # TODO add random distribution stuff
    # self.write_random_distribution_declarations(spec)

    # Get the key for sending spikes from this vertex
    key = routing_info.get_first_key_from_pre_vertex(
        vertex, constants.SPIKE_PARTITION_ID)

    # Write the setup region
    spec.switch_write_focus(
        constants.POPULATION_BASED_REGIONS.SYSTEM.value)
    spec.write_array(
        simulation_utilities.get_simulation_header_array(
            self.get_binary_file_name(), machine_time_step,
            time_scale_factor))

    # Write the neuron recording region
    self._neuron_recorder.write_neuron_recording_region(
        spec, POPULATION_BASED_REGIONS.NEURON_RECORDING.value,
        vertex_slice, data_n_time_steps)

    # Write the neuron parameters
    self._write_neuron_parameters(spec, key, vertex_slice,
                                  machine_time_step, time_scale_factor)

    # write profile data
    profile_utils.write_profile_region_data(
        spec, constants.POPULATION_BASED_REGIONS.PROFILING.value,
        self.__n_profile_samples)

    # Get the weight_scale value from the appropriate location
    weight_scale = self.__neuron_impl.get_global_weight_scale()

    # allow the synaptic matrix to write its data spec-able data
    self.__synapse_manager.write_data_spec(
        spec, self, vertex_slice, vertex, placement, machine_graph,
        application_graph, routing_info, graph_mapper, weight_scale,
        machine_time_step)

    # End the writing of this specification:
    spec.end_specification()
def generate_data_specification(
        self, spec, placement, machine_time_step, time_scale_factor,
        graph_mapper, application_graph, machine_graph, routing_info,
        data_n_time_steps, placements):
    """ Generate the data specification for a population machine vertex.

    Writes, in order: the system (setup) region, the recording header,
    the neuron parameters, the profiling data and the synaptic data,
    then ends the specification.

    :param spec: the data specification to write to
    :param placement: the placement of the machine vertex being written
    :param machine_time_step: the machine time step of the simulation
    :param time_scale_factor: the time scale factor of the simulation
    :param graph_mapper: mapping between application and machine graphs
    :param application_graph: the application graph
    :param machine_graph: the machine graph
    :param routing_info: routing information, used to get the spike key
    :param data_n_time_steps: number of time steps of data to record for
    :param placements: the placements of all vertices (passed through to
        the synapse manager)
    """
    # pylint: disable=too-many-arguments, arguments-differ
    vertex = placement.vertex
    spec.comment("\n*** Spec for block of {} neurons ***\n".format(
        self._neuron_impl.model_name))
    vertex_slice = graph_mapper.get_slice(vertex)

    # Reserve memory regions
    self._reserve_memory_regions(spec, vertex_slice, vertex)

    # Declare random number generators and distributions:
    # TODO add random distribution stuff
    # self.write_random_distribution_declarations(spec)

    # Get the key for sending spikes from this vertex
    key = routing_info.get_first_key_from_pre_vertex(
        vertex, constants.SPIKE_PARTITION_ID)

    # Write the setup region
    spec.switch_write_focus(
        constants.POPULATION_BASED_REGIONS.SYSTEM.value)
    spec.write_array(simulation_utilities.get_simulation_header_array(
        self.get_binary_file_name(), machine_time_step,
        time_scale_factor))

    # Write the recording region
    spec.switch_write_focus(
        constants.POPULATION_BASED_REGIONS.RECORDING.value)
    spec.write_array(recording_utilities.get_recording_header_array(
        self._get_buffered_sdram(vertex_slice, data_n_time_steps)))

    # Write the neuron parameters
    self._write_neuron_parameters(
        spec, key, vertex_slice, machine_time_step, time_scale_factor)

    # write profile data
    profile_utils.write_profile_region_data(
        spec, constants.POPULATION_BASED_REGIONS.PROFILING.value,
        self._n_profile_samples)

    # Get the weight_scale value from the appropriate location
    weight_scale = self._neuron_impl.get_global_weight_scale()

    # allow the synaptic matrix to write its data spec-able data
    self._synapse_manager.write_data_spec(
        spec, self, vertex_slice, vertex, placement, machine_graph,
        application_graph, routing_info, graph_mapper, weight_scale,
        machine_time_step, placements)

    # End the writing of this specification:
    spec.end_specification()
def generate_data_specification(
        self, spec, placement, machine_time_step, time_scale_factor,
        graph_mapper, routing_info, data_n_time_steps, graph):
    """ Generate the data specification for a SpikeSourcePoisson vertex.

    Writes, in order: the system (setup) region, the spike-history
    recording header, the Poisson parameters and the profiling data,
    then ends the specification.

    :param spec: the data specification to write to
    :param placement: the placement of the machine vertex being written
    :param machine_time_step: the machine time step of the simulation
    :param time_scale_factor: the time scale factor of the simulation
    :param graph_mapper: mapping between application and machine graphs
    :param routing_info: the routing information for the graph
    :param data_n_time_steps: number of time steps of data to record for
    :param graph: the machine graph
    """
    # pylint: disable=too-many-arguments, arguments-differ
    # Remember the time step for later use (e.g. when re-reading data)
    self.__machine_time_step = machine_time_step
    vertex = placement.vertex
    vertex_slice = graph_mapper.get_slice(vertex)

    spec.comment("\n*** Spec for SpikeSourcePoisson Instance ***\n\n")

    # Reserve SDRAM space for memory areas:
    self.reserve_memory_regions(spec, placement, graph_mapper)

    # write setup data
    spec.switch_write_focus(_REGIONS.SYSTEM_REGION.value)
    spec.write_array(
        simulation_utilities.get_simulation_header_array(
            self.get_binary_file_name(), machine_time_step,
            time_scale_factor))

    # write recording data: a single recorded region (spike history),
    # sized for the number of time steps of data to be held
    spec.switch_write_focus(_REGIONS.SPIKE_HISTORY_REGION.value)
    sdram = self.get_recording_sdram_usage(vertex_slice, machine_time_step)
    recorded_region_sizes = [sdram.get_total_sdram(data_n_time_steps)]
    spec.write_array(
        recording_utilities.get_recording_header_array(
            recorded_region_sizes))

    # write parameters
    self._write_poisson_parameters(spec, graph, placement, routing_info,
                                   vertex_slice, machine_time_step,
                                   time_scale_factor)

    # write profile data
    profile_utils.write_profile_region_data(
        spec, _REGIONS.PROFILER_REGION.value, self.__n_profile_samples)

    # End-of-Spec:
    spec.end_specification()
def _write_profile_dsg(self, spec): if self._profile: profile_utils.write_profile_region_data(spec, self._profile_region, self._n_profile_samples)
def generate_data_specification(self, spec, placement, machine_time_step, time_scale_factor, application_graph, machine_graph, routing_info, data_n_time_steps, n_key_map): """ :param machine_time_step: (injected) :param time_scale_factor: (injected) :param application_graph: (injected) :param machine_graph: (injected) :param routing_info: (injected) :param data_n_time_steps: (injected) :param n_key_map: (injected) """ # pylint: disable=too-many-arguments, arguments-differ vertex = placement.vertex spec.comment("\n*** Spec for block of {} neurons ***\n".format( self.__neuron_impl.model_name)) vertex_slice = vertex.vertex_slice # Reserve memory regions self._reserve_memory_regions(spec, vertex_slice, vertex, machine_graph, n_key_map) # Declare random number generators and distributions: # TODO add random distribution stuff # self.write_random_distribution_declarations(spec) # Get the key key = routing_info.get_first_key_from_pre_vertex( vertex, constants.SPIKE_PARTITION_ID) # Write the setup region spec.switch_write_focus(POPULATION_BASED_REGIONS.SYSTEM.value) spec.write_array( simulation_utilities.get_simulation_header_array( self.get_binary_file_name(), machine_time_step, time_scale_factor)) # Write the neuron recording region self._neuron_recorder.write_neuron_recording_region( spec, POPULATION_BASED_REGIONS.NEURON_RECORDING.value, vertex_slice, data_n_time_steps) # Write the neuron parameters self._write_neuron_parameters(spec, key, vertex_slice, machine_time_step, time_scale_factor) # write profile data profile_utils.write_profile_region_data( spec, POPULATION_BASED_REGIONS.PROFILING.value, self.__n_profile_samples) # Get the weight_scale value from the appropriate location weight_scale = self.__neuron_impl.get_global_weight_scale() # allow the synaptic matrix to write its data spec-able data self.__synapse_manager.write_data_spec(spec, self, vertex_slice, vertex, placement, machine_graph, application_graph, routing_info, weight_scale, machine_time_step) 
vertex.set_on_chip_generatable_area( self.__synapse_manager.host_written_matrix_size, self.__synapse_manager.on_chip_written_matrix_size) # write up the bitfield builder data bit_field_utilities.write_bitfield_init_data( spec, vertex, machine_graph, routing_info, n_key_map, POPULATION_BASED_REGIONS.BIT_FIELD_BUILDER.value, POPULATION_BASED_REGIONS.POPULATION_TABLE.value, POPULATION_BASED_REGIONS.SYNAPTIC_MATRIX.value, POPULATION_BASED_REGIONS.DIRECT_MATRIX.value, POPULATION_BASED_REGIONS.BIT_FIELD_FILTER.value, POPULATION_BASED_REGIONS.BIT_FIELD_KEY_MAP.value, POPULATION_BASED_REGIONS.STRUCTURAL_DYNAMICS.value, isinstance(self.__synapse_manager.synapse_dynamics, AbstractSynapseDynamicsStructural)) # End the writing of this specification: spec.end_specification()
def generate_data_specification(self, spec, placement, routing_info, data_n_time_steps, graph, first_machine_time_step): """ :param ~pacman.model.routing_info.RoutingInfo routing_info: :param int data_n_time_steps: :param ~pacman.model.graphs.machine.MachineGraph graph: :param int first_machine_time_step: """ # pylint: disable=too-many-arguments, arguments-differ spec.comment("\n*** Spec for SpikeSourcePoisson Instance ***\n\n") # Reserve SDRAM space for memory areas: self.reserve_memory_regions(spec, placement) # write setup data spec.switch_write_focus( self.POISSON_SPIKE_SOURCE_REGIONS.SYSTEM_REGION.value) spec.write_array( simulation_utilities.get_simulation_header_array( placement.vertex.get_binary_file_name())) # write recording data spec.switch_write_focus( self.POISSON_SPIKE_SOURCE_REGIONS.SPIKE_HISTORY_REGION.value) sdram = self._app_vertex.get_recording_sdram_usage(self.vertex_slice) recorded_region_sizes = [sdram.get_total_sdram(data_n_time_steps)] spec.write_array( recording_utilities.get_recording_header_array( recorded_region_sizes)) # write parameters self._write_poisson_parameters(spec, graph, placement, routing_info) # write rates self._write_poisson_rates(spec, first_machine_time_step) # write profile data profile_utils.write_profile_region_data( spec, self.POISSON_SPIKE_SOURCE_REGIONS.PROFILER_REGION.value, self._app_vertex.n_profile_samples) # write tdma params spec.switch_write_focus( self.POISSON_SPIKE_SOURCE_REGIONS.TDMA_REGION.value) spec.write_array( self._app_vertex.generate_tdma_data_specification_data( self.__slice_index)) # write SDRAM edge parameters spec.switch_write_focus( self.POISSON_SPIKE_SOURCE_REGIONS.SDRAM_EDGE_PARAMS.value) if self.__sdram_partition is None: spec.write_array([0, 0, 0]) else: size = self.__sdram_partition.get_sdram_size_of_region_for(self) proj = self._app_vertex.outgoing_projections[0] synapse_info = proj._synapse_information spec.write_value( self.__sdram_partition.get_sdram_base_address_for(self)) 
spec.write_value(size) # Work out the offset into the data to write from, based on the # synapse type in use synapse_type = synapse_info.synapse_type offset = synapse_type * self.vertex_slice.n_atoms spec.write_value(offset) # If we are here, the connector must be one-to-one so create # the synapses and then store the scaled weights connections = synapse_info.connector.create_synaptic_block( None, None, self.vertex_slice, self.vertex_slice, synapse_type, synapse_info) weight_scales = (next(iter( self.__sdram_partition.edges)).post_vertex.weight_scales) weights = connections["weight"] * weight_scales[synapse_type] weights = numpy.rint(numpy.abs(weights)).astype("uint16") if len(weights) % 2 != 0: padding = numpy.array([0], dtype="uint16") weights = numpy.concatenate((weights, padding)) spec.write_array(weights.view("uint32")) # End-of-Spec: spec.end_specification()