def __init__(self, dt=0.001, label=None, decoder_cache=NoDecoderCache()):
    self.dt = dt
    self.label = label
    self.decoder_cache = decoder_cache

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.operators = []
    self.params = {}
    self.probes = []
    self.seeds = {}
    self.seeded = {}

    self.sig = collections.defaultdict(dict)
    self.sig['common'][0] = Signal(0., readonly=True, name='ZERO')
    self.sig['common'][1] = Signal(1., readonly=True, name='ONE')

    self.step = Signal(np.array(0, dtype=np.int64), name='step')
    self.time = Signal(np.array(0, dtype=np.float64), name='time')
    self.add_op(TimeUpdate(self.step, self.time))

    self.build_callback = None
def __init__(self, dt=0.001, machine_timestep=1000,
             decoder_cache=NoDecoderCache(), keyspaces=None):
    self.dt = dt
    self.machine_timestep = machine_timestep
    self.decoder_cache = decoder_cache

    self.params = dict()
    self.seeds = dict()
    self.rngs = dict()
    self.rng = None

    self.config = None
    self.object_operators = dict()
    self.extra_operators = list()
    self.connection_map = model.ConnectionMap()

    if keyspaces is None:
        keyspaces = KeyspaceContainer()
    self.keyspaces = keyspaces

    # Builder dictionaries
    self._builders = dict()
    self._transmission_parameter_builders = dict()
    self._source_getters = dict()
    self._reception_parameter_builders = dict()
    self._sink_getters = dict()
    self._probe_builders = dict()
def __init__(self, dt=0.001, label=None, decoder_cache=None, builder=None):
    self.dt = dt
    self.label = label
    self.decoder_cache = (
        NoDecoderCache() if decoder_cache is None else decoder_cache)

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.operators = []
    self.params = {}
    self.probes = []
    self.seeds = {}
    self.seeded = {}

    self.sig = collections.defaultdict(dict)
    self.sig["common"][0] = Signal(
        np.array(0.0, dtype=rc.float_dtype), readonly=True, name="ZERO")
    self.sig["common"][1] = Signal(
        np.array(1.0, dtype=rc.float_dtype), readonly=True, name="ONE")

    self.step = Signal(np.array(0, dtype=rc.int_dtype), name="step")
    self.time = Signal(np.array(0, dtype=rc.float_dtype), name="time")
    self.add_op(TimeUpdate(self.step, self.time))

    self.builder = Builder() if builder is None else builder
    self.build_callback = None
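The variant above takes decoder_cache=None and substitutes NoDecoderCache() inside the body, so each Model gets its own cache object; the earlier variants bind NoDecoderCache() once, when the function is defined, and therefore share that single instance across all Models built with the default. A minimal sketch of the difference, using hypothetical stand-in classes rather than the real Model (only the default-argument handling is illustrated):

# Hypothetical stand-ins illustrating the default-argument difference only;
# assumes the standard import path for NoDecoderCache.
from nengo.cache import NoDecoderCache

class SharedDefault:
    def __init__(self, decoder_cache=NoDecoderCache()):
        # the default instance is created once, at function-definition time
        self.decoder_cache = decoder_cache

class PerInstanceDefault:
    def __init__(self, decoder_cache=None):
        # a fresh instance is created on every call that uses the default
        self.decoder_cache = (
            NoDecoderCache() if decoder_cache is None else decoder_cache)

a, b = SharedDefault(), SharedDefault()
assert a.decoder_cache is b.decoder_cache        # one shared cache object
c, d = PerInstanceDefault(), PerInstanceDefault()
assert c.decoder_cache is not d.decoder_cache    # each model gets its own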
def run_test(nengo_network, nodes_as_function_of_time,
             nodes_as_function_of_time_time_period):
    # build via the nengo_spinnaker_gfe (GFE) implementation
    seed = 11111
    timer_period = 10
    app_graph_builder = NengoApplicationGraphBuilder()
    (app_graph, host_network, nengo_to_app_graph_map,
     random_number_generator) = app_graph_builder(
        nengo_network=nengo_network, machine_time_step=1.0,
        nengo_random_number_generator_seed=seed,
        decoder_cache=NoDecoderCache(),
        utilise_extra_core_for_probes=True,
        nengo_nodes_as_function_of_time=nodes_as_function_of_time,
        function_of_time_nodes_time_period=(
            nodes_as_function_of_time_time_period))
    interposer_installer = NengoUtiliseInterposers()
    app_graph = interposer_installer(
        app_graph, nengo_to_app_graph_map, random_number_generator, seed)
    machine_graph, graph_mapper = NengoPartitioner(app_graph)

    # build via the original nengo_spinnaker implementation
    nengo_spinnaker.add_spinnaker_params(nengo_network.config)
    for nengo_node in nodes_as_function_of_time:
        nengo_network.config[nengo_node].function_of_time = True
    for nengo_node in nodes_as_function_of_time_time_period:
        nengo_network.config[nengo_node].function_of_time_period = \
            nodes_as_function_of_time_time_period[nengo_node]
    io_controller = Ethernet()
    builder_kwargs = io_controller.builder_kwargs
    nengo_spinnaker_network_builder = Model()
    nengo_spinnaker_network_builder.build(nengo_network, **builder_kwargs)
    nengo_spinnaker_network_builder.add_interposers()
    nengo_spinnaker_network_builder.make_netlist(timer_period)
    nengo_operators = dict()
    nengo_operators.update(
        nengo_spinnaker_network_builder.object_operators)
    nengo_operators.update(io_controller._sdp_receivers)
    nengo_operators.update(io_controller._sdp_transmitters)

    match = compare_against_the_nengo_spinnaker_and_gfe_impls(
        nengo_operators, nengo_to_app_graph_map,
        nengo_spinnaker_network_builder.connection_map, app_graph,
        nengo_spinnaker_network_builder)
    if not match:
        raise Exception("did not match")
def __init__(self, dt=0.001, label=None, builder=None):
    self.dt = dt
    self.label = label
    self.builder = Builder() if builder is None else builder
    self.build_callback = None
    self.decoder_cache = NoDecoderCache()

    # Objects created by the model for simulation on Loihi
    self.inputs = OrderedDict()
    self.blocks = OrderedDict()

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.objs = defaultdict(dict)
    self.params = {}  # Holds data generated when building objects
    self.probes = []
    self.probe_conns = {}

    self.seeds = {}
    self.seeded = {}

    # --- other (typically standard) parameters
    # Filter on decode neurons
    self.decode_tau = 0.005
    # ^TODO: how to choose this filter? Even though the input is spikes,
    # it may not be absolutely necessary since tau_rc provides a filter,
    # and maybe we don't want double filtering if connection has a filter

    self.decode_neurons = Preset10DecodeNeurons(dt=dt)
    self.node_neurons = OnOffDecodeNeurons(dt=dt)

    # voltage threshold for non-spiking neurons (i.e. voltage decoders)
    self.vth_nonspiking = 10

    # limit for clipping intercepts, to avoid neurons with high gains
    self.intercept_limit = 0.95

    # scaling for PES errors, before rounding and clipping to -127..127
    self.pes_error_scale = 100.

    # learning weight exponent for PES (controls the maximum weight
    # magnitude/weight resolution)
    self.pes_wgt_exp = 4

    # Will be provided by Simulator
    self.chip2host_params = {}
def __init__(self, dt=0.001, label=None, decoder_cache=NoDecoderCache()):
    self.dt = dt
    self.label = label
    self.decoder_cache = decoder_cache

    # We want to keep track of the toplevel network
    self.toplevel = None
    # Builders can set a config object to affect sub-builders
    self.config = None

    # Resources used by the build process.
    self.operators = []
    self.params = {}
    self.seeds = {}
    self.probes = []
    self.sig = collections.defaultdict(dict)
def __init__(self, dt=0.001, label=None, decoder_cache=NoDecoderCache()):
    self.dt = dt
    self.label = label
    self.decoder_cache = decoder_cache

    # We want to keep track of the toplevel network
    self.toplevel = None
    # Builders can set a config object to affect sub-builders
    self.config = None

    # Resources used by the build process.
    self.operators = []
    self.params = {}
    self.seeds = {}
    self.probes = []
    self.sig = collections.defaultdict(dict)
    self.sig['common'][0] = Signal(0.0, name='ZERO', readonly=True)
    self.sig['common'][1] = Signal(1.0, name='ONE', readonly=True)
def ncc(model, sample, zscore, seed, upsample=False):
    if upsample:
        fs_scale = 50000. / TIMIT.fs
        resample_len = int(sample.shape[0] * fs_scale)
        sample = lengthen(sample, resample_len)
        model.fs = 50000
    model.audio = sample
    net = model.build(nengo.Network(seed=seed))
    with net:
        pr = nengo.Probe(net.output, synapse=0.01)

    # Disable decoder cache for this model
    _model = nengo.builder.Model(dt=0.001, decoder_cache=NoDecoderCache())
    sim = nengo.Simulator(net, model=_model)
    sim.run(model.t_audio, progress_bar=False)
    model.audio = np.zeros(1)

    feat = sim.data[pr]
    if zscore:
        feat = stats.zscore(feat, axis=0)
        feat[np.isnan(feat)] = 0.  # If variance is 0, can get nans.
    return feat
def __init__(self, dt=0.001, label=None, decoder_cache=NoDecoderCache()):
    self.dt = dt
    self.label = label
    self.decoder_cache = decoder_cache

    # We want to keep track of the toplevel network
    self.toplevel = None
    # Builders can set a config object to affect sub-builders
    self.config = None

    # Resources used by the build process.
    self.operators = []
    self.params = {}
    self.seeds = {}
    self.probes = []
    self.sig = collections.defaultdict(dict)
    self.sig['common'][0] = Signal(0., readonly=True, name='ZERO')
    self.sig['common'][1] = Signal(1., readonly=True, name='ONE')

    self.step = Signal(np.array(0, dtype=np.int64), name='step')
    self.time = Signal(np.array(0, dtype=np.float64), name='time')
    self.add_op(TimeUpdate(self.step, self.time))
def test_cache_performance(tmpdir, Simulator, seed):
    cache_dir = str(tmpdir)

    model = nengo.Network(seed=seed)
    with model:
        nengo.Connection(nengo.Ensemble(2000, 10), nengo.Ensemble(2000, 10))

    with Timer() as t_no_cache:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=NoDecoderCache()))
    with Timer() as t_cache_miss:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir)))
    with Timer() as t_cache_hit:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir)))

    assert calc_relative_timer_diff(t_no_cache, t_cache_miss) < 0.1
    assert calc_relative_timer_diff(t_cache_hit, t_no_cache) > 0.4
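calc_relative_timer_diff is used above but not defined in this snippet. One plausible definition consistent with the assertions, assuming the Timer context manager exposes a duration attribute (as nengo.utils.stdlib.Timer does), is sketched below; the actual helper in the test module may be defined differently:

def calc_relative_timer_diff(t1, t2):
    # Hypothetical helper, not the real one from the test module:
    # fraction by which t1 ran faster than t2, so a value of 0.4 means
    # t1 took roughly 40% less time than t2.
    return (t2.duration - t1.duration) / t2.duration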
def __init__(self, dt=0.001, label=None, builder=None):
    self.dt = dt
    self.label = label
    self.builder = Builder() if builder is None else builder
    self.build_callback = None
    self.decoder_cache = NoDecoderCache()

    # TODO: these models may not look/behave exactly the same as
    # standard nengo models, because they don't have a toplevel network
    # built into them or configs set
    self.host_pre = NengoModel(
        dt=float(dt),
        label="%s:host_pre, dt=%f" % (label, dt),
        decoder_cache=NoDecoderCache(),
    )
    self.host = NengoModel(
        dt=float(dt),
        label="%s:host, dt=%f" % (label, dt),
        decoder_cache=NoDecoderCache(),
    )

    # Objects created by the model for simulation on Loihi
    self.inputs = OrderedDict()
    self.blocks = OrderedDict()
    self.block_shapes = {}
    self.probes = []

    # Will be filled in by the simulator __init__
    self.split = None

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.objs = defaultdict(dict)  # maps Nengo objects to Loihi objects
    self.params = {}  # maps Nengo objects to data generated during build
    self.nengo_probes = []  # list of Nengo probes in the model
    self.nengo_probe_conns = {}
    self.seeds = {}
    self.seeded = {}

    # --- other (typically standard) parameters
    # Filter on decode neurons
    self.decode_tau = 0.005
    # ^TODO: how to choose this filter? Even though the input is spikes,
    # it may not be absolutely necessary since tau_rc provides a filter,
    # and maybe we don't want double filtering if connection has a filter

    self.decode_neurons = Preset10DecodeNeurons(dt=dt)
    self.node_neurons = OnOffDecodeNeurons(dt=dt, is_input=True)

    # voltage threshold for non-spiking neurons (i.e. voltage decoders)
    self.vth_nonspiking = 10

    # limit for clipping intercepts, to avoid neurons with high gains
    self.intercept_limit = 0.95

    # scaling for PES errors, before rounding and clipping to -127..127
    self.pes_error_scale = 100.0

    # learning weight exponent for PES (controls the maximum weight
    # magnitude/weight resolution)
    self.pes_wgt_exp = 4

    # Used to track interactions between host models
    self.chip2host_params = {}
    self.chip2host_receivers = OrderedDict()
    self.host2chip_senders = OrderedDict()
    self.host2chip_pes_senders = OrderedDict()
    self.needs_sender = {}
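The pes_error_scale comment above describes scaling PES errors before rounding and clipping them to the signed range -127..127. The short sketch below only illustrates that transformation on hypothetical error values; it is not the actual nengo-loihi build code:

import numpy as np

pes_error_scale = 100.0                      # as set in the constructor above
error = np.array([-2.0, -0.3, 0.01, 0.9])    # hypothetical PES error signal

# Scale, round, then clip into the signed 8-bit range mentioned in the comment.
discretized = np.clip(
    np.round(error * pes_error_scale), -127, 127).astype(np.int8)
print(discretized)  # [-127  -30    1   90]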
def run_test(nengo_network, nodes_as_function_of_time,
             nodes_as_function_of_time_time_period):
    # build via the nengo_spinnaker_gfe (GFE) implementation
    seed = 11111
    app_graph_builder = NengoApplicationGraphBuilder()
    (app_graph, host_network, nengo_to_app_graph_map,
     random_number_generator) = app_graph_builder(
        nengo_network=nengo_network, machine_time_step=1.0,
        nengo_random_number_generator_seed=1234,
        decoder_cache=NoDecoderCache(),
        utilise_extra_core_for_probes=True,
        nengo_nodes_as_function_of_time=nodes_as_function_of_time,
        function_of_time_nodes_time_period=(
            nodes_as_function_of_time_time_period))
    interposer_installer = NengoUtiliseInterposers()
    app_graph = interposer_installer(app_graph, random_number_generator, seed)
    virtual_machine_generator = VirtualMachineGenerator()
    machine = virtual_machine_generator(
        width=16, height=16, virtual_has_wrap_arounds=False, version=5,
        n_cpus_per_chip=18, with_monitors=True, down_chips=None,
        down_cores=None, down_links=None, max_sdram_size=None)
    partitioner = NengoPartitioner()
    machine_graph, graph_mapper = partitioner(
        app_graph, machine, random_number_generator,
        pre_allocated_resources=None)

    # build via the original nengo_spinnaker implementation
    nengo_spinnaker.add_spinnaker_params(nengo_network.config)
    for nengo_node in nodes_as_function_of_time:
        nengo_network.config[nengo_node].function_of_time = True
    for nengo_node in nodes_as_function_of_time_time_period:
        nengo_network.config[nengo_node].function_of_time_period = \
            nodes_as_function_of_time_time_period[nengo_node]
    io_controller = Ethernet()
    builder_kwargs = io_controller.builder_kwargs
    nengo_spinnaker_network_builder = Model()
    nengo_spinnaker_network_builder.build(nengo_network, **builder_kwargs)
    net_list = nengo_spinnaker_network_builder.make_netlist(200)
    nengo_app_operators = dict()
    nengo_app_operators.update(
        nengo_spinnaker_network_builder.object_operators)
    nengo_app_operators.update(io_controller._sdp_receivers)
    nengo_app_operators.update(io_controller._sdp_transmitters)

    match = compare_against_the_nengo_spinnaker_and_gfe_impls_machine_graphs(
        # nengo bits
        nengo_app_operators, nengo_to_app_graph_map,
        nengo_spinnaker_network_builder.connection_map, net_list,
        # gfe bits
        machine_graph, graph_mapper, app_graph,
        nengo_spinnaker_network_builder)
    if not match:
        raise Exception("did not match")
def test_no_decoder_cache():
    cache = NoDecoderCache()
    assert cache.get_size_in_bytes() == 0
    assert cache.get_size() == "0 B"
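The common pattern in the snippets above is to hand a NoDecoderCache to a builder Model so that decoder solving is never cached. A minimal usage sketch mirroring that pattern, assuming only the standard nengo imports:

import nengo
from nengo.cache import NoDecoderCache

with nengo.Network(seed=0) as net:
    a = nengo.Ensemble(50, 1)
    b = nengo.Ensemble(50, 1)
    nengo.Connection(a, b)

# Build with caching disabled: every decoder solve runs from scratch.
model = nengo.builder.Model(dt=0.001, decoder_cache=NoDecoderCache())
with nengo.Simulator(net, model=model) as sim:
    sim.run(0.1)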
def __init__(
        self, network, dt=constants.DEFAULT_DT,
        time_scale=constants.DEFAULT_TIME_SCALE, host_name=None,
        graph_label=None, database_socket_addresses=None,
        dsg_algorithm=None, n_chips_required=None,
        extra_pre_run_algorithms=None, extra_post_run_algorithms=None,
        decoder_cache=NoDecoderCache(), function_of_time_nodes=None,
        function_of_time_nodes_time_period=None):
    """Create a new Simulator with the given network.

    :param time_scale: Scaling factor to apply to the simulation, e.g.,\
        a value of `0.5` will cause the simulation to run at twice\
        real-time.
    :type time_scale: float
    :param host_name: Hostname of the SpiNNaker machine to use; if None\
        then the machine specified in the config file will be used.
    :type host_name: basestring or None
    :param dt: The length of a simulator timestep, in seconds.
    :type dt: float
    :param graph_label: human readable graph label
    :type graph_label: basestring
    :param database_socket_addresses:
    :type database_socket_addresses:
    :param dsg_algorithm:
    :type dsg_algorithm:
    :param n_chips_required:
    :type n_chips_required:
    :param extra_post_run_algorithms:
    :type extra_post_run_algorithms:
    :param extra_pre_run_algorithms:
    :type extra_pre_run_algorithms:
    :rtype: None
    """
    self._nengo_object_to_data_map = dict()
    self._profiled_nengo_object_to_data_map = dict()
    self._nengo_to_app_graph_map = None
    self._app_graph_to_nengo_operator_map = None
    self._nengo_app_machine_graph_mapper = None

    executable_finder = ExecutableFinder()
    executable_finder.add_path(os.path.dirname(binaries.__file__))

    # Calculate the machine timestep, this is measured in microseconds
    # (hence the 1e6 scaling factor).
    machine_time_step = int(
        (dt / time_scale) * constants.SECONDS_TO_MICRO_SECONDS_CONVERTER)

    xml_paths = list()
    xml_paths.append(os.path.join(
        os.path.dirname(overridden_mapping_algorithms.__file__),
        self.NENGO_ALGORITHM_XML_FILE_NAME))

    SpiNNaker.__init__(
        self, executable_finder, host_name=host_name,
        graph_label=graph_label,
        database_socket_addresses=database_socket_addresses,
        dsg_algorithm=dsg_algorithm,
        n_chips_required=n_chips_required,
        extra_pre_run_algorithms=extra_pre_run_algorithms,
        extra_post_run_algorithms=extra_post_run_algorithms,
        time_scale_factor=time_scale,
        default_config_paths=[os.path.join(
            os.path.dirname(__file__), self.CONFIG_FILE_NAME)],
        machine_time_step=machine_time_step,
        extra_xml_paths=xml_paths,
        chip_id_allocator="NengoMallocBasedChipIDAllocator")

    # only add the sdram edge allocator if not using a virtual board
    extra_mapping_algorithms = list()
    if not helpful_functions.read_config_boolean(
            self.config, "Machine", "virtual_board"):
        extra_mapping_algorithms.append(
            "NengoSDRAMOutgoingPartitionAllocator")

    if function_of_time_nodes is None:
        function_of_time_nodes = list()
    if function_of_time_nodes_time_period is None:
        function_of_time_nodes_time_period = list()

    # update the main flow with new algorithms and params
    self.extend_extra_mapping_algorithms(extra_mapping_algorithms)
    self.update_extra_inputs(
        {"UserCreateDatabaseFlag": True,
         'DefaultNotifyHostName': self.config.get_str(
             "Database", "notify_hostname"),
         'NengoNodesAsFunctionOfTime': function_of_time_nodes,
         'NengoNodesAsFunctionOfTimeTimePeriod':
             function_of_time_nodes_time_period,
         'NengoModel': network,
         'NengoDecoderCache': decoder_cache,
         "NengoNodeIOSetting": self.config.get("Simulator", "node_io"),
         "NengoEnsembleProfile": self.config.getboolean(
             "Ensemble", "profile"),
         "NengoEnsembleProfileNumSamples":
             helpful_functions.read_config_int(
                 self.config, "Ensemble", "profile_num_samples"),
         "NengoRandomNumberGeneratorSeed":
             helpful_functions.read_config_int(
                 self.config, "Simulator", "global_seed"),
         "NengoUtiliseExtraCoreForProbes": self.config.getboolean(
             "Node", "utilise_extra_core_for_probes"),
         "MachineTimeStepInSeconds": dt,
         "ReceiveBufferPort": helpful_functions.read_config_int(
             self.config, "Buffers", "receive_buffer_port"),
         "ReceiveBufferHost": self.config.get(
             "Buffers", "receive_buffer_host"),
         "MinBufferSize": self.config.getint(
             "Buffers", "minimum_buffer_sdram"),
         "MaxSinkBuffingSize": self.config.getint(
             "Buffers", "sink_vertex_max_sdram_for_buffing"),
         "UsingAutoPauseAndResume": self.config.getboolean(
             "Buffers", "use_auto_pause_and_resume"),
         "TimeBetweenRequests": self.config.getint(
             "Buffers", "time_between_requests"),
         "BufferSizeBeforeReceive": self.config.getint(
             "Buffers", "buffer_size_before_receive"),
         "SpikeBufferMaxSize": self.config.getint(
             "Buffers", "spike_buffer_size"),
         "VariableBufferMaxSize": self.config.getint(
             "Buffers", "variable_buffer_size")})

    # build app graph, machine graph, as the main tools expect an
    # application / machine graph level, and cannot go from random to app
    # graph.
    nengo_app_graph_generator = NengoApplicationGraphGenerator()
    (self._nengo_operator_graph, host_network,
     self._nengo_to_app_graph_map, self._app_graph_to_nengo_operator_map,
     random_number_generator) = nengo_app_graph_generator(
        self._extra_inputs["NengoModel"], self.machine_time_step,
        self._extra_inputs["NengoRandomNumberGeneratorSeed"],
        self._extra_inputs["NengoDecoderCache"],
        self._extra_inputs["NengoUtiliseExtraCoreForProbes"],
        self._extra_inputs["NengoNodesAsFunctionOfTime"],
        self._extra_inputs["NengoNodesAsFunctionOfTimeTimePeriod"],
        self.config.getboolean("Node", "optimise_utilise_interposers"),
        self._print_timings, self._do_timings, self._xml_paths,
        self._pacman_executor_provenance_path,
        self._extra_inputs["NengoEnsembleProfile"],
        self._extra_inputs["NengoEnsembleProfileNumSamples"],
        self._extra_inputs["ReceiveBufferPort"],
        self._extra_inputs["ReceiveBufferHost"],
        self._extra_inputs["MinBufferSize"],
        self._extra_inputs["MaxSinkBuffingSize"],
        self._extra_inputs["UsingAutoPauseAndResume"],
        self._extra_inputs["TimeBetweenRequests"],
        self._extra_inputs["BufferSizeBeforeReceive"],
        self._extra_inputs["SpikeBufferMaxSize"],
        self._extra_inputs["VariableBufferMaxSize"],
        self._extra_inputs["MachineTimeStepInSeconds"])

    # add the extra outputs as new inputs
    self.update_extra_inputs(
        {"NengoHostGraph": host_network,
         "NengoGraphToAppGraphMap": self._nengo_to_app_graph_map,
         "AppGraphToNengoOperatorMap":
             self._app_graph_to_nengo_operator_map,
         "NengoRandomNumberGenerator": random_number_generator,
         "NengoOperatorGraph": self._nengo_operator_graph})