def test_decode_neuron_str():
    """Check that ``str`` on each DecodeNeurons variant renders its parameters."""
    cases = [
        (DecodeNeurons(dt=0.005), "DecodeNeurons(dt=0.005)"),
        (
            OnOffDecodeNeurons(pairs_per_dim=2, dt=0.002, rate=None),
            "OnOffDecodeNeurons(pairs_per_dim=2, dt=0.002, rate=250)",
        ),
        (
            NoisyDecodeNeurons(1, rate=20),
            "NoisyDecodeNeurons(pairs_per_dim=1, dt=0.001, rate=20, noise_exp=-2)",
        ),
        (
            Preset5DecodeNeurons(),
            "Preset5DecodeNeurons(dt=0.001, rate=200)",
        ),
        (
            Preset10DecodeNeurons(dt=0.0001, rate=0.5),
            "Preset10DecodeNeurons(dt=0.0001, rate=0.5)",
        ),
    ]
    for neurons, expected in cases:
        assert str(neurons) == expected
def __init__(self, dt=0.001, label=None, builder=None):
    """Set up an empty build model.

    Parameters
    ----------
    dt : float, optional (Default: 0.001)
        Simulation timestep, in seconds. Also passed on to the default
        decode-neuron and node-neuron objects created below.
    label : str or None, optional
        Optional name for this model.
    builder : Builder or None, optional
        Builder used to construct the model; a fresh ``Builder()`` is
        created when none is given.
    """
    self.dt = dt
    self.label = label
    self.builder = Builder() if builder is None else builder
    self.build_callback = None
    self.decoder_cache = NoDecoderCache()

    # Objects created by the model for simulation on Loihi
    self.inputs = OrderedDict()
    self.blocks = OrderedDict()

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.objs = defaultdict(dict)
    self.params = {}  # Holds data generated when building objects
    self.probes = []
    self.probe_conns = {}

    # Per-object seeds; seeded marks whether the seed was user-provided
    # (presumably keyed by Nengo object — TODO confirm against the builder)
    self.seeds = {}
    self.seeded = {}

    # --- other (typically standard) parameters
    # Filter on decode neurons
    self.decode_tau = 0.005
    # ^TODO: how to choose this filter? Even though the input is spikes,
    # it may not be absolutely necessary since tau_rc provides a filter,
    # and maybe we don't want double filtering if connection has a filter

    self.decode_neurons = Preset10DecodeNeurons(dt=dt)
    self.node_neurons = OnOffDecodeNeurons(dt=dt)

    # voltage threshold for non-spiking neurons (i.e. voltage decoders)
    self.vth_nonspiking = 10

    # limit for clipping intercepts, to avoid neurons with high gains
    self.intercept_limit = 0.95

    # scaling for PES errors, before rounding and clipping to -127..127
    self.pes_error_scale = 100.

    # learning weight exponent for PES (controls the maximum weight
    # magnitude/weight resolution)
    self.pes_wgt_exp = 4

    # Will be provided by Simulator
    self.chip2host_params = {}
DecodeNeurons, NoisyDecodeNeurons, OnOffDecodeNeurons, Preset5DecodeNeurons, Preset10DecodeNeurons, ) @pytest.mark.parametrize( "decode_neurons, tolerance", [ (OnOffDecodeNeurons(), 0.35), (NoisyDecodeNeurons(5), 0.12), (NoisyDecodeNeurons(10), 0.11), (Preset5DecodeNeurons(), 0.06), (Preset10DecodeNeurons(), 0.03), ], ) def test_add_inputs(decode_neurons, tolerance, Simulator, seed, plt): sim_time = 2.0 pres_time = sim_time / 4 eval_time = sim_time / 8 stim_values = [[0.5, 0.5], [0.5, -0.9], [-0.7, -0.3], [-0.3, 1.0]] stim_times = np.arange(0, sim_time, pres_time) stim_fn_a = nengo.processes.Piecewise( {t: stim_values[i][0] for i, t in enumerate(stim_times)}) stim_fn_b = nengo.processes.Piecewise( {t: stim_values[i][1] for i, t in enumerate(stim_times)})
def __init__(self, dt=0.001, label=None, builder=None):
    """Set up an empty build model, including nested host models.

    Parameters
    ----------
    dt : float, optional (Default: 0.001)
        Simulation timestep, in seconds. Also used for the nested host
        models and the default decode-neuron objects created below.
    label : str or None, optional
        Optional name for this model; embedded in the host models' labels.
    builder : Builder or None, optional
        Builder used to construct the model; a fresh ``Builder()`` is
        created when none is given.
    """
    self.dt = dt
    self.label = label
    self.builder = Builder() if builder is None else builder
    self.build_callback = None
    self.decoder_cache = NoDecoderCache()

    # TODO: these models may not look/behave exactly the same as
    # standard nengo models, because they don't have a toplevel network
    # built into them or configs set
    # NOTE(review): a None label renders literally as "None:host_pre, ..."
    # in these labels — confirm that is intended
    self.host_pre = NengoModel(
        dt=float(dt),
        label="%s:host_pre, dt=%f" % (label, dt),
        decoder_cache=NoDecoderCache(),
    )
    self.host = NengoModel(
        dt=float(dt),
        label="%s:host, dt=%f" % (label, dt),
        decoder_cache=NoDecoderCache(),
    )

    # Objects created by the model for simulation on Loihi
    self.inputs = OrderedDict()
    self.blocks = OrderedDict()
    self.block_shapes = {}
    self.probes = []

    # Will be filled in by the simulator __init__
    self.split = None

    # Will be filled in by the network builder
    self.toplevel = None
    self.config = None

    # Resources used by the build process
    self.objs = defaultdict(dict)  # maps Nengo objects to Loihi objects
    self.params = {}  # maps Nengo objects to data generated during build
    self.nengo_probes = []  # list of Nengo probes in the model
    self.nengo_probe_conns = {}

    # Per-object seeds; seeded marks whether the seed was user-provided
    # (presumably keyed by Nengo object — TODO confirm against the builder)
    self.seeds = {}
    self.seeded = {}

    # --- other (typically standard) parameters
    # Filter on decode neurons
    self.decode_tau = 0.005
    # ^TODO: how to choose this filter? Even though the input is spikes,
    # it may not be absolutely necessary since tau_rc provides a filter,
    # and maybe we don't want double filtering if connection has a filter

    self.decode_neurons = Preset10DecodeNeurons(dt=dt)
    self.node_neurons = OnOffDecodeNeurons(dt=dt, is_input=True)

    # voltage threshold for non-spiking neurons (i.e. voltage decoders)
    self.vth_nonspiking = 10

    # limit for clipping intercepts, to avoid neurons with high gains
    self.intercept_limit = 0.95

    # scaling for PES errors, before rounding and clipping to -127..127
    self.pes_error_scale = 100.0

    # learning weight exponent for PES (controls the maximum weight
    # magnitude/weight resolution)
    self.pes_wgt_exp = 4

    # Used to track interactions between host models
    self.chip2host_params = {}
    self.chip2host_receivers = OrderedDict()
    self.host2chip_senders = OrderedDict()
    self.host2chip_pes_senders = OrderedDict()
    self.needs_sender = {}