def a_run(self):
    """Run the network and check both recorded populations spiked 263 times."""
    v, spikes, v2, spikes2 = do_run(plot=False)
    # Convert the neo blocks into plain spike arrays before counting.
    converted = neo_convertor.convert_spikes(spikes)
    converted2 = neo_convertor.convert_spikes(spikes2)
    self.assertEqual(263, len(converted))
    self.assertEqual(263, len(converted2))
def test_simple_spikes(self):
    """Exercise the legacy data accessors against mocked recorder data.

    The recorder and simulator clock are monkey-patched so no hardware
    run is needed; each accessor must hand the mock data back unchanged.
    """
    sim.setup(timestep=1.0)
    pop = sim.Population(4, sim.IF_curr_exp(), label="a label")
    # Patch the recorder and the clock so recorded data comes from mocks.
    Recorder.get_spikes = mock_spikes
    Recorder.get_recorded_matrix = mock_v_all
    get_simulator().get_current_time = mock_time

    # Deprecated getSpikes() accessor.
    neo = pop.getSpikes()
    assert numpy.array_equal(
        neo_convertor.convert_spikes(neo), mock_spikes(None))
    assert 4 == len(neo.segments[0].spiketrains)

    # gather=False has no effect; just checking it is accepted here.
    neo = pop.get_data("spikes", gather=False)
    assert numpy.array_equal(
        neo_convertor.convert_spikes(neo), mock_spikes(None))
    assert 4 == len(neo.segments[0].spiketrains)

    # Deprecated get_v() accessor.
    neo = pop.get_v()
    v = neo.segments[0].filter(name='v')[0].magnitude
    (target, _, _) = mock_v_all(None, "any")
    assert numpy.array_equal(v, target)

    # Deprecated get_gsyn() accessor; the mock returns the same matrix
    # for every variable, so both conductances match `target`.
    neo = pop.get_gsyn()
    exc = neo.segments[0].filter(name='gsyn_exc')[0].magnitude
    assert numpy.array_equal(exc, target)
    inh = neo.segments[0].filter(name='gsyn_inh')[0].magnitude
    assert numpy.array_equal(inh, target)
    sim.end()
def test_write(self):
    """Write recorded data to pickle files and read it back.

    BUGFIX: the pickle files are now opened in binary mode ("rb").
    ``pickle.load`` requires a binary file object; in text mode the raw
    pickle bytes are decoded as UTF-8, which is itself a source of
    spurious ``UnicodeDecodeError``.  The SkipTest guards are kept for
    the upstream neo pickling issue referenced below.
    """
    sim.setup(timestep=1.0)
    pop = sim.Population(4, sim.IF_curr_exp(), label="a label")
    # Patch the recorder and the clock so recorded data comes from mocks.
    Recorder.get_spikes = mock_spikes
    Recorder.get_recorded_matrix = mock_v_all
    get_simulator().get_current_time = mock_time

    # Note gather=False will be ignored just testing it can be
    pop.write_data("spikes.pkl", "spikes", gather=False)
    try:
        with open("spikes.pkl", "rb") as pkl:
            neo = pickle.load(pkl)
            spikes = neo_convertor.convert_spikes(neo)
            assert numpy.array_equal(spikes, mock_spikes(None))
    except UnicodeDecodeError as e:
        raise SkipTest(
            "https://github.com/NeuralEnsemble/python-neo/issues/529"
        ) from e

    # Deprecated printSpikes() accessor writes the same data.
    pop.printSpikes("spikes.pkl")
    try:
        with open("spikes.pkl", "rb") as pkl:
            neo = pickle.load(pkl)
            spikes = neo_convertor.convert_spikes(neo)
            assert numpy.array_equal(spikes, mock_spikes(None))
    except UnicodeDecodeError as e:
        raise SkipTest(
            "https://github.com/NeuralEnsemble/python-neo/issues/529"
        ) from e

    (target, _, _) = mock_v_all(None, "any")

    # Deprecated print_v() accessor.
    pop.print_v("v.pkl")
    with open("v.pkl", "rb") as pkl:
        neo = pickle.load(pkl)
        v = neo.segments[0].filter(name='v')[0].magnitude
        assert v.shape == target.shape
        assert numpy.array_equal(v, target)

    # Deprecated print_gsyn() accessor; the mock returns the same matrix
    # for every variable, so both conductances match `target`.
    pop.print_gsyn("gsyn.pkl")
    with open("gsyn.pkl", "rb") as pkl:
        neo = pickle.load(pkl)
        exc = neo.segments[0].filter(name='gsyn_exc')[0].magnitude
        assert numpy.array_equal(exc, target)
        inh = neo.segments[0].filter(name='gsyn_inh')[0].magnitude
        assert numpy.array_equal(inh, target)
    sim.end()
def do_run(nNeurons):
    """Run a ring of Izhikevich neurons kicked off by a single input spike.

    :param nNeurons: size of the ring population
    :return: (v, gsyn, spikes) as numpy arrays converted from neo data
    """
    p.setup(timestep=1.0, min_delay=1.0)
    p.set_number_of_neurons_per_core(p.Izhikevich, 100)

    cell_params_izk = {'a': 0.02, 'b': 0.2, 'c': -65, 'd': 8, 'v': -75,
                       'u': 0, 'tau_syn_E': 2, 'tau_syn_I': 2, 'i_offset': 0}

    weight_to_spike = 40
    delay = 1

    # Ring connectivity: neuron i drives neuron (i + 1) mod n.
    connections = [(src, (src + 1) % nNeurons, weight_to_spike, delay)
                   for src in range(nNeurons)]
    # Single input spike at 50 ms injected into neuron 0.
    injectionConnection = [(0, 0, weight_to_spike, delay)]
    spikeArray = {'spike_times': [[50]]}

    populations = [
        p.Population(nNeurons, p.Izhikevich, cell_params_izk,
                     label='pop_1'),
        p.Population(1, p.SpikeSourceArray, spikeArray,
                     label='inputSpikes_1')]
    projections = [
        p.Projection(populations[0], populations[0],
                     p.FromListConnector(connections)),
        p.Projection(populations[1], populations[0],
                     p.FromListConnector(injectionConnection))]

    populations[0].record("v")
    populations[0].record("gsyn_exc")
    populations[0].record("spikes")

    p.run(500)

    neo = populations[0].get_data(["v", "spikes", "gsyn_exc"])
    v = neo_convertor.convert_data(neo, name="v")
    gsyn = neo_convertor.convert_data(neo, name="gsyn_exc")
    spikes = neo_convertor.convert_spikes(neo)

    p.end()
    return (v, gsyn, spikes)
def test_get_spikes_by_view(self):
    """A 2-neuron view must return spiketrains for its own neurons only."""
    sim.setup(timestep=1.0)
    pop = sim.Population(4, sim.IF_curr_exp(), label="a label")
    # Patch the recorder and the clock so spikes come from the mock.
    Recorder.get_spikes = mock_spikes
    get_simulator().get_current_time = mock_time

    view = pop[1:3]
    view.record("spikes")
    neo = view.get_data("spikes", gather=False)

    spikes = neo_convertor.convert_spikes(neo)
    # Only neurons 1 and 2 belong to the view.
    target = trim_spikes(mock_spikes(None), [1, 2])
    assert numpy.array_equal(spikes, target)
    trains = neo.segments[0].spiketrains
    assert 2 == len(trains)
    sim.end()
def test_get_spikes_view_missing(self):
    """A view neuron with no recorded spikes still gets an (empty) train."""
    sim.setup(timestep=1.0)
    pop = sim.Population(4, sim.IF_curr_exp(), label="a label")
    # Patch the recorder and the clock so recorded data comes from mocks.
    Recorder.get_spikes = mock_spikes
    Recorder.get_recorded_matrix = mock_v_all
    get_simulator().get_current_time = mock_time

    view = pop[2:4]
    neo = view.get_data("spikes")

    spikes = neo_convertor.convert_spikes(neo)
    # The mock data only contains spikes for neuron 2 of this view.
    target = trim_spikes(mock_spikes(None), [2])
    assert numpy.array_equal(spikes, target)

    trains = neo.segments[0].spiketrains
    assert 2 == len(trains)
    # Neuron 2: two spikes recorded.
    assert 2 == len(trains[0])
    assert 2 == trains[0].annotations['source_index']
    # Neuron 3: no spikes, but an empty train must still be present.
    assert 0 == len(trains[1])
    assert 3 == trains[1].annotations['source_index']
    sim.end()
def do_run(nNeurons):
    """Run a population of LIF neurons driven only by a constant offset
    current, with per-neuron initial voltages.

    :param nNeurons: size of the population
    :return: (v, gsyn, spikes) as numpy arrays converted from neo data
    """
    p.setup(timestep=1.0, min_delay=1.0)

    cell_params_lif_in = {
        'tau_m': 333.33, 'cm': 208.33,
        # Distinct starting voltages so each neuron's trace differs.
        'v': [0.0, 0.0146789550781, 0.029296875, 0.0438842773438,
              0.0584106445312],
        'v_rest': 0.1, 'v_reset': 0.0, 'v_thresh': 1.0,
        'tau_syn_E': 1, 'tau_syn_I': 2, 'tau_refrac': 2.5,
        'i_offset': 3.0}

    pop1 = p.Population(nNeurons, p.IF_curr_exp, cell_params_lif_in,
                        label='pop_0')
    for variable in ("v", "gsyn_exc", "spikes"):
        pop1.record(variable)

    p.run(100)

    neo = pop1.get_data(["v", "spikes", "gsyn_exc"])
    v = neo_convertor.convert_data(neo, name="v")
    gsyn = neo_convertor.convert_data(neo, name="gsyn_exc")
    spikes = neo_convertor.convert_spikes(neo)

    p.end()
    return (v, gsyn, spikes)
# NOTE(review): the three statements below are the tail of a do_run(...)
# definition whose header lies outside this view; indentation reconstructed
# to match the body — confirm against the full file.
    neo = populations[0].get_data("spikes")
    p.end()
    return neo


class BigMultiProcessorSpikeSourcePrint(BaseTestCase):
    """Checks the spike-source script split over many cores."""

    def seventy(self):
        # Run with 70 neurons per core and check the total spike count.
        nNeurons = 600  # number of neurons in each population
        neo = do_run(nNeurons, 70)
        spike_count = neo_convertor.count_spikes(neo)
        self.assertEqual(spike_count, 7200)

    def test_seventy(self):
        # runsafe retries/guards the run on real hardware.
        self.runsafe(self.seventy)


if __name__ == '__main__':
    # Manual entry point: run with two core splits and plot the spikes.
    nNeurons = 600  # number of neurons in each population
    neo = do_run(nNeurons, 60)
    spikes = neo_convertor.convert_spikes(neo)
    plot_utils.plot_spikes(spikes)
    print(spikes)
    neo = do_run(nNeurons, 70)
    spikes = neo_convertor.convert_spikes(neo)
    plot_utils.plot_spikes(spikes)
    print(spikes)
def larger_array(self):
    """Check spike and connection counts from the larger-array run."""
    v, spikes, conns = do_larger_array(plot=False)
    converted_spikes = neo_convertor.convert_spikes(spikes)
    self.assertEqual(4032, len(conns))
    self.assertEqual(640, len(converted_spikes))
def get_spike_source_spikes_numpy(self):
    """Return the first recorded input-source spike block as a numpy array."""
    return neo_convertor.convert_spikes(
        self._input_spikes_recorded_list[0])
def get_output_pop_spikes_numpy(self):
    """Return the first recorded output-population spike block as a numpy array."""
    return neo_convertor.convert_spikes(
        self._recorded_spikes_list[0])
def do_run():
    """Run two synfire chains driven and observed over live-spike connections.

    Sets up forward and backward chains fed by spike injectors, wires
    live-spike send/receive connections with their callbacks, runs the
    simulation twice, and returns the converted spike arrays.

    :return: tuple (spikes_forward, spikes_backward) of numpy spike arrays
    """
    # Seed so any randomness in the injection callbacks is reproducible.
    random.seed(0)

    # initial call to set up the front end (pynn requirement)
    Frontend.setup(timestep=1.0, min_delay=1.0)

    # neurons per population and the length of runtime in ms for the
    # simulation, as well as the expected weight each spike will contain
    n_neurons = 100
    run_time = 8000
    weight_to_spike = 2.0

    # neural parameters of the ifcur model used to respond to injected
    # spikes (cell params for a synfire chain)
    cell_params_lif = {'cm': 0.25, 'i_offset': 0.0, 'tau_m': 20.0,
                       'tau_refrac': 2.0, 'tau_syn_E': 5.0,
                       'tau_syn_I': 5.0, 'v_reset': -70.0,
                       'v_rest': -65.0, 'v_thresh': -50.0}

    ##################################
    # Parameters for the injector population.  This is the minimal set of
    # parameters required, which is for a set of spikes where the key is
    # not important.  Note that a virtual key *will* be assigned to the
    # population, and that spikes sent which do not match this virtual key
    # will be dropped; however, if spikes are sent using 16-bit keys, they
    # will automatically be made to match the virtual key.  The virtual
    # key assigned can be obtained from the database.
    ##################################
    cell_params_spike_injector = {
        # The port on which the spiNNaker machine should listen for
        # packets.  Packets to be injected should be sent to this port on
        # the spiNNaker machine
        'port': 12345,
    }

    ##################################
    # Parameters for the injector population.  Note that each injector
    # needs to be given a different port.  The virtual key is assigned
    # here, rather than being allocated later.  As with the above, spikes
    # injected need to match this key, and this will be done automatically
    # with 16-bit keys.
    ##################################
    cell_params_spike_injector_with_key = {
        # The port on which the spiNNaker machine should listen for
        # packets.  Packets to be injected should be sent to this port on
        # the spiNNaker machine
        'port': 12346,

        # This is the base key to be used for the injection, which is used
        # to allow the keys to be routed around the spiNNaker machine.
        # This assignment means that 32-bit keys must have the high-order
        # 16-bit set to 0x7; This will automatically be prepended to
        # 16-bit keys.
        'virtual_key': 0x70000,
    }

    # create synfire populations (if cur exp)
    pop_forward = Frontend.Population(
        n_neurons, Frontend.IF_curr_exp(**cell_params_lif),
        label='pop_forward')
    pop_backward = Frontend.Population(
        n_neurons, Frontend.IF_curr_exp(**cell_params_lif),
        label='pop_backward')

    # Create injection populations
    injector_forward = Frontend.Population(
        n_neurons, Frontend.external_devices.SpikeInjector(),
        additional_parameters=cell_params_spike_injector_with_key,
        label='spike_injector_forward')
    injector_backward = Frontend.Population(
        n_neurons, Frontend.external_devices.SpikeInjector(),
        additional_parameters=cell_params_spike_injector,
        label='spike_injector_backward')

    # Create a connection from the injector into the populations
    Frontend.Projection(injector_forward, pop_forward,
                        Frontend.OneToOneConnector(),
                        Frontend.StaticSynapse(weight=weight_to_spike))
    Frontend.Projection(injector_backward, pop_backward,
                        Frontend.OneToOneConnector(),
                        Frontend.StaticSynapse(weight=weight_to_spike))

    # Synfire chain connection where each neuron is connected to the next
    # neuron.  NOTE: there is no recurrent connection so that each chain
    # stops once it reaches the end
    loop_forward = list()
    loop_backward = list()
    for i in range(0, n_neurons - 1):
        loop_forward.append((i, (i + 1) % n_neurons, weight_to_spike, 3))
        loop_backward.append(((i + 1) % n_neurons, i, weight_to_spike, 3))
    Frontend.Projection(
        pop_forward, pop_forward, Frontend.FromListConnector(loop_forward),
        Frontend.StaticSynapse(weight=weight_to_spike, delay=3))
    Frontend.Projection(
        pop_backward, pop_backward,
        Frontend.FromListConnector(loop_backward),
        Frontend.StaticSynapse(weight=weight_to_spike, delay=3))

    # record spikes from the synfire chains so that we can read off valid
    # results in a safe way afterwards, and verify the behaviour
    pop_forward.record(['spikes'])
    pop_backward.record(['spikes'])

    # Activate the sending of live spikes
    Frontend.external_devices.activate_live_output_for(pop_forward)
    Frontend.external_devices.activate_live_output_for(pop_backward)

    # Set up the live connection for sending spikes
    live_spikes_connection_send = \
        Frontend.external_devices.SpynnakerLiveSpikesConnection(
            receive_labels=None, local_port=None,
            send_labels=["spike_injector_forward",
                         "spike_injector_backward"])
    Frontend.external_devices.add_database_socket_address(
        live_spikes_connection_send.local_ip_address,
        live_spikes_connection_send.local_port, None)

    # Set up callbacks to occur at initialisation
    live_spikes_connection_send.add_init_callback(
        "spike_injector_forward", init_pop)
    live_spikes_connection_send.add_init_callback(
        "spike_injector_backward", init_pop)

    # Set up callbacks to occur at the start of simulation
    live_spikes_connection_send.add_start_resume_callback(
        "spike_injector_forward", send_input_forward)
    live_spikes_connection_send.add_start_resume_callback(
        "spike_injector_backward", send_input_backward)

    # if not using the c visualiser, then a new spynnaker live spikes
    # connection is created to define that there is a python function
    # which receives the spikes.
    live_spikes_connection_receive = \
        Frontend.external_devices.SpynnakerLiveSpikesConnection(
            receive_labels=["pop_forward", "pop_backward"],
            local_port=None, send_labels=None)
    Frontend.external_devices.add_database_socket_address(
        live_spikes_connection_receive.local_ip_address,
        live_spikes_connection_receive.local_port, None)

    # Set up callbacks to occur when spikes are received
    live_spikes_connection_receive.add_receive_callback(
        "pop_forward", receive_spikes)
    live_spikes_connection_receive.add_receive_callback(
        "pop_backward", receive_spikes)

    # Run the simulation on spiNNaker (twice, to exercise resume)
    Frontend.run(run_time)
    Frontend.run(run_time)

    # Retrieve spikes from the synfire chain population
    spikes_forward = neo_convertor.convert_spikes(
        pop_forward.get_data('spikes'))
    spikes_backward = neo_convertor.convert_spikes(
        pop_backward.get_data('spikes'))

    # Clear data structures on spiNNaker to leave the machine in a clean
    # state for future executions
    Frontend.end()

    return (spikes_forward, spikes_backward)
def do_run(self):
    """Run the network and verify spike count and distance-limited weights."""
    v, spikes, pre_weights, post_weights = do_run(plot=False)
    converted = neo_convertor.convert_spikes(spikes)
    self.assertEqual(4970, len(converted))
    # Projections 1 and 2 use distance cut-offs of 2.1 and 1.1.
    self.check_weights(pre_weights[1], 2.1)
    self.check_weights(pre_weights[2], 1.1)
class DistanceDependentProbabilityConnectorTest(BaseTestCase):
    """Checks connections exist exactly where the distance rule allows."""

    def distance(self, i, j):
        # As just testing 1 and 2, simple Manhattan distance on a 10x10
        # grid is fine.
        col_diff = abs((i % 10) - (j % 10))
        row_diff = abs((i // 10) - (j // 10))
        return col_diff + row_diff

    def check_weights(self, weights, allowed_distance):
        """Every pair closer than the cut-off must be connected; none other."""
        pairs = [(s, d) for (s, d, _) in weights]
        for i in range(100):
            for j in range(100):
                if self.distance(i, j) < allowed_distance:
                    self.assertIn((i, j), pairs)
                else:
                    self.assertNotIn((i, j), pairs)

    def do_run(self):
        """Run the network and verify spike count and connector weights."""
        v, spikes, pre_weights, post_weights = do_run(plot=False)
        converted = neo_convertor.convert_spikes(spikes)
        self.assertEqual(4970, len(converted))
        self.check_weights(pre_weights[1], 2.1)
        self.check_weights(pre_weights[2], 1.1)

    def test_do_run(self):
        self.runsafe(self.do_run)


if __name__ == '__main__':
    # Manual entry point: run with plotting and dump the raw results.
    _v, _spikes, _pre_weights, _post_weights = do_run(plot=True)
    print(len(neo_convertor.convert_spikes(_spikes)))
    print('pre_weights: ', _pre_weights)
    print('post_weights: ', _post_weights)