def main(args):
    setup(timestep=0.1)

    random_image = np.random.rand(2, 2)
    size = random_image.size

    input_population_arr = Population(random_image.size, SpikeSourceArray,
                                      {'spike_times': [0 for i in range(0, random_image.size)]})

    cell_params = {'tau_refrac': 2.0, 'v_thresh': -50.0, 'tau_syn_E': 2.0, 'tau_syn_I': 2.0}
    output_population = Population(1, IF_curr_alpha, cell_params, label="output")

    projection = Projection(input_population_arr, output_population, AllToAllConnector())
    projection.setWeights(1.0)

    input_population_arr.record('spikes')
    output_population.record('spikes')

    tstop = 1000.0
    run(tstop)

    output_population.write_data("simpleNetwork_output.pkl", 'spikes')
    input_population_arr.write_data("simpleNetwork_input.pkl", 'spikes')
    #output_population.print_v("simpleNetwork.v")
    end()
def two_neuron_example(
        current=1000.0,
        time_simulation=2000.0,
        weight=0.4,
        neuron_parameters={"v_rest": -50.0, "cm": 1, "tau_m": 20.0,
                           "tau_refrac": 5.0, "v_thresh": -40.0, "v_reset": -50.0},
):
    sim.setup(timestep=0.1, min_delay=0.1)

    pulse = sim.DCSource(amplitude=current, start=0.0, stop=time_simulation)

    pre = sim.Population(1, sim.IF_curr_exp(**neuron_parameters))
    pre.record("spikes")

    pulse.inject_into(pre)

    sim.run(time_simulation)

    # rates in Hz
    rate_pre = len(pre.get_data("spikes").segments[0].spiketrains[0]) \
        / time_simulation * 1000.0

    sim.end()

    return rate_pre
def sim_neuron(rate):
    neuron_parameters = {
        'v_rest':    -50.0,
        'cm':        1,
        'tau_m':     20.0,
        'tau_syn_E': 5.0,
        'tau_syn_I': 5.0,
        'v_reset':   -50.0,
        # effectively infinite threshold, so the neuron never spikes
        'v_thresh':  10000000000000000000000000000000000000000000000000000000000000000000000.0,
        'e_rev_E':   0.0,
        'e_rev_I':   -100,
    }

    time_simulation = 100000  # don't choose too small a number, in order to get good statistics
    weight = 0.1              # is this value all right?

    sim.setup(timestep=0.1, min_delay=0.1)

    # generate Poisson rate stimuli
    pois_exc = sim.SpikeSourcePoisson(duration=time_simulation, start=0.0, rate=rate)
    pois_inh = sim.SpikeSourcePoisson(duration=time_simulation, start=0.0, rate=rate)

    exc = sim.Population(1, cellclass=pois_exc)  # excitatory input cell
    inh = sim.Population(1, cellclass=pois_inh)  # inhibitory input cell
    rec = sim.Population(1, sim.IF_cond_exp(**neuron_parameters))  # receiving neuron

    # connect excitatory and inhibitory inputs to the receiver
    sim.Projection(exc, rec, connector=sim.OneToOneConnector(),
                   synapse_type=sim.StaticSynapse(weight=weight),
                   receptor_type='excitatory')
    sim.Projection(inh, rec, connector=sim.OneToOneConnector(),
                   synapse_type=sim.StaticSynapse(weight=weight),
                   receptor_type='inhibitory')

    rec.record('v')         # record membrane potential
    rec.record('gsyn_exc')  # record excitatory conductance
    rec.record('gsyn_inh')  # record inhibitory conductance

    sim.run(time_simulation)

    # return membrane potential, excitatory conductance, inhibitory conductance
    return (rec.get_data('v').segments[0].analogsignalarrays[0],
            rec.get_data('gsyn_exc').segments[0].analogsignalarrays[0],
            rec.get_data('gsyn_inh').segments[0].analogsignalarrays[0])
def compute(self, proximal, distal=None):
    if distal is not None:
        for i, times in enumerate(distal):
            self.distal_input[i].spike_times = times

    active = []
    predictive = []

    if not (isinstance(proximal[0], list) or isinstance(proximal[0], np.ndarray)):
        proximal = [proximal]

    timestep = self.parameters.config.timestep
    for p in proximal:
        t = pynn.get_current_time()
        for c in p:
            self.proximal_input[int(c)].spike_times = np.array([t + 0.01])

        pynn.run(self.parameters.config.timestep)

        spikes_soma = self.soma.getSpikes()
        mask = (spikes_soma[:, 1] >= t) & (spikes_soma[:, 1] < t + timestep)
        active.append(np.unique(spikes_soma[mask, 0]))

        spikes_distal = self.distal.getSpikes()
        mask = (spikes_distal[:, 1] >= t) & (spikes_distal[:, 1] < t + timestep)
        predictive.append(np.unique(spikes_distal[mask, 0].astype(np.int16) / 2))

    return (active, predictive)
def run_sim(ncell):
    print "Cells: ", ncell
    setup0 = time.time()

    sim.setup(timestep=0.1)

    hh_cell_type = sim.HH_cond_exp()
    hh = sim.Population(ncell, hh_cell_type)

    pulse = sim.DCSource(amplitude=0.5, start=20.0, stop=80.0)
    pulse.inject_into(hh)

    hh.record('v')

    setup1 = time.time()

    t0 = time.time()
    sim.run(100.0)
    v = hh.get_data()
    sim.end()
    t1 = time.time()

    setup_total = setup1 - setup0
    run_total = t1 - t0
    print "Setup: ", setup_total
    print "Run: ", run_total
    print "Total sim time: ", setup_total + run_total
    return run_total
def run(self, spiketimes):
    assert spiketimes.shape[0] == self.n_spike_source, 'spiketimes length should be equal to input neurons'

    start = time.clock()
    sim.reset()
    end = time.clock()
    print "reset uses %f s." % (end - start)

    for i in range(self.n_spike_source):
        spiketime = np.array(spiketimes[i], dtype=float)
        if spiketimes[i].any():
            self.spike_source[i].spike_times = spiketime

    sim.initialize(self.hidden_neurons, V_m=0)
    sim.initialize(self.output_neurons, V_m=0.)

    sim.run(self.sim_time)

    spiketrains = self.output_neurons.get_data(clear=True).segments[0].spiketrains
    # vtrace = self.hidden_neurons.get_data(clear=True).segments[0].filter(name='V_m')[0]
    # plt.figure()
    # plt.plot(vtrace.times, vtrace)
    # plt.show()
    hidden_spiketrains = self.hidden_neurons.get_data(clear=True).segments[0].spiketrains

    spike_cnts = 0
    for spiketrain in hidden_spiketrains:
        spike_cnts += len(list(np.array(spiketrain)))
    self.hidden_spike_cnts.append(spike_cnts)
    print 'hidden spikes: ', spike_cnts

    spiketimes_out = []
    for spiketrain in spiketrains:
        spiketimes_out.append(list(np.array(spiketrain)))
    return np.array(spiketimes_out)
def main():
    # setup timestep of simulation and minimum and maximum synaptic delays
    setup(timestep=simulationTimestep, min_delay=minSynapseDelay, max_delay=maxSynapseDelay)

    # create the spike sources
    retinaLeft = createSpikeSource("Retina Left")
    retinaRight = createSpikeSource("Retina Right")

    # create network and attach the spike sources
    network = createCooperativeNetwork(retinaLeft=retinaLeft, retinaRight=retinaRight)

    # run simulation for time in milliseconds
    print "Simulation started..."
    run(simulationTime)
    print "Simulation ended."

    # plot results
    from itertools import repeat
    numberOfLayersToPlot = 4
    layers = zip(repeat(network, numberOfLayersToPlot),
                 range(1, numberOfLayersToPlot + 1),
                 repeat(False, numberOfLayersToPlot))
    customLayers = [(network, 20, False), (network, 40, False),
                    (network, 60, False), (network, 80, False)]
    for proc in range(0, numberOfLayersToPlot):
        p = Process(target=plotSimulationResults, args=customLayers[proc])
        p.start()

    # finalise program and simulation
    end()
def run_test(w_list, cell_para, spike_source_data):
    pop_list = []
    p.setup(timestep=1.0, min_delay=1.0, max_delay=3.0)

    # input Poisson layer
    input_size = w_list[0].shape[0]
    pop_in = p.Population(input_size, p.SpikeSourceArray, {'spike_times': []})
    for j in range(input_size):
        pop_in[j].spike_times = spike_source_data[j]
    pop_list.append(pop_in)

    for w in w_list:
        pos_w = np.copy(w)
        pos_w[pos_w < 0] = 0
        neg_w = np.copy(w)
        neg_w[neg_w > 0] = 0

        output_size = w.shape[1]
        pop_out = p.Population(output_size, p.IF_curr_exp, cell_para)
        p.Projection(pop_in, pop_out, p.AllToAllConnector(weights=pos_w), target='excitatory')
        p.Projection(pop_in, pop_out, p.AllToAllConnector(weights=neg_w), target='inhibitory')
        pop_list.append(pop_out)
        pop_in = pop_out

    pop_out.record()

    run_time = np.ceil(np.max(spike_source_data)[0] / 1000.) * 1000
    p.run(run_time)
    spikes = pop_out.getSpikes(compatible_output=True)
    return spikes
def scnn_test(cell_params_lif, l_cnn, w_cnn, num_test, test, max_rate, dur_test, silence):
    p.setup(timestep=1.0, min_delay=1.0, max_delay=3.0)
    L = l_cnn
    random.seed(0)
    input_size = L[0][1]

    pops_list = []
    pops_list.append(init_inputlayer(input_size, test[:num_test, :], max_rate, dur_test, silence))

    print('SCNN constructing...')
    for l in range(len(w_cnn)):
        pops_list.append(construct_layer(cell_params_lif, pops_list[l],
                                         L[l + 1][0], L[l + 1][1], w_cnn[l]))
    result = pops_list[-1][0]
    result.record(['v', 'spikes'])  # new

    print('SCNN running...')
    p.run((dur_test + silence) * num_test)
    spike_result = result.getSpikes(compatible_output=True)
    #spike_result = result.get_spike_counts(gather=True)  # tuple data
    #spike_result = result.get_data('spikes')
    p.end()

    print('analysing...')
    spike_result_count = count_spikes(spike_result, 10, num_test, dur_test, silence)
    print("spike_result_count : ", spike_result_count)
    predict = np.argmax(spike_result_count, axis=0)
    print("predict : ", predict)
    # prob = np.exp(spike_result_count)/np.sum(np.exp(spike_result_count), axis=0)
    return predict, spike_result
def test_replicate_can_replicate():
    p1 = pynn.Population(6, pynn.IF_cond_exp(i_offset=10))
    p2 = pynn.Population(6, pynn.IF_cond_exp())
    p3 = pynn.Population(6, pynn.IF_cond_exp())
    l = v.Replicate(p1, (p2, p3), v.ReLU(), weights=(1, 1))
    pynn.run(1000)
    l.store_spikes()
    expected = np.ones((2, 6))
    assert np.allclose(expected, l.get_output())
def test_replicate_create():
    p1 = pynn.Population(6, pynn.IF_cond_exp())
    p2 = pynn.Population(6, pynn.IF_cond_exp())
    p3 = pynn.Population(6, pynn.IF_cond_exp())
    l = v.Replicate(p1, (p2, p3), v.ReLU(), weights=(1, 1))
    pynn.run(1000)
    l.store_spikes()
    assert l.layer1.input.shape == (6, 0)
    assert l.layer2.input.shape == (6, 0)
    assert l.get_output().shape == (2, 6)
def test_ticket244():
    nest = pyNN.nest
    nest.setup(threads=4)
    p1 = nest.Population(4, nest.IF_curr_exp())
    p1.record('spikes')
    poisson_generator = nest.Population(3, nest.SpikeSourcePoisson(rate=1000.0))
    conn = nest.OneToOneConnector()
    syn = nest.StaticSynapse(weight=1.0)
    nest.Projection(poisson_generator, p1.sample(3), conn, syn, receptor_type="excitatory")
    nest.run(15)
    p1.get_data()
def two_neuron_example(
        current=1000.0,
        time_simulation=2000.,
        weight=0.4,
        neuron_parameters={
            'v_rest':     -65.0,
            'cm':         0.1,
            'tau_m':      1.0,
            'tau_refrac': 2.0,
            'tau_syn_E':  10.0,
            'tau_syn_I':  10.0,
            'i_offset':   0.0,
            'v_reset':    -65.0,
            'v_thresh':   -50.0,
        },
):
    """
    Connects two neurons with corresponding parameters.

    The first is stimulated via current injection while the second
    receives the other one's spikes.
    """
    sim.setup(timestep=0.1, min_delay=0.1)

    pulse = sim.DCSource(amplitude=current, start=0.0, stop=time_simulation)

    pre = sim.Population(1, sim.IF_curr_exp(**neuron_parameters))
    post = sim.Population(1, sim.IF_curr_exp(**neuron_parameters))

    pre.record('spikes')
    post.record('spikes')

    sim.Projection(pre, post, connector=sim.OneToOneConnector(),
                   synapse_type=sim.StaticSynapse(weight=weight),
                   receptor_type='excitatory')

    pulse.inject_into(pre)

    sim.run(time_simulation)

    # rates in Hz
    rate_pre = len(pre.get_data('spikes').segments[0].spiketrains[0]) \
        / time_simulation * 1000.
    rate_post = len(post.get_data('spikes').segments[0].spiketrains[0]) \
        / time_simulation * 1000.

    sim.end()

    return rate_pre, rate_post
def loop():
    for device_instance in Interfaces.DeviceMeta._instances:
        device_instance._create_device()
    print "Entered loop"
    i = 0
    while True:
        sim.run(20.0)
        Observers.Observer.notify()
        Setters.Setter.notify()
        SimulatorPorts.RPCPort.execute()
        for pop_view in population_register.values():
            pass
            #print pop_view.meanSpikeCount()
            #print 'amplitude', pop_view.get_data().segments[0].filter(name='v')
            #print nest.GetStatus(map(int, [pop_view.all_cells[0]]), 'V_m')
        i += 1
    sim.end()
def test_record_native_model():
    if not have_nest:
        raise SkipTest
    nest = pyNN.nest
    from pyNN.random import RandomDistribution

    init_logging(logfile=None, debug=True)

    nest.setup()

    parameters = {'tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron")(**parameters))
    p1.initialize(V_m=-70.0, Theta=-50.0)
    p1.set(theta_eq=-51.5)
    #assert_arrays_equal(p1.get('theta_eq'), -51.5*numpy.ones((10,)))
    assert_equal(p1.get('theta_eq'), -51.5)
    print(p1.get('tau_m'))
    p1.set(tau_m=RandomDistribution('uniform', low=15.0, high=20.0))
    print(p1.get('tau_m'))

    current_source = nest.StepCurrentSource(times=[50.0, 110.0, 150.0, 210.0],
                                            amplitudes=[0.01, 0.02, -0.02, 0.01])
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator")(rate=200.0))

    print("Setting up recording")
    p2.record('spikes')
    p1.record('V_m')

    connector = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=0.001)
    prj_ampa = nest.Projection(p2, p1, connector, syn, receptor_type='AMPA')

    tstop = 250.0
    nest.run(tstop)

    vm = p1.get_data().segments[0].analogsignals[0]
    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(vm.shape, (n_points, n_cells))
    assert vm.max() > 0.0  # should have some spikes
def scnn_test(l_cnn, w_cnn, num_test, test, max_rate, dur_test, silence):
    p.setup(timestep=1.0, min_delay=1.0, max_delay=3.0)
    L = l_cnn
    random.seed(0)
    input_size = L[0][1]

    pops_list = []
    pops_list.append(init_inputlayer(input_size, test[:num_test, :], max_rate, dur_test, silence))
    for l in range(len(w_cnn)):
        pops_list.append(construct_layer(pops_list[l], L[l + 1][0], L[l + 1][1], w_cnn[l]))
    result = pops_list[-1][0]
    result.record()

    p.run((dur_test + silence) * num_test)
    spike_result = result.getSpikes(compatible_output=True)
    p.end()

    spike_result_count = count_spikes(spike_result, 10, num_test, dur_test, silence)
    predict = np.argmax(spike_result_count, axis=0)
    # prob = np.exp(spike_result_count)/np.sum(np.exp(spike_result_count), axis=0)
    return predict
def test_record_native_model():
    nest = pyNN.nest
    from pyNN.random import RandomDistribution
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)

    nest.setup()

    parameters = {'Tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron"), parameters)
    p1.initialize('V_m', -70.0)
    p1.initialize('Theta', -50.0)
    p1.set('Theta_eq', -51.5)
    assert_equal(p1.get('Theta_eq'), [-51.5] * 10)
    print p1.get('Tau_m')
    p1.rset('Tau_m', RandomDistribution('uniform', [15.0, 20.0]))
    print p1.get('Tau_m')

    current_source = nest.StepCurrentSource({'times': [50.0, 110.0, 150.0, 210.0],
                                             'amplitudes': [0.01, 0.02, -0.02, 0.01]})
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator"), {'rate': 200.0})

    print "Setting up recording"
    p2.record()
    p1._record('V_m')

    connector = nest.AllToAllConnector(weights=0.001)
    prj_ampa = nest.Projection(p2, p1, connector, target='AMPA')

    tstop = 250.0
    nest.run(tstop)

    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(p1.recorders['V_m'].get().shape, (n_points * n_cells, 3))
    id, t, v = p1.recorders['V_m'].get().T
    assert v.max() > 0.0  # should have some spikes
def compute(self, data, learn=True):
    """Perform the actual computation"""
    timestep = self.parameters.config.timestep

    # run simulation
    for i, d in enumerate(data):
        t = pynn.get_current_time()
        d = d.astype(np.int32)
        activity = np.array(self.calculate_activity([d]))

        train = np.ndarray((np.sum(activity), 2))
        pos = 0
        for j in range(len(self.stimulus)):
            spikes = np.sort(np.random.normal(1.0 + t, 0.01, activity[0][j]))
            train[pos:pos + activity[0][j], :] = np.vstack([np.ones(spikes.size) * j, spikes]).T
            pos += activity[0][j]

        for j, s in enumerate(self.stimulus):
            s.spike_times = train[train[:, 0] == j, 1]

        pynn.run(timestep)

        # extract spikes and calculate activity
        spikes = self.columns.getSpikes()
        mask = (spikes[:, 1] > t) & (spikes[:, 1] < t + timestep)
        active_columns = np.unique(spikes[mask, 0]).astype(np.int32)

        yield active_columns

        if learn > 0:
            # wake up, school's starting in five minutes!
            c = np.zeros(self.permanences.shape[0], dtype=np.bool)
            c[active_columns] = 1
            d = d.astype(np.bool)
            self.permanences[np.outer(c, d)] += 0.01
            self.permanences[np.outer(c, np.invert(d))] -= 0.01
            self.permanences = np.minimum(np.maximum(self.permanences, 0), 1)
            if type(learn) == int:
                learn -= 1
def main():
    # setup timestep of simulation and minimum and maximum synaptic delays
    setup(timestep=simulationTimestep, min_delay=minSynapseDelay,
          max_delay=maxSynapseDelay, threads=4)

    # create the spike sources
    retinaLeft = createSpikeSource("Retina Left")
    retinaRight = createSpikeSource("Retina Right")

    # create network and attach the spike sources
    network = createCooperativeNetwork(retinaLeft=retinaLeft, retinaRight=retinaRight)

    # run simulation for time in milliseconds
    print "Simulation started..."
    run(simulationTime)
    print "Simulation ended."

    # plot results
    plotSimulationResults(network, 1, False)

    # finalise program and simulation
    end()
def presentStimuli(pres_duration, num_pres_per_stim, num_source, num_target,
                   bright_on_weights, bright_off_weights, bright_lat_weights,
                   dark_on_weights, dark_off_weights, dark_lat_weights, is_repeated=False):
    """
    For presenting a stimulus to the target network. Callback is used to switch between presentation rates.

    Arguments:  num_source
                num_target
                num_pres_per_stim
                pres_duration
    """
    num_stim = 2  # two stimuli 'bright' and 'dark'
    total_duration = num_stim * num_pres_per_stim * pres_duration

    source_on_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(), label='source_on_pop')
    source_off_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(), label='source_off_pop')
    is_bright, random_on_rates, random_off_rates = getPresentationRatesForCallback(
        num_stim, num_source, num_pres_per_stim, is_repeated=is_repeated)

    bright_target_pop = pynn.Population(num_target, pynn.IF_cond_exp,
                                        {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
                                        label='target_pop')
    dark_target_pop = pynn.Population(num_target, pynn.IF_cond_exp,
                                      {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
                                      label='target_pop')

    bright_on_conn = pynn.Projection(source_on_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
                                     synapse_type=pynn.StaticSynapse(weight=bright_on_weights),
                                     receptor_type='excitatory')
    bright_off_conn = pynn.Projection(source_off_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
                                      synapse_type=pynn.StaticSynapse(weight=bright_off_weights),
                                      receptor_type='excitatory')
    bright_lat_conn = pynn.Projection(bright_target_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
                                      synapse_type=pynn.StaticSynapse(weight=bright_lat_weights),
                                      receptor_type='inhibitory')
    dark_on_conn = pynn.Projection(source_on_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
                                   synapse_type=pynn.StaticSynapse(weight=dark_on_weights),
                                   receptor_type='excitatory')
    dark_off_conn = pynn.Projection(source_off_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
                                    synapse_type=pynn.StaticSynapse(weight=dark_off_weights),
                                    receptor_type='excitatory')
    dark_lat_conn = pynn.Projection(dark_target_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
                                    synapse_type=pynn.StaticSynapse(weight=dark_lat_weights),
                                    receptor_type='inhibitory')

    source_on_pop.record('spikes')
    source_off_pop.record('spikes')
    bright_target_pop.record(['spikes'])
    dark_target_pop.record(['spikes'])

    pynn.run(total_duration,
             callbacks=[PoissonWeightVariation(source_on_pop, random_on_rates, pres_duration),
                        PoissonWeightVariation(source_off_pop, random_off_rates, pres_duration)])
    pynn.end()

    source_on_spikes = source_on_pop.get_data('spikes').segments[0].spiketrains
    source_off_spikes = source_off_pop.get_data('spikes').segments[0].spiketrains
    bright_spikes = bright_target_pop.get_data('spikes').segments[0].spiketrains
    dark_spikes = dark_target_pop.get_data('spikes').segments[0].spiketrains
    return is_bright, source_on_spikes, source_off_spikes, bright_spikes, dark_spikes
def test_column_input():
    """
    Tests whether all neurons receive the same feedforward input
    from a common proximal dendrite.
    """
    LOG.info('Testing column input...')

    # reset the simulator
    sim.reset()

    column = Column.Column()
    sim.run(1000)
    spikes = column.FetchSpikes()
    print('Spikes before: {}'.format(spikes))

    # now stream some input into the column
    column.SetFeedforwardDendrite(1000.0)
    sim.run(1000)
    spikes = column.FetchSpikes().segments[0]
    print('Spikes after: {}'.format(spikes))

    LOG.info('Test complete.')
def test_encoder_rate_1():
    """
    Checks if the encoder is properly encoding provided values.
    """
    encoder = ScalarEncoder.ScalarEncoder(size=10, width=1, min_val=0, max_val=10)
    encoder.encode(5.0)
    sim.run(100)
    rate = encoder.population.getSpikes()
    voltages = encoder.population.get_v().segments[0]
    pdb.set_trace()
    plot_signal(voltages, 1)

    # get index of the maximum-rate neuron
    idx_max = np.argmax(rate)
    LOG.info(rate)
    LOG.info('Max firing rate: {}'.format(idx_max))
    assert idx_max == 4  # indexing starts from zero
def _run_microcircuit(plot_filename, conf):
    import plotting
    import logging

    simulator = conf['simulator']
    # we here only need nest as simulator, simulator = 'nest'
    import pyNN.nest as sim

    # prepare simulation
    logging.basicConfig()

    # extract parameters from config file
    master_seed = conf['params_dict']['nest']['master_seed']
    layers = conf['layers']
    pops = conf['pops']
    plot_spiking_activity = conf['plot_spiking_activity']
    raster_t_min = conf['raster_t_min']
    raster_t_max = conf['raster_t_max']
    frac_to_plot = conf['frac_to_plot']
    record_corr = conf['params_dict']['nest']['record_corr']
    tau_max = conf['tau_max']

    # numbers of neurons from which to record spikes
    n_rec = helper_functions.get_n_rec(conf)

    sim.setup(**conf['simulator_params'][simulator])

    if simulator == 'nest':
        n_vp = sim.nest.GetKernelStatus('total_num_virtual_procs')
        if sim.rank() == 0:
            print 'n_vp: ', n_vp
            print 'master_seed: ', master_seed
        sim.nest.SetKernelStatus({'print_time': False,
                                  'dict_miss_is_error': False,
                                  'grng_seed': master_seed,
                                  'rng_seeds': range(master_seed + 1, master_seed + n_vp + 1),
                                  'data_path': conf['system_params']['output_path']})

    import network

    # result of export-files
    results = []

    # create network
    start_netw = time.time()
    n = network.Network(sim)
    # contains the GIDs of the spike detectors and voltmeters needed for
    # retrieving filenames later
    device_list = n.setup(sim, conf)
    end_netw = time.time()
    if sim.rank() == 0:
        print 'Creating the network took ', end_netw - start_netw, ' s'

    # simulate
    if sim.rank() == 0:
        print "Simulating..."
    start_sim = time.time()
    sim.run(conf['simulator_params'][simulator]['sim_duration'])
    end_sim = time.time()
    if sim.rank() == 0:
        print 'Simulation took ', end_sim - start_sim, ' s'

    # extract filename from device_list (spikedetector/voltmeter),
    # gid of neuron and thread. merge outputs from all threads
    # into a single file which is then added to the task output.
    for dev in device_list:
        label = sim.nest.GetStatus(dev)[0]['label']
        gid = sim.nest.GetStatus(dev)[0]['global_id']
        # use the file extension to distinguish between spike and voltage output
        extension = sim.nest.GetStatus(dev)[0]['file_extension']
        if extension == 'gdf':  # spikes
            data = np.empty((0, 2))
        elif extension == 'dat':  # voltages
            data = np.empty((0, 3))
        for thread in xrange(conf['simulator_params']['nest']['threads']):
            filenames = glob.glob(conf['system_params']['output_path']
                                  + '%s-*%d-%d.%s' % (label, gid, thread, extension))
            assert len(filenames) == 1, 'Multiple input files found. Use a clean output directory.'
            data = np.vstack([data, np.loadtxt(filenames[0])])
            # delete original files
            os.remove(filenames[0])
        order = np.argsort(data[:, 1])
        data = data[order]
        outputfile_name = 'collected_%s-%d.%s' % (label, gid, extension)
        outputfile = open(outputfile_name, 'w')
        # the outputfile should have the same format as output from NEST,
        # i.e., [int, float] for spikes and [int, float, float] for voltages;
        # hence we write it line by line and assign the corresponding filetype
        if extension == 'gdf':  # spikes
            for line in data:
                outputfile.write('%d\t%.3f\n' % (line[0], line[1]))
            outputfile.close()
            filetype = 'application/vnd.juelich.nest.spike_times'
        elif extension == 'dat':  # voltages
            for line in data:
                outputfile.write('%d\t%.3f\t%.3f\n' % (line[0], line[1], line[2]))
            outputfile.close()
            filetype = 'application/vnd.juelich.nest.analogue_signal'
        res = (outputfile_name, filetype)
        results.append(res)

    if record_corr and simulator == 'nest':
        start_corr = time.time()
        if sim.nest.GetStatus(n.corr_detector, 'local')[0]:
            print 'getting count_covariance on rank ', sim.rank()
            cov_all = sim.nest.GetStatus(n.corr_detector, 'count_covariance')[0]
            delta_tau = sim.nest.GetStatus(n.corr_detector, 'delta_tau')[0]

            cov = {}
            for target_layer in np.sort(layers.keys()):
                for target_pop in pops:
                    target_index = conf['structure'][target_layer][target_pop]
                    cov[target_index] = {}
                    for source_layer in np.sort(layers.keys()):
                        for source_pop in pops:
                            source_index = conf['structure'][source_layer][source_pop]
                            cov[target_index][source_index] = \
                                np.array(list(cov_all[target_index][source_index][::-1])
                                         + list(cov_all[source_index][target_index][1:]))

            f = open(conf['system_params']['output_path'] + '/covariances.dat', 'w')
            print >>f, 'tau_max: ', tau_max
            print >>f, 'delta_tau: ', delta_tau
            print >>f, 'simtime: ', conf['simulator_params'][simulator]['sim_duration'], '\n'

            for target_layer in np.sort(layers.keys()):
                for target_pop in pops:
                    target_index = conf['structure'][target_layer][target_pop]
                    for source_layer in np.sort(layers.keys()):
                        for source_pop in pops:
                            source_index = conf['structure'][source_layer][source_pop]
                            print >>f, target_layer, target_pop, '-', source_layer, source_pop
                            print >>f, 'n_events_target: ', sim.nest.GetStatus(
                                n.corr_detector, 'n_events')[0][target_index]
                            print >>f, 'n_events_source: ', sim.nest.GetStatus(
                                n.corr_detector, 'n_events')[0][source_index]
                            for i in xrange(len(cov[target_index][source_index])):
                                print >>f, cov[target_index][source_index][i]
                            print >>f, ''
            f.close()

            # add file covariances.dat into bundle
            res_cov = ('covariances.dat', 'text/plain')
            results.append(res_cov)
        end_corr = time.time()
        print "Writing covariances took ", end_corr - start_corr, " s"

    if plot_spiking_activity and sim.rank() == 0:
        plotting.plot_raster_bars(raster_t_min, raster_t_max, n_rec,
                                  frac_to_plot, n.pops,
                                  conf['system_params']['output_path'],
                                  plot_filename, conf)
        res_plot = (plot_filename, 'image/png')
        results.append(res_plot)

    sim.end()

    return results
import pyNN.nest as sim
import matplotlib.pyplot as plt

parameters = {
    u'E_L': 0.0,
    u'I_e': 0.9,  # this parameter represents the leak
    u'V_reset': 0.0,
    u'V_th': 0.5,
    u't_ref': .0,
}

sim.setup(timestep=1.0)

nt = sim.native_cell_type('iaf_psc_delta_xxq')
n = sim.Population(1, nt(**parameters))
s = sim.Population(1, sim.SpikeSourceArray())
s[0].spike_times = [10, 15, 20, 30, 40]

p = sim.Projection(s, n, sim.FromListConnector([(0, 0, 0.00025, 0.01)]))
# p1 = sim.Projection(n, n, sim.FromListConnector([(0, 0, 0.00025, 1.0)]))

n.record('V_m')
sim.initialize(n, V_m=0.)
sim.run(128.0)

vtrace = n.get_data(clear=True).segments[0].filter(name='V_m')[0]
print p.get(['weight'], format='array')

plt.figure()
plt.plot(vtrace.times, vtrace, 'o')
plt.ylim([0, 0.6])
plt.show()

sim.end()
def run_retina(params):
    """Run the retina using the specified parameters."""
    tmpdir = tempfile.mkdtemp()

    print "Setting up simulation"
    pyNN.Timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'], max_delay=params['syn_delay'])
    pyNN.pynest.setDict([0], {'threads': params['threads']})
    pyNN.setRNGseeds(params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), 'dc_generator')
    phr_OFF = pyNN.Population((N, N), 'dc_generator')
    noise_ON = pyNN.Population((N, N), 'noise_generator', {'mean': 0., 'std': params['noise_std']})
    noise_OFF = pyNN.Population((N, N), 'noise_generator', {'mean': 0., 'std': params['noise_std']})

    phr_ON.set({'start': params['simtime'] / 4, 'stop': params['simtime'] / 4 * 3})
    phr_ON.tset('amplitude', params['amplitude'] * params['snr'])
    phr_OFF.set({'start': params['simtime'] / 4, 'stop': params['simtime'] / 4 * 3})
    phr_OFF.tset('amplitude', -params['amplitude'] * params['snr'])

    # target ON and OFF populations
    out_ON = pyNN.Population((N, N), 'iaf_sfa_neuron', params['parameters_gc'])
    out_OFF = pyNN.Population((N, N), 'iaf_sfa_neuron', params['parameters_gc'])

    #print "Connecting the network"
    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, 'oneToOne')
    retina_proj_ON.setWeights(params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF, 'oneToOne')
    retina_proj_OFF.setWeights(params['weight'])
    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, 'oneToOne')
    noise_proj_ON.setWeights(params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF, 'oneToOne')
    noise_proj_OFF.setWeights(params['weight'])

    out_ON_filename = os.path.join(tmpdir, 'out_on.gdf')
    out_OFF_filename = os.path.join(tmpdir, 'out_off.gdf')
    out_ON.record()
    out_OFF.record()

    # reads out time used for building
    buildCPUTime = pyNN.Timer.elapsedTime()

    print "Running simulation"
    pyNN.Timer.start()  # start timer on simulation
    pyNN.run(params['simtime'])
    simCPUTime = pyNN.Timer.elapsedTime()

    out_ON.printSpikes(out_ON_filename)
    out_OFF.printSpikes(out_OFF_filename)
    out_ON_DATA = tmpfile2spikelist(out_ON_filename, params['dt'])
    out_OFF_DATA = tmpfile2spikelist(out_OFF_filename, params['dt'])

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate (ON)  : ", out_ON.meanSpikeCount(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.meanSpikeCount(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
    'e_rev_leak': ELeak,
    'e_rev_E':    EIs[1],
    'e_rev_I':    EIs[2],
    'tau_syn_E':  0.2,
    'tau_syn_I':  2.0,
    'i_offset':   0.0,
}

#vs = np.linspace(-75.0, EIs[0] + 5, 3)
vs = [-80.0, -61.0, -60.0]
neurons = [sim.create(sim.HH_cond_exp(**cellparams)) for _ in vs]
for i in xrange(len(vs)):
    neurons[i].record(["v"])
    neurons[i].initialize(v=vs[i])

sim.run(tEnd)

fig = plt.figure(figsize=(cm2inch(12.4), cm2inch(7)))
ax = fig.add_subplot(111)

#cmap = plt.cm.rainbow
#cmap = colors.LinearSegmentedColormap.from_list('blues', ['#729fcf', '#3465a4',
#                                                          '#193a6b'])
lss = ['--', ':', '-']
#colors = iter(cmap(np.linspace(0, 1, len(vs))))
colors = iter(['#204a87'] * 3)
for i in xrange(len(vs)):
    data = neurons[i].get_data()
    signal_names = [s.name for s in data.segments[0].analogsignalarrays]
    vm = data.segments[0].analogsignalarrays[signal_names.index('v')]
    ax.plot(vm.times, vm, lss[i], color=next(colors),
connSTDP = pynn.nest.FindConnections(measure)

weightList = []
aCausalList = []
aAnticausalList = []
timeGrid = np.arange(0, runtime + timeStep / 2.0, timeStep)

# run simulation step-wise to record charge on "capacitors" and discrete synaptic weight
for timeNow in timeGrid:
    weightList.append(prj.getWeights()[0])
    for i in range(len(connSTDP)):  # read out "capacitors"
        if pynn.nest.GetStatus([connSTDP[i]])[0]['synapse_model'].find(synapseModel) > -1:
            aCausalList.append(pynn.nest.GetStatus([connSTDP[i]])[0]['a_causal'])
            aAnticausalList.append(pynn.nest.GetStatus([connSTDP[i]])[0]['a_acausal'])
    if not timeNow == timeGrid[-1]:
        pynn.run(timeStep)

spikes = neuron.getSpikes()
#membrane = neuron.get_v()  # for debugging

print 'presynaptic spikes (static synapse)'
print stimSpikes
print 'presynaptic spikes (plastic synapse)'
print measureSpikes
print 'postsynaptic spikes'
print spikes

pynn.end()

# visualization of results
import matplotlib.pyplot as plt
def run(a_state):
    output_base = "out/"
    spike_count_filename = "gpi_spike_count.dat"

    weight_filename = conn_filename  # filename, from which the cortex - striatum connections are read

    spike_count_full_filename = output_base + spike_count_filename

    #active_state = int(sys.argv[1])
    active_state = a_state

    # Model of the basal ganglia D1 and D2 pathways. States and actions are population coded.
    pyNN.utility.init_logging(None, debug=True)

    sim.setup(time_step)

    # cell class for all neurons in the network
    # (on HMF can be one of IF_cond_exp, EIF_cond_exp_isfa_ista)
    cellclass = sim.IF_cond_exp

    # #############
    # POPULATIONS #
    # #############
    # CORTEX input population: N states, Poisson inputs
    # ?assemblies of m_actions populations or dictionary of populations?
    # STRIATUM: 2 populations of M actions, D1 and D2
    # GPi/SNr: 1 population of M actions, baseline firing rate driven by external Poisson inputs

    cortex = [
        sim.Population(n_cortex_cells, cellclass, neuron_parameters,
                       label="CORTEX_{}".format(i))
        for i in xrange(n_states)]

    cortex_assembly = sim.Assembly(*cortex, label="CORTEX")

    # independent Poisson input to cortex populations.
    # /active_state/ determines which population receives
    # a different firing rate
    cortex_input = []
    for i in xrange(n_states):
        if i == active_state:
            rate = active_state_rate
        else:
            rate = inactive_state_rate
        new_input = sim.Population(
            n_cortex_cells,
            sim.SpikeSourcePoisson,
            {'rate': rate},
            label="STATE_INPUT_" + str(i))
        sim.Projection(
            new_input,
            cortex[i],
            sim.OneToOneConnector(),
            sim.StaticSynapse(weight=cortex_input_weight, delay=cortex_input_delay))
        cortex_input.append(new_input)
    #print 'cortex ok'

    # striatum:
    # excitatory populations
    striatum_d1 = [
        sim.Population(n_msns, cellclass, neuron_parameters,
                       label="D1_{}".format(i))
        for i in xrange(m_actions)]

    # inhibitory populations
    striatum_d2 = [
        sim.Population(n_msns, cellclass, neuron_parameters,
                       label="D2_{}".format(i))
        for i in xrange(m_actions)]

    # striatum D2->D2 and D1->D1 lateral inhibition
    for lat_inh_source in xrange(m_actions):
        for lat_inh_target in xrange(m_actions):
            if lat_inh_source == lat_inh_target:
                continue
            sim.Projection(
                striatum_d1[lat_inh_source],
                striatum_d1[lat_inh_target],
                sim.FixedProbabilityConnector(d1_lat_inh_prob),
                sim.StaticSynapse(weight=d1_lat_inh_weight, delay=d1_lat_inh_delay),
                receptor_type="inhibitory",
                label="d1_lateral_inhibition_{}_{}".format(lat_inh_source, lat_inh_target))
            sim.Projection(
                striatum_d2[lat_inh_source],
                striatum_d2[lat_inh_target],
                sim.FixedProbabilityConnector(d2_lat_inh_prob),
                sim.StaticSynapse(weight=d2_lat_inh_weight, delay=d2_lat_inh_delay),
                receptor_type="inhibitory",
                label="d2_lateral_inhibition_{}_{}".format(lat_inh_source, lat_inh_target))

    striatum_assembly = sim.Assembly(*(striatum_d1 + striatum_d2), label="STRIATUM")

    #gids_cortex = []
    #gids_d1 = []
    #gids_d2 = []
    #for s in xrange(n_states):
    #    gids_cortex.append([gid for gid in cortex_assembly.get_population("CORTEX_" + str(s)).all()])
    #for a in xrange(m_actions):
    #    gids_d1.append([gid1 for gid1 in striatum_assembly.get_population("D1_" + str(a)).all()])
    #    gids_d2.append([gid2 for gid2 in striatum_assembly.get_population("D2_" + str(a)).all()])
    #for i in xrange(0, 3):
    #    print i, 'len cortex ', len(gids_cortex[i]), 'unique ', len(np.unique(gids_cortex[i]))
    #    print i, 'len d1', len(gids_d1[i]), 'unique ', len(np.unique(gids_d1[i]))
    #    print i, 'len d2', len(gids_d2[i]), 'unique ', len(np.unique(gids_d2[i]))
    #print "striatum ok"
    #for i in xrange(0, 3):
    #    print np.unique(gids_cortex[i])
    #    gids_cortex[i][:] -= 3
    #if init:
    #    init_w(gids_cortex, gids_d1, gids_d2)

    # cortex - striatum connection, all-to-all using loaded weights
    cs = sim.Projection(
        cortex_assembly,
        striatum_assembly,
        #sim.AllToAllConnector(),
        #sim.StaticSynapse(
        #    weight=wd1,
        #    delay=ctx_strd1_delay))
        sim.FromFileConnector(weight_filename))

    gpi = [
        sim.Population(n_gpi, cellclass, neuron_parameters,
                       label="GPI_{}".format(i))
        for i in xrange(m_actions)]
    gpi_assembly = sim.Assembly(*gpi, label="GPi")

    # external Poisson input to GPi
    gpi_input = sim.Population(
        m_actions * n_gpi,
        sim.SpikeSourcePoisson,
        dict(duration=sim_duration, rate=gpi_external_rate, start=0.),
        label="GPI_EXT_INPUT")
    sim.Projection(
        gpi_input,
        gpi_assembly,
        sim.OneToOneConnector(),
        sim.StaticSynapse(weight=gpi_external_weight, delay=gpi_external_delay))

    # striatum - gpi connections
    for i in xrange(m_actions):
        gpi_p = sim.Projection(
            striatum_d1[i],
            gpi[i],
            sim.FixedProbabilityConnector(d1_gpi_prob),
            sim.StaticSynapse(weight=d1_gpi_weight, delay=d1_gpi_delay))
        sim.Projection(
            striatum_d2[i],
            gpi[i],
            sim.FixedProbabilityConnector(d2_gpi_prob),
            sim.StaticSynapse(weight=d2_gpi_weight, delay=d2_gpi_delay),
            #target="inhibitory")
            receptor_type="inhibitory")
        #print gpi_p.get('weight', format='list')

    cortex_assembly.record('spikes')
    striatum_assembly.record('spikes')
    gpi_assembly.record('spikes')

    #print 'sim start'
    sim.run(sim_duration)
    sim.end()

    label = "CORTEX_0"
    #print 'cortex get pop', cortex_assembly.get_population(label)
    #print 'cortex describe', cortex_assembly.describe()
    #cortex_assembly.write_data("spikes")
    #cortex_assembly.get_population(label).write_data("spikes")
    #spikes = gpi_assembly
    #get_data("spikes", gather=True)
    #print "getdata spikes", spikes
    #print 'spikes.segment', spikes.segments
    #print 'spikes.segments.SpikeTrains', spikes.segments.spike
    #save_spikes(cortex_assembly, output_base, "cortex.dat")
    #save_spikes(striatum_d1, output_base, "striatum_d1.dat")
    #save_spikes(striatum_d2, output_base, "striatum_d2.dat")
    #save_spikes(gpi, output_base, "gpi.dat")
    #output_rates = np.array(
    #    [len(i.getSpikes()) for i in gpi])
    #np.savetxt(spike_count_full_filename, output_rates)

    # for seg in cortex_assembly.segments:
    #     print("Analyzing segment %d" % seg.index)
    #     stlist = [st - st.t_start for st in seg.spiketrains]
    #     plt.figure()
    #     count, bins = np.histogram(stlist)
    #     plt.bar(bins[:-1], count, width=bins[1] - bins[0])
    #     plt.title("PSTH in segment %d" % seg.index)

    cortex_mean_spikes = np.zeros(n_states)
    gpi_mean_spikes = np.zeros(m_actions)
    d1_mean_spikes = np.zeros(m_actions)
    d2_mean_spikes = np.zeros(m_actions)
    for i in xrange(n_states):
        cortex_mean_spikes[i] = cortex_assembly.get_population("CORTEX_" + str(i)).mean_spike_count()
    for i in xrange(m_actions):
        gpi_mean_spikes[i] = gpi_assembly.get_population("GPI_" + str(i)).mean_spike_count()
        d1_mean_spikes[i] = striatum_assembly.get_population("D1_" + str(i)).mean_spike_count()
        d2_mean_spikes[i] = striatum_assembly.get_population("D2_" + str(i)).mean_spike_count()

    print 'CORTEX ', cortex_mean_spikes
    print 'D1', d1_mean_spikes
    print 'D2', d2_mean_spikes

    return gpi_mean_spikes
#!/usr/bin/env python
import faulthandler

from music_wizard.pynn import XmlFactory, Factory
import music
import pyNN.nest as sim

sim.setup()
music_setup = music.Setup()
xml = music_setup.config('xml')

model_factory = Factory.PyNNProxyFactory(sim, music_setup, acc_latency=10.0)
connector_factory = Factory.PyNNConnectorFactory(sim)

##########
# Load and execute brain file
import brainfile
##########

population_dict = brainfile.__population_views
proxy_factory = XmlFactory.ProxyFactory("app2", connector_factory, model_factory, population_dict)

with open(xml, 'r') as xml_stream:
    proxys = proxy_factory.create_proxys(xml_stream.read())

for i in xrange(20):
    sim.run(20.0)
def simulate(self):
    # reset detectors before simulating the next step
    nest.nest.SetStatus(self.network.detectors.values(), 'n_events', 0)
    nest.run(self.simduration)
    nest.end()
import pyNN.nest as sim

sim.nest.Install("coronetmodule")
sim.setup()
sim.nest.SetKernelStatus({"dict_miss_is_error": False})

coronet_neuron = sim.native_cell_type("coronet_neuron")
p1 = sim.Population(10, coronet_neuron)
s1 = sim.Population(10, sim.SpikeSourcePoisson, {"rate": 20})
s2 = sim.Population(10, sim.SpikeSourcePoisson, {"rate": 20})
sim.Projection(s1, p1, sim.OneToOneConnector(weights=0.02), target="EX")
sim.Projection(s2, p1, sim.OneToOneConnector(weights=0.01), target="IN")

p1.initialize("V_m", -76.0)  # this works as expected
p1.record()
p1._record("V_m")  # ugly

sim.run(1000)

id, t, v = p1.recorders["V_m"].get().T  # ugly

import pylab as pl
import numpy as np

pl.figure()
sl = p1.getSpikes()
pl.plot(sl[:, 1], sl[:, 0], ".")

pl.figure()
id_is_0 = np.where(id == 0)
pl.plot(t[id_is_0], v[id_is_0])
pl.show()
def run_retina(params):
    """Run the retina using the specified parameters."""
    print "Setting up simulation"
    timer = Timer()
    timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'], max_delay=params['syn_delay'],
               threads=params['threads'], rng_seeds=params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    phr_OFF = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    noise_ON = pyNN.Population((N, N), pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))
    noise_OFF = pyNN.Population((N, N), pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))

    phr_ON.set(start=params['simtime'] / 4, stop=params['simtime'] / 4 * 3,
               amplitude=params['amplitude'] * params['snr'])
    phr_OFF.set(start=params['simtime'] / 4, stop=params['simtime'] / 4 * 3,
                amplitude=-params['amplitude'] * params['snr'])

    # target ON and OFF populations
    v_init = params['parameters_gc'].pop('Vinit')
    out_ON = pyNN.Population((N, N), pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_OFF = pyNN.Population((N, N), pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_ON.initialize(v=v_init)
    out_OFF.initialize(v=v_init)

    #print "Connecting the network"
    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, pyNN.OneToOneConnector())
    retina_proj_ON.set(weight=params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF, pyNN.OneToOneConnector())
    retina_proj_OFF.set(weight=params['weight'])
    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, pyNN.OneToOneConnector())
    noise_proj_ON.set(weight=params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF, pyNN.OneToOneConnector())
    noise_proj_OFF.set(weight=params['weight'])

    out_ON.record('spikes')
    out_OFF.record('spikes')

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    print "Running simulation"
    timer.start()  # start timer on simulation
    pyNN.run(params['simtime'])
    simCPUTime = timer.elapsedTime()

    out_ON_DATA = out_ON.get_data().segments[0]
    out_OFF_DATA = out_OFF.get_data().segments[0]

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate (ON)  : ", out_ON.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
conn = [
    sim.Projection(input[0:1], cells, connector, target="AMPA_spikeinput"),
    sim.Projection(input[1:2], cells, connector, target="GABAa_spikeinput"),
    sim.Projection(input[2:3], cells, connector, target="GABAb_spikeinput"),
]

cells._record("iaf_V")
cells._record("AMPA_g")
cells._record("GABAa_g")
cells._record("GABAb_g")
cells.record()

sim.run(100.0)

cells.recorders["iaf_V"].write("Results/nineml_neuron.V", filter=[cells[0]])
cells.recorders["AMPA_g"].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
cells.recorders["GABAa_g"].write("Results/nineml_neuron.g_gabaA", filter=[cells[0]])
cells.recorders["GABAb_g"].write("Results/nineml_neuron.g_gagaB", filter=[cells[0]])

t = cells.recorders["iaf_V"].get()[:, 1]
v = cells.recorders["iaf_V"].get()[:, 2]
gInhA = cells.recorders["GABAa_g"].get()[:, 2]
gInhB = cells.recorders["GABAb_g"].get()[:, 2]
gExc = cells.recorders["AMPA_g"].get()[:, 2]

import pylab
def test_run_0(self):
    # see https://github.com/NeuralEnsemble/PyNN/issues/191
    sim.setup(timestep=0.123, min_delay=0.246)
    sim.run(0)
    self.assertEqual(sim.get_current_time(), 0.0)
    for j in range(n_j):
        l.append((i, j, W[i, j] * w0, delay))
    con = sim.FromListConnector(l, column_names=["weight", "delay"])
    return con


connections_hid = sim.Projection(p_in, p_hid, W_to_connector(W_hid))
connections_out = sim.Projection(p_hid, p_out, W_to_connector(W_out))

# Record spikes
p_in.record("spikes")
p_hid.record("spikes")
p_out.record("spikes")

t = sim.run(T)

# Get recorded data and plot
data_block = p_hid.get_data()
print(data_block)

fig, ax_list = plt.subplots(3)
for k, p, ax in zip(range(3), [p_in, p_hid, p_out], ax_list):
    # scatter each population's spike times against neuron index
    for idx, st in enumerate(p.get_data("spikes").segments[0].spiketrains):
        ax.scatter(st.magnitude, [idx] * len(st), s=4)
    ax.set_xlim(0, T)
plt.show()