def run_simulation(sim, params):
    print "Running Network ..."
    timer = Timer()
    timer.reset()
    sim.run(params['run_time'])
    simCPUtime = timer.elapsedTime()
    print "... The simulation took %s s to run." % str(simCPUtime)
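# A minimal, hedged sketch of how run_simulation might be driven; the backend
# choice, the params dict and the 10-cell population below are illustrative
# assumptions, not part of the original snippet.
import pyNN.nest as sim_backend            # any PyNN backend module would do
from pyNN.utility import Timer             # Timer is used inside run_simulation

example_params = {'run_time': 100.0}       # hypothetical run time in ms

sim_backend.setup(timestep=0.1)
dummy_pop = sim_backend.Population(10, sim_backend.IF_cond_exp())  # something for run() to simulate
run_simulation(sim_backend, example_params)
sim_backend.end()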
def main_pyNN(parameters):
    timer = Timer()
    sim = import_module(parameters.simulator)
    timer.mark("import")

    sim.setup(threads=parameters.threads)
    timer.mark("setup")

    populations = {}
    for name, P in parameters.populations.parameters():
        populations[name] = sim.Population(P.n,
                                           getattr(sim, P.celltype)(**P.params),
                                           label=name)
    timer.mark("build")

    if parameters.projections:
        projections = {}
        for name, P in parameters.projections.parameters():
            connector = getattr(sim, P.connector.type)(**P.connector.params)
            synapse_type = getattr(sim, P.synapse_type.type)(**P.synapse_type.params)
            projections[name] = sim.Projection(populations[P.pre],
                                               populations[P.post],
                                               connector, synapse_type,
                                               receptor_type=P.receptor_type,
                                               label=name)
        timer.mark("connect")

    if parameters.recording:
        for pop_name, to_record in parameters.recording.parameters():
            for var_name, n_record in to_record.items():
                populations[pop_name].sample(n_record).record(var_name)
        timer.mark("record")

    sim.run(parameters.sim_time)
    timer.mark("run")

    spike_counts = {}
    if parameters.recording:
        for pop_name in parameters.recording.names():
            block = populations[pop_name].get_data()  # perhaps include some summary statistics in the data returned?
            spike_counts["spikes_%s" % pop_name] = populations[pop_name].mean_spike_count()
    timer.mark("get_data")

    mpi_rank = sim.rank()
    num_processes = sim.num_processes()
    sim.end()

    data = dict(timer.marks)
    data.update(num_processes=num_processes)
    data.update(spike_counts)
    return mpi_rank, data
def main_pynest(parameters):
    P = parameters
    assert P.sim_name == "pynest"
    timer = Timer()
    import nest
    timer.mark("import")

    nest.SetKernelStatus({"resolution": 0.1})
    timer.mark("setup")

    p = nest.Create("iaf_psc_alpha", n=P.n, params={"I_e": 1000.0})
    timer.mark("build")

    # todo: add recording and data retrieval
    nest.Simulate(P.sim_time)
    timer.mark("run")

    mpi_rank = nest.Rank()
    num_processes = nest.NumProcesses()

    data = P.as_dict()
    data.update(num_processes=num_processes, timings=timer.marks)
    return mpi_rank, data
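# A hedged driver sketch for main_pynest: the parameters object only needs the
# attributes read above (sim_name, n, sim_time) plus an as_dict() method, so a
# tiny stand-in class is used here. The class and its values are assumptions,
# not part of the original benchmark.
class BenchParams(object):
    sim_name = "pynest"
    n = 100            # number of iaf_psc_alpha neurons to create
    sim_time = 100.0   # ms

    def as_dict(self):
        return {"sim_name": self.sim_name, "n": self.n, "sim_time": self.sim_time}

rank, results = main_pynest(BenchParams())
if rank == 0:
    print(results["timings"])   # the per-phase times recorded via timer.mark()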
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "I_f_curve.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """
    import json
    from pyNN.utility import Timer

    timer = Timer()

    g = open("I_f_curve.json", 'r')
    d = json.load(g)

    N = d['param']['N']
    max_current = d['param']['max_current']
    tstop = d['param']['tstop']

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]

    timer.start()
    sim.setup(**d['setup'])

    popcell = sim.Population(N, sim.IF_cond_exp, d['IF_cond_exp'])

    #current_source = []
    #for i in xrange(N):
    #    current_source.append(sim.DCSource(amplitude=(max_current*(i+1)/N)))
    #    popcell[i:(i+1)].inject(current_source[i])

    i_offset = max_current * (1 + np.arange(N)) / N
    popcell.tset("i_offset", i_offset)

    if PYNN07:
        popcell.record()
    else:
        popcell.record('spikes')
        #popcell[0, 1, N-2, N-1].record('v')   # debug

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = popcell.getSpikes()
        data = spike_array_to_neo(spike_array, popcell, tstop)
    else:
        data = popcell.get_data()
    sim.end()
    closing_time = timer.diff()

    times = {'setup_time': setup_time,
             'run_time': run_time,
             'closing_time': closing_time}
    return data, times
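# A hedged usage sketch for run_model: it shows the JSON structure implied by
# the keys read above and one way to call the function. The backend, the
# placeholder values and the module-level names (np, PYNN07) are assumptions;
# it also assumes a PyNN release in which Population.tset (used above) is
# still available.
import json
import numpy as np              # referenced as `np` inside run_model
import pyNN.nest as sim         # "hardware.brainscales" would take the other setup branch

PYNN07 = False                  # module-level flag tested inside run_model

example_config = {
    "param": {"N": 10, "max_current": 1.0, "tstop": 1000.0},
    "setup": {"timestep": 0.1},
    "IF_cond_exp": {"tau_m": 20.0, "v_thresh": -50.0},
}
with open("I_f_curve.json", "w") as f:
    json.dump(example_config, f)

data, times = run_model(sim, simulator="nest")
print("setup: %g s, run: %g s" % (times['setup_time'], times['run_time']))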
def do_run(seed=None): simulator_name = 'spiNNaker' timer = Timer() # === Define parameters ========================================= parallel_safe = True n = 1500 # number of cells # number of excitatory cells:number of inhibitory cells r_ei = 4.0 pconn = 0.02 # connection probability dt = 1 # (ms) simulation timestep tstop = 200 # (ms) simulaton duration delay = 1 # Cell parameters area = 20000. # (µm²) tau_m = 20. # (ms) cm = 1. # (µF/cm²) g_leak = 5e-5 # (S/cm²) e_leak = -49. # (mV) v_thresh = -50. # (mV) v_reset = -60. # (mV) t_refrac = 5. # (ms) (clamped at v_reset) # (mV) 'mean' membrane potential, for calculating CUBA weights v_mean = -60. tau_exc = 5. # (ms) tau_inh = 10. # (ms) # (nS) #Those weights should be similar to the COBA weights g_exc = 0.27 # (nS) # but the delpolarising drift should be taken into account g_inh = 4.5 e_rev_exc = 0. # (mV) e_rev_inh = -80. # (mV) # === Calculate derived parameters =============================== area *= 1e-8 # convert to cm² cm *= area * 1000 # convert to nF r_m = 1e-6 / (g_leak * area) # membrane resistance in MΩ assert tau_m == cm * r_m # just to check # number of excitatory cells n_exc = int(round((n * r_ei / (1 + r_ei)))) n_inh = n - n_exc # number of inhibitory cells celltype = p.IF_curr_exp # (nA) weight of excitatory synapses w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean) w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean) # (nA) assert w_exc > 0 assert w_inh < 0 # === Build the network ========================================== p.setup(timestep=dt, min_delay=delay, max_delay=delay) if simulator_name == 'spiNNaker': # this will set 100 neurons per core p.set_number_of_neurons_per_core(p.IF_curr_exp, 100) # this will set 50 neurons per core p.set_number_of_neurons_per_core(p.IF_cond_exp, 50) # node_id = 1 # np = 1 # host_name = socket.gethostname() cell_params = {'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh, 'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh, 'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0} timer.start() exc_cells = p.Population(n_exc, celltype, cell_params, label="Excitatory_Cells") inh_cells = p.Population(n_inh, celltype, cell_params, label="Inhibitory_Cells") rng = NumpyRNG(seed=seed, parallel_safe=parallel_safe) uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng) exc_cells.initialize(v=uniform_distr) inh_cells.initialize(v=uniform_distr) exc_conn = p.FixedProbabilityConnector(pconn, rng=rng) synapse_exc = p.StaticSynapse(weight=w_exc, delay=delay) inh_conn = p.FixedProbabilityConnector(pconn, rng=rng) synapse_inh = p.StaticSynapse(weight=w_inh, delay=delay) connections = dict() connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn, synapse_type=synapse_exc, receptor_type='excitatory') connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn, synapse_type=synapse_exc, receptor_type='excitatory') connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn, synapse_type=synapse_inh, receptor_type='inhibitory') connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn, synapse_type=synapse_inh, receptor_type='inhibitory') # === Setup recording ============================== exc_cells.record("spikes") # === Run simulation ================================ p.run(tstop) exc_spikes = exc_cells.get_data("spikes") exc_cells.write_data(neo_path, "spikes") p.end() return exc_spikes
def test_va_benchmark(self): try: simulator_name = 'spiNNaker' timer = Timer() # === Define parameters ========================================= rngseed = 98766987 parallel_safe = True n = 1500 # number of cells # number of excitatory cells:number of inhibitory cells r_ei = 4.0 pconn = 0.02 # connection probability dt = 0.1 # (ms) simulation timestep tstop = 200 # (ms) simulaton duration delay = 1 # Cell parameters area = 20000. # (µm²) tau_m = 20. # (ms) cm = 1. # (µF/cm²) g_leak = 5e-5 # (S/cm²) e_leak = -49. # (mV) v_thresh = -50. # (mV) v_reset = -60. # (mV) t_refrac = 5. # (ms) (clamped at v_reset) # (mV) 'mean' membrane potential, for calculating CUBA weights v_mean = -60. tau_exc = 5. # (ms) tau_inh = 10. # (ms) # (nS) #Those weights should be similar to the COBA weights g_exc = 0.27 # (nS) # but the delpolarising drift should be taken into account g_inh = 4.5 e_rev_exc = 0. # (mV) e_rev_inh = -80. # (mV) # === Calculate derived parameters =============================== area *= 1e-8 # convert to cm² cm *= area * 1000 # convert to nF r_m = 1e-6 / (g_leak * area) # membrane resistance in MΩ assert tau_m == cm * r_m # just to check # number of excitatory cells n_exc = int(round((n * r_ei / (1 + r_ei)))) n_inh = n - n_exc # number of inhibitory cells print n_exc, n_inh celltype = p.IF_curr_exp # (nA) weight of excitatory synapses w_exc = 1e-3 * g_exc * (e_rev_exc - v_mean) w_inh = 1e-3 * g_inh * (e_rev_inh - v_mean) # (nA) assert w_exc > 0 assert w_inh < 0 # === Build the network ========================================== p.setup(timestep=dt, min_delay=delay, max_delay=delay) if simulator_name == 'spiNNaker': # this will set 100 neurons per core p.set_number_of_neurons_per_core('IF_curr_exp', 100) # this will set 50 neurons per core p.set_number_of_neurons_per_core('IF_cond_exp', 50) node_id = 1 np = 1 host_name = socket.gethostname() print "Host #%d is on %s" % (np, host_name) cell_params = { 'tau_m': tau_m, 'tau_syn_E': tau_exc, 'tau_syn_I': tau_inh, 'v_rest': e_leak, 'v_reset': v_reset, 'v_thresh': v_thresh, 'cm': cm, 'tau_refrac': t_refrac, 'i_offset': 0 } print cell_params timer.start() print "%s Creating cell populations..." % node_id exc_cells = p.Population(n_exc, celltype, cell_params, label="Excitatory_Cells") inh_cells = p.Population(n_inh, celltype, cell_params, label="Inhibitory_Cells") p.NativeRNG(12345) print "%s Initialising membrane potential to random values..." \ % node_id rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe) uniform_distr = RandomDistribution('uniform', [v_reset, v_thresh], rng=rng) exc_cells.initialize('v', uniform_distr) inh_cells.initialize('v', uniform_distr) print "%s Connecting populations..." % node_id exc_conn = p.FixedProbabilityConnector(pconn, weights=w_exc, delays=delay) inh_conn = p.FixedProbabilityConnector(pconn, weights=w_inh, delays=delay) connections = dict() connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn, target='excitatory', rng=rng) connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn, target='excitatory', rng=rng) connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn, target='inhibitory', rng=rng) connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn, target='inhibitory', rng=rng) # === Setup recording ============================== print "%s Setting up recording..." % node_id exc_cells.record() # === Run simulation ================================ print "%d Running simulation..." 
% node_id print "timings: number of neurons:", n print "timings: number of synapses:", n * n * pconn p.run(tstop) exc_spikes = exc_cells.getSpikes() print len(exc_spikes) current_file_path = os.path.dirname(os.path.abspath(__file__)) current_file_path = os.path.join(current_file_path, "spikes.data") exc_cells.printSpikes(current_file_path) pre_recorded_spikes = p.utility_calls.read_spikes_from_file( current_file_path, 0, n_exc, 0, tstop) for spike_element, read_element in zip(exc_spikes, pre_recorded_spikes): self.assertEqual(round(spike_element[0], 1), round(read_element[0], 1)) self.assertEqual(round(spike_element[1], 1), round(read_element[1], 1)) p.end() # System intentional overload so may error except SpinnmanTimeoutException as ex: raise SkipTest(ex)
def test(cases=[1]): sp = Space(periodic_boundaries=((0, 1), (0, 1), None)) safe = False verbose = True autapse = False parallel_safe = True render = True for case in cases: #w = RandomDistribution('uniform', (0,1)) w = "0.2 + d/0.2" #w = 0.1 #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2) #delay = RandomDistribution('uniform', (0.1,5.)) delay = "0.1 + d/0.2" #delay = 0.1 #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances d_expression = "d < 0.1" #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)" #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()" timer = Timer() np = num_processes() timer.start() if case is 1: conn = DistanceDependentProbabilityConnector( d_expression, delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse) fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np) elif case is 2: conn = FixedProbabilityConnector(0.05, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse) fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np) elif case is 3: conn = AllToAllConnector(delays=delay, weights=w, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse) fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np) elif case is 4: conn = FixedNumberPostConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse) fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np) elif case is 5: conn = FixedNumberPreConnector(50, weights=w, delays=delay, space=sp, safe=safe, verbose=verbose, allow_self_connections=autapse) fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np) elif case is 6: conn = OneToOneConnector(safe=safe, weights=w, delays=delay, verbose=verbose) fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np) elif case is 7: conn = FromFileConnector('connections.dat', safe=safe, verbose=verbose) fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np) elif case is 8: conn = SmallWorldConnector(degree=0.1, rewiring=0., weights=w, delays=delay, safe=safe, verbose=verbose, allow_self_connections=autapse, space=sp) fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np) print "Generating data for %s" % fig_name rng = NumpyRNG(23434, num_processes=np, parallel_safe=parallel_safe) prj = Projection(x, x, conn, rng=rng) simulation_time = timer.elapsedTime() print "Building time", simulation_time print "Nb synapses built", len(prj) if render: if not (os.path.isdir('Results')): os.mkdir('Results') print "Saving Positions...." x.savePositions('Results/positions.dat') print "Saving Connections...." 
prj.saveConnections('Results/connections.dat', compatible_output=False) if node_id == 0 and render: figure() print "Generating and saving %s" % fig_name positions = numpy.loadtxt('Results/positions.dat') connections = numpy.loadtxt('Results/connections.dat') positions = positions[numpy.argsort(positions[:, 0])] idx_pre = (connections[:, 0] - x.first_id).astype(int) idx_post = (connections[:, 1] - x.first_id).astype(int) d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1) subplot(231) title('Cells positions') plot(positions[:, 1], positions[:, 2], '.') subplot(232) title('Weights distribution') hist(connections[:, 2], 50) subplot(233) title('Delay distribution') hist(connections[:, 3], 50) subplot(234) ids = numpy.random.permutation(numpy.unique(positions[:, 0]))[0:6] colors = ['k', 'r', 'b', 'g', 'c', 'y'] for count, cell in enumerate(ids): draw_rf(cell, positions, connections, colors[count]) subplot(235) plot(d, connections[:, 2], '.') subplot(236) plot(d, connections[:, 3], '.') savefig("Results/" + fig_name) os.remove('Results/connections.dat') os.remove('Results/positions.dat')
def test(cases=[1]): sp = Space(periodic_boundaries=((0, 1), (0, 1), None), axes='xy') safe = False callback = progress_bar.set_level autapse = False parallel_safe = True render = True to_file = True for case in cases: #w = RandomDistribution('uniform', (0,1)) w = "0.2 + d/0.2" #w = 0.1 #w = lambda dist : 0.1 + numpy.random.rand(len(dist[0]))*sqrt(dist[0]**2 + dist[1]**2) #delay = RandomDistribution('uniform', (0.1,5.)) #delay = "0.1 + d/0.2" delay = 0.1 #delay = lambda distances : 0.1 + numpy.random.rand(len(distances))*distances d_expression = "exp(-d**2/(2*0.1**2))" #d_expression = "(d[0] < 0.05) & (d[1] < 0.05)" #d_expression = "(d[0]/(0.05**2) + d[1]/(0.1**2)) < 100*numpy.random.rand()" timer = Timer() np = num_processes() timer.start() synapse = StaticSynapse(weight=w, delay=delay) rng = NumpyRNG(23434, parallel_safe=parallel_safe) if case is 1: conn = DistanceDependentProbabilityConnector( d_expression, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng) fig_name = "DistanceDependent_%s_np_%d.png" % (simulator_name, np) elif case is 2: conn = FixedProbabilityConnector(0.02, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng) fig_name = "FixedProbability_%s_np_%d.png" % (simulator_name, np) elif case is 3: conn = AllToAllConnector(delays=delay, safe=safe, callback=callback, allow_self_connections=autapse) fig_name = "AllToAll_%s_np_%d.png" % (simulator_name, np) elif case is 4: conn = FixedNumberPostConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng) fig_name = "FixedNumberPost_%s_np_%d.png" % (simulator_name, np) elif case is 5: conn = FixedNumberPreConnector(50, safe=safe, callback=callback, allow_self_connections=autapse, rng=rng) fig_name = "FixedNumberPre_%s_np_%d.png" % (simulator_name, np) elif case is 6: conn = OneToOneConnector(safe=safe, callback=callback) fig_name = "OneToOne_%s_np_%d.png" % (simulator_name, np) elif case is 7: conn = FromFileConnector(files.NumpyBinaryFile( 'Results/connections.dat', mode='r'), safe=safe, callback=callback, distributed=True) fig_name = "FromFile_%s_np_%d.png" % (simulator_name, np) elif case is 8: conn = SmallWorldConnector(degree=0.1, rewiring=0., safe=safe, callback=callback, allow_self_connections=autapse) fig_name = "SmallWorld_%s_np_%d.png" % (simulator_name, np) print "Generating data for %s" % fig_name prj = Projection(x, x, conn, synapse, space=sp) mytime = timer.diff() print "Time to connect the cell population:", mytime, 's' print "Nb synapses built", prj.size() if to_file: if not (os.path.isdir('Results')): os.mkdir('Results') print "Saving Connections...." prj.save('all', files.NumpyBinaryFile('Results/connections.dat', mode='w'), gather=True) mytime = timer.diff() print "Time to save the projection:", mytime, 's' if render and to_file: print "Saving Positions...." 
x.save_positions('Results/positions.dat') end() if node_id == 0 and render and to_file: figure() print "Generating and saving %s" % fig_name positions = numpy.loadtxt('Results/positions.dat') positions[:, 0] -= positions[:, 0].min() connections = files.NumpyBinaryFile('Results/connections.dat', mode='r').read() print positions.shape, connections.shape connections[:, 0] -= connections[:, 0].min() connections[:, 1] -= connections[:, 1].min() idx_pre = connections[:, 0].astype(int) idx_post = connections[:, 1].astype(int) d = distances(positions[idx_pre, 1:3], positions[idx_post, 1:3], 1) subplot(231) title('Cells positions') plot(positions[:, 1], positions[:, 2], '.') subplot(232) title('Weights distribution') hist(connections[:, 2], 50) subplot(233) title('Delay distribution') hist(connections[:, 3], 50) subplot(234) numpy.random.seed(74562) ids = numpy.random.permutation(positions[:, 0])[0:6] colors = ['k', 'r', 'b', 'g', 'c', 'y'] for count, cell in enumerate(ids): draw_rf(cell, positions, connections, colors[count]) subplot(235) plot(d, connections[:, 2], '.') subplot(236) plot(d, connections[:, 3], '.') savefig("Results/" + fig_name) #os.remove('Results/connections.dat') #os.remove('Results/positions.dat') show()
def run_retina(params):
    """Run the retina using the specified parameters."""
    print "Setting up simulation"
    timer = Timer()
    timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'], max_delay=params['syn_delay'],
               threads=params['threads'], rng_seeds=params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    phr_OFF = pyNN.Population((N, N), pyNN.native_cell_type('dc_generator')())
    noise_ON = pyNN.Population((N, N),
                               pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))
    noise_OFF = pyNN.Population((N, N),
                                pyNN.native_cell_type('noise_generator')(mean=0.0, std=params['noise_std']))

    phr_ON.set(start=params['simtime'] / 4, stop=params['simtime'] / 4 * 3,
               amplitude=params['amplitude'] * params['snr'])
    phr_OFF.set(start=params['simtime'] / 4, stop=params['simtime'] / 4 * 3,
                amplitude=-params['amplitude'] * params['snr'])

    # target ON and OFF populations
    v_init = params['parameters_gc'].pop('Vinit')
    out_ON = pyNN.Population((N, N),
                             pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_OFF = pyNN.Population((N, N),
                              pyNN.native_cell_type('iaf_cond_exp_sfa_rr')(**params['parameters_gc']))
    out_ON.initialize(v=v_init)
    out_OFF.initialize(v=v_init)

    #print "Connecting the network"
    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, pyNN.OneToOneConnector())
    retina_proj_ON.set(weight=params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF, pyNN.OneToOneConnector())
    retina_proj_OFF.set(weight=params['weight'])

    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, pyNN.OneToOneConnector())
    noise_proj_ON.set(weight=params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF, pyNN.OneToOneConnector())
    noise_proj_OFF.set(weight=params['weight'])

    out_ON.record('spikes')
    out_OFF.record('spikes')

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    print "Running simulation"
    timer.start()  # restart timer for the simulation phase
    pyNN.run(params['simtime'])
    simCPUTime = timer.elapsedTime()

    out_ON_DATA = out_ON.get_data().segments[0]
    out_OFF_DATA = out_OFF.get_data().segments[0]

    print "\nRetina Network Simulation:"
    print(params['description'])
    print "Number of Neurons : ", N**2
    print "Output rate (ON)  : ", out_ON.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Output rate (OFF) : ", out_OFF.mean_spike_count(), \
        "spikes/neuron in ", params['simtime'], "ms"
    print "Build time        : ", buildCPUTime, "s"
    print "Simulation time   : ", simCPUTime, "s"

    return out_ON_DATA, out_OFF_DATA
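# A hedged sketch of the params dict run_retina expects, inferred from the keys
# accessed above; every value here is an illustrative placeholder, and the
# module-level name `pyNN` is assumed to be the NEST backend (the
# native_cell_type calls above only make sense for NEST).
retina_params = {
    'dt': 0.1,                           # ms
    'syn_delay': 1.0,                    # ms
    'threads': 1,
    'kernelseeds': [12345],              # one seed per thread
    'N': 8,                              # N x N grid of cells per population
    'simtime': 400.0,                    # ms
    'amplitude': 0.1,
    'snr': 2.0,
    'noise_std': 1.0,
    'weight': 0.01,
    'description': "toy retina run",
    'parameters_gc': {'Vinit': -70.0},   # 'Vinit' is popped off; the rest go to iaf_cond_exp_sfa_rr
}

out_on, out_off = run_retina(retina_params)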
def callback(data_input): #==================================================================== # Unpacking the Joint Angle Message #==================================================================== global message message = data_input.degree rospy.loginfo('=====> received joint angle in degree %r', message) print message if type(message) != int: input_rates = list(message) n_input_neurons = len(input_rates) else: input_rates = message n_input_neurons = 1 #msg_list= [int(msg.encode('hex'),16) for msg in message] timer = Timer() dt = 0.1 p.setup(timestep=dt) # 0.1ms #==================================================================== # Defining the LSM #==================================================================== n_res=2000 w_exc_b=0.2 w_inh_b=-0.8 rout_w_exc=20 rout_w_inh=-80 n_readout_neurons = 2 n_reservoir_neurons = n_res n_res = n_reservoir_neurons exc_rate = 0.8 # percentage of excitatory neurons in reservoir n_exc = int(round(n_reservoir_neurons*exc_rate)) n_inh = n_reservoir_neurons-n_exc izh_celltype = p.native_cell_type('izhikevich') if_celltype = p.IF_curr_exp celltype = if_celltype spike_source = p.native_cell_type('poisson_generator') inp_pop=p.Population(n_input_neurons*10,spike_source,{'rate':input_rates}) exc_cells = p.Population(n_exc, celltype, label="Excitatory_Cells") inh_cells = p.Population(n_inh, celltype, label="Inhibitory_Cells") # initialize with a uniform random distributin # use seeding for reproducability rngseed = 98766987 parallel_safe = True rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe) unifDistr = RandomDistribution('uniform', (-70,-65), rng=rng) inh_cells.initialize('V_m',unifDistr) exc_cells.initialize('V_m',unifDistr) readout_neurons = p.Population(2, celltype, label="readout_neuron") inp_weight=3. inp_delay =1 inp_weight_distr = RandomDistribution('normal', [inp_weight, 1e-3], rng=rng) # connect each input neuron to 30% of the reservoir neurons inp_conn = p.FixedProbabilityConnector(p_connect=0.3,weights =inp_weight_distr, delays=inp_delay) connections = {} connections['inp2e'] = p.Projection(inp_pop, exc_cells, inp_conn) connections['inp2i'] = p.Projection(inp_pop, inh_cells, inp_conn) pconn = 0.01 # sparse connection probability # scale the weights w.r.t. 
the network to keep it stable w_exc = w_exc_b/np.sqrt(n_res) # nA w_inh = w_inh_b/np.sqrt(n_res) # nA delay_exc = 1 # defines how long (ms) the synapse takes for transmission delay_inh = 1 weight_distr_exc = RandomDistribution('normal', [w_exc, 1/n_res], rng=rng) weight_distr_inh = RandomDistribution('normal', [w_inh, 1/n_res], rng=rng) exc_conn = p.FixedProbabilityConnector(pconn, weights=weight_distr_exc, delays=delay_exc) inh_conn = p.FixedProbabilityConnector(pconn, weights=weight_distr_inh, delays=delay_inh) connections['e2e'] = p.Projection(exc_cells, exc_cells, exc_conn, target='excitatory') connections['e2i'] = p.Projection(exc_cells, inh_cells, exc_conn, target='excitatory') connections['i2e'] = p.Projection(inh_cells, exc_cells, inh_conn, target='inhibitory') connections['i2i'] = p.Projection(inh_cells, inh_cells, inh_conn, target='inhibitory') rout_conn_exc = p.AllToAllConnector(weights=rout_w_exc, delays=delay_exc) rout_conn_inh = p.AllToAllConnector(weights=rout_w_inh, delays=delay_exc) connections['e2rout'] = p.Projection(exc_cells, readout_neurons, rout_conn_exc, target='excitatory') connections['i2rout'] = p.Projection(inh_cells, readout_neurons, rout_conn_inh, target='inhibitory') readout_neurons.record() exc_cells.record() inh_cells.record() inp_pop.record() p.run(20) r_spikes = readout_neurons.getSpikes() exc_spikes = exc_cells.getSpikes() inh_spikes = inh_cells.getSpikes() inp_spikes = inp_pop.getSpikes() rospy.loginfo('=====> shape of r_spikes %r', np.shape(r_spikes)) #==================================================================== # Compute Readout Spike Rates #==================================================================== alpha_rates = alpha_decoding(r_spikes,dt) mean_rates = mean_decoding(r_spikes,dt) #==================================================================== # Publish Readout Rates #==================================================================== # TODO: error handling if r_spikes is empty pub = rospy.Publisher('/alpha_readout_rates', Pop_List, queue_size=10) alpha_readout_rates = Pop_List alpha_readout_rates = alpha_rates pub.publish(alpha_readout_rates) pub = rospy.Publisher('/mean_readout_rates', Pop_List, queue_size=10) mean_readout_rates = Pop_List mean_readout_rates = mean_rates pub.publish(mean_readout_rates)
def run_model(sim, **options):
    """
    Run a simulation using the parameters read from the file "spike_train_statistics.json"

    :param sim: the PyNN backend module to be used.
    :param options: should contain a keyword "simulator" which is the name of the PyNN backend module used.
    :return: a tuple (`data`, `times`) where `data` is a Neo Block containing the recorded spikes
             and `times` is a dict containing the time taken for different phases of the simulation.
    """
    import json
    from pyNN.utility import Timer

    print("Running")

    timer = Timer()

    g = open("spike_train_statistics.json", 'r')
    d = json.load(g)

    N = d['param']['N']
    max_rate = d['param']['max_rate']
    tstop = d['param']['tstop']
    d['SpikeSourcePoisson'] = {
        "duration": tstop
    }

    if options['simulator'] == "hardware.brainscales":
        hardware_preset = d['setup'].pop('hardware_preset', None)
        if hardware_preset:
            d['setup']['hardware'] = sim.hardwareSetup[hardware_preset]
        d['SpikeSourcePoisson']['random'] = True
        place = mapper.place()

    timer.start()
    sim.setup(**d['setup'])

    spike_sources = sim.Population(N, sim.SpikeSourcePoisson, d['SpikeSourcePoisson'])
    delta_rate = max_rate / N
    rates = numpy.linspace(delta_rate, max_rate, N)
    print("Firing rates: %s" % rates)
    if PYNN07:
        spike_sources.tset("rate", rates)
    else:
        spike_sources.set(rate=rates)

    if options['simulator'] == "hardware.brainscales":
        for i, spike_source in enumerate(spike_sources):
            place.to(spike_source, hicann=i // 8, neuron=i % 64)
        place.commit()

    if PYNN07:
        spike_sources.record()
    else:
        spike_sources.record('spikes')

    setup_time = timer.diff()
    sim.run(tstop)
    run_time = timer.diff()

    if PYNN07:
        spike_array = spike_sources.getSpikes()
        data = spike_array_to_neo(spike_array, spike_sources, tstop)
    else:
        data = spike_sources.get_data()
    sim.end()
    closing_time = timer.diff()

    times = {'setup_time': setup_time,
             'run_time': run_time,
             'closing_time': closing_time}
    return data, times
def run(self, params, verbose=True): """ params are the parameters to use """ tmpdir = tempfile.mkdtemp() myTimer = Timer() # === Build the network ======================================================== if verbose: print "Setting up simulation" myTimer.start() # start timer on construction sim.setup(timestep=params['dt'], max_delay=params['syn_delay']) N = params['N'] #dc_generator phr_ON = sim.Population((N, ), 'dc_generator') phr_OFF = sim.Population((N, ), 'dc_generator') for factor, phr in [(-params['snr'], phr_OFF), (params['snr'], phr_ON)]: phr.tset('amplitude', params['amplitude'] * factor) phr.set({ 'start': params['simtime'] / 4, 'stop': params['simtime'] / 4 * 3 }) # internal noise model (see benchmark_noise) noise_ON = sim.Population((N, ), 'noise_generator', { 'mean': 0., 'std': params['noise_std'] }) noise_OFF = sim.Population((N, ), 'noise_generator', { 'mean': 0., 'std': params['noise_std'] }) # target ON and OFF populations (what about a tridimensional Population?) out_ON = sim.Population( (N, ), sim.IF_curr_alpha ) #'IF_cond_alpha) #iaf_sfa_neuron')# EIF_cond_alpha_isfa_ista, IF_cond_exp_gsfa_grr,sim.IF_cond_alpha)#'iaf_sfa_neuron',params['parameters_gc'])#'iaf_cond_neuron')# IF_cond_alpha) # out_OFF = sim.Population( (N, ), sim.IF_curr_alpha ) #'IF_cond_alpha) #IF_curr_alpha)#'iaf_sfa_neuron')#sim.IF_curr_alpha)#,params['parameters_gc']) # initialize membrane potential TODO: and conductances? from pyNN.random import RandomDistribution, NumpyRNG rng = NumpyRNG(seed=params['kernelseed']) vinit_distr = RandomDistribution(distribution='uniform', parameters=[-70, -55], rng=rng) for out_ in [out_ON, out_OFF]: out_.randomInit(vinit_distr) retina_proj_ON = sim.Projection(phr_ON, out_ON, sim.OneToOneConnector()) retina_proj_ON.setWeights(params['weight']) # TODO fix setWeight, add setDelays to 10 ms (relative to stimulus onset) retina_proj_OFF = sim.Projection(phr_OFF, out_OFF, sim.OneToOneConnector()) retina_proj_OFF.setWeights(params['weight']) noise_proj_ON = sim.Projection(noise_ON, out_ON, sim.OneToOneConnector()) noise_proj_ON.setWeights(params['weight']) noise_proj_OFF = sim.Projection( noise_OFF, out_OFF, sim.OneToOneConnector( )) # implication if ON and OFF have the same noise input? noise_proj_OFF.setWeights(params['weight']) out_ON.record() out_OFF.record() # reads out time used for building buildCPUTime = myTimer.elapsedTime() # === Run simulation =========================================================== if verbose: print "Running simulation" myTimer.reset() # start timer on construction sim.run(params['simtime']) simCPUTime = myTimer.elapsedTime() myTimer.reset() # start timer on construction # TODO LUP use something like "for pop in [phr, out]" ? 
out_ON_filename = os.path.join(tmpdir, 'out_on.gdf') out_OFF_filename = os.path.join(tmpdir, 'out_off.gdf') out_ON.printSpikes(out_ON_filename) # out_OFF.printSpikes(out_OFF_filename) # # TODO LUP get out_ON_DATA on a 2D grid independantly of out_ON.cell.astype(int) out_ON_DATA = load_spikelist(out_ON_filename, range(N), t_start=0.0, t_stop=params['simtime']) out_OFF_DATA = load_spikelist(out_OFF_filename, range(N), t_start=0.0, t_stop=params['simtime']) out = { 'out_ON_DATA': out_ON_DATA, 'out_OFF_DATA': out_OFF_DATA } #,'out_ON_pos':out_ON} # cleans up os.remove(out_ON_filename) os.remove(out_OFF_filename) os.rmdir(tmpdir) writeCPUTime = myTimer.elapsedTime() if verbose: print "\nRetina Network Simulation:" print(params['description']) print "Number of Neurons : ", N print "Output rate (ON) : ", out_ON_DATA.mean_rate( ), "Hz/neuron in ", params['simtime'], "ms" print "Output rate (OFF) : ", out_OFF_DATA.mean_rate( ), "Hz/neuron in ", params['simtime'], "ms" print("Build time : %g s" % buildCPUTime) print("Simulation time : %g s" % simCPUTime) print("Writing time : %g s" % writeCPUTime) return out
def runNetwork(Be, Bi, nn_stim, show_gui=True, dt = defaultParams.dt, N_rec_v = 5, save=False, simtime = defaultParams.Tpost+defaultParams.Tstim+defaultParams.Tblank+defaultParams.Ttrans, extra = {}, kernelseed = 123): exec("from pyNN.%s import *" % simulator_name) in globals() timer = Timer() rec_conn={'EtoE':1, 'EtoI':1, 'ItoE':1, 'ItoI':1} print('####################') print('### (Be, Bi, nn_stim): ', Be, Bi, nn_stim) print('####################') Bee, Bei = Be, Be Bie, Bii = Bi, Bi N = defaultParams.N NE = defaultParams.NE NI = defaultParams.NI print('\n # -----> Num cells: %s, size of pert. inh: %s; base rate %s; pert rate %s'% (N, nn_stim, defaultParams.r_bkg, defaultParams.r_stim)) r_extra = np.zeros(N) r_extra[NE:NE+nn_stim] = defaultParams.r_stim rr1 = defaultParams.r_bkg*np.random.uniform(.75,1.25, N) rr2 = rr1 + r_extra rank = setup(timestep=dt, max_delay=defaultParams.delay_default, reference='ISN', save_format='hdf5', **extra) print("rank =", rank) nump = num_processes() print("num_processes =", nump) import socket host_name = socket.gethostname() print("Host #%d is on %s" % (rank+1, host_name)) if 'threads' in extra: print("%d Initialising the simulator with %d threads..." % (rank, extra['threads'])) else: print("%d Initialising the simulator with single thread..." % rank) timer.start() # start timer on construction print("%d Setting up random number generator using seed %s" % (rank, kernelseed)) ks = open('kernelseed','w') ks.write('%i'%kernelseed) ks.close() rng = NumpyRNG(kernelseed, parallel_safe=True) nesp = defaultParams.neuron_params_default cell_parameters = { 'cm': nesp['C_m']/1000, # Capacitance of the membrane in nF 'tau_refrac': nesp['t_ref'], # Duration of refractory period in ms. 'v_spike': 0.0 , # Spike detection threshold in mV. https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp 'v_reset': nesp['V_reset'], # Reset value for V_m after a spike. In mV. 'v_rest': nesp['E_L'], # Resting membrane potential (Leak reversal potential) in mV. 'tau_m': nesp['C_m']/nesp['g_L'], # Membrane time constant in ms = cm/tau_m*1000.0, C_m/g_L 'i_offset': nesp['I_e']/1000, # Offset current in nA 'a': 0, # Subthreshold adaptation conductance in nS. 'b': 0, # Spike-triggered adaptation in nA 'delta_T': 2 , # Slope factor in mV. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp 'tau_w': 144.0, # Adaptation time constant in ms. See https://github.com/nest/nest-simulator/blob/master/models/aeif_cond_alpha.cpp 'v_thresh': nesp['V_th'], # Spike initiation threshold in mV 'e_rev_E': nesp['E_ex'], # Excitatory reversal potential in mV. 'tau_syn_E': nesp['tau_syn_ex'], # Rise time of excitatory synaptic conductance in ms (alpha function). 'e_rev_I': nesp['E_in'], # Inhibitory reversal potential in mV. 'tau_syn_I': nesp['tau_syn_in'], # Rise time of the inhibitory synaptic conductance in ms (alpha function). } print("%d Creating population with %d neurons." 
% (rank, N)) celltype = EIF_cond_alpha_isfa_ista(**cell_parameters) celltype.default_initial_values['v'] = cell_parameters['v_rest'] # Setting default init v, useful for NML2 export layer_volume = Cuboid(1000,100,1000) layer_structure = RandomStructure(layer_volume, origin=(0,0,0)) layer_structure_input = RandomStructure(layer_volume, origin=(0,-150,0)) default_cell_radius = 15 stim_cell_radius = 10 #EI_pop = Population(N, celltype, structure=layer_structure, label="EI") E_pop = Population(NE, celltype, structure=layer_structure, label='E_pop') E_pop.annotate(color='1 0 0') E_pop.annotate(radius=default_cell_radius) E_pop.annotate(type='E') # temp indicator to use for connection arrowhead #print("%d Creating pop %s." % (rank, E_pop)) I_pop = Population(NI, celltype, structure=layer_structure, label='I_pop') I_pop.annotate(color='0 0 .9') I_pop.annotate(radius=default_cell_radius) I_pop.annotate(type='I') # temp indicator to use for connection arrowhead #print("%d Creating pop %s." % (rank, I_pop)) I_pert_pop = PopulationView(I_pop, np.array(range(0,nn_stim)),label='I_pert_pop') I_nonpert_pop = PopulationView(I_pop, np.array(range(nn_stim,NI)),label='I_nonpert_pop') p_rate = defaultParams.r_bkg print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate)) source_typeA_E = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim+defaultParams.Tpost) expoissonA_E = Population(NE, source_typeA_E, structure=layer_structure_input, label="stim_E") print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate)) source_typeA_I = SpikeSourcePoisson(rate=p_rate, start=0,duration=defaultParams.Ttrans+defaultParams.Tblank) expoissonA_I = Population(NI, source_typeA_I, structure=layer_structure_input, label="pre_pert_stim_I") print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate)) source_typeB = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank,duration=defaultParams.Tstim+defaultParams.Tpost) #expoissonB_E = Population(NE, source_typeB, label="non_pert_stim_E") expoissonB_I = Population(len(I_nonpert_pop), source_typeB, structure=layer_structure_input, label="non_pert_stim_I") p_rate = defaultParams.r_bkg+defaultParams.r_stim print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate)) source_typeC = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank, duration=defaultParams.Tstim) expoissonC = Population(nn_stim, source_typeC, structure=layer_structure_input, label="pert_stim") p_rate = defaultParams.r_bkg print("%d Creating excitatory Poisson generator with rate %g spikes/s." 
% (rank, p_rate)) source_typeD = SpikeSourcePoisson(rate=p_rate, start=defaultParams.Ttrans+defaultParams.Tblank+defaultParams.Tstim, duration=defaultParams.Tpost) expoissonD = Population(nn_stim, source_typeD, structure=layer_structure_input, label="pert_poststim") for p in [expoissonA_E,expoissonA_I,expoissonB_I,expoissonC,expoissonD]: p.annotate(color='0.8 0.8 0.8') p.annotate(radius=stim_cell_radius) progress_bar = ProgressBar(width=20) connector_E = FixedProbabilityConnector(0.15, rng=rng, callback=progress_bar) connector_I = FixedProbabilityConnector(1, rng=rng, callback=progress_bar) EE_syn = StaticSynapse(weight=0.001*Bee, delay=defaultParams.delay_default) EI_syn = StaticSynapse(weight=0.001*Bei, delay=defaultParams.delay_default) II_syn = StaticSynapse(weight=0.001*Bii, delay=defaultParams.delay_default) IE_syn = StaticSynapse(weight=0.001*Bie, delay=defaultParams.delay_default) #I_syn = StaticSynapse(weight=JI, delay=delay) ext_Connector = OneToOneConnector(callback=progress_bar) ext_syn_bkg = StaticSynapse(weight=0.001*defaultParams.Be_bkg, delay=defaultParams.delay_default) ext_syn_stim = StaticSynapse(weight=0.001*defaultParams.Be_stim, delay=defaultParams.delay_default) E_to_E = Projection(E_pop, E_pop, connector_E, EE_syn, receptor_type="excitatory") print("E --> E\t\t", len(E_to_E), "connections") E_to_I = Projection(E_pop, I_pop, connector_E, EI_syn, receptor_type="excitatory") print("E --> I\t\t", len(E_to_I), "connections") I_to_I = Projection(I_pop, I_pop, connector_I, II_syn, receptor_type="inhibitory") print("I --> I\t\t", len(I_to_I), "connections") I_to_E = Projection(I_pop, E_pop, connector_I, IE_syn, receptor_type="inhibitory") print("I --> E\t\t", len(I_to_E), "connections") input_A_E = Projection(expoissonA_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory") print("input --> %s cells pre pert\t"%len(E_pop), len(input_A_E), "connections") input_A_I = Projection(expoissonA_I, I_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory") print("input --> %s cells pre pert\t"%len(I_pop), len(input_A_I), "connections") ##input_B_E = Projection(expoissonB_E, E_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory") ##print("input --> %s cells post pert\t"%len(E_pop), len(input_B_E), "connections") input_B_I = Projection(expoissonB_I, I_nonpert_pop, ext_Connector, ext_syn_bkg, receptor_type="excitatory") print("input --> %s cells post pert\t"%len(I_nonpert_pop), len(input_B_I), "connections") input_C = Projection(expoissonC, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory") print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_C), "connections") input_D = Projection(expoissonD, I_pert_pop, ext_Connector, ext_syn_stim, receptor_type="excitatory") print("input --> %s cells pre pert\t"%len(I_pert_pop), len(input_D), "connections") # Can't be used for connections etc. as NeuroML export not (yet) supported EI_pop = Assembly(E_pop, I_pop, label='EI') # Record spikes print("%d Setting up recording in excitatory population." % rank) EI_pop.record('spikes') if N_rec_v>0: EI_pop[0:min(N,N_rec_v)].record('v') # read out time used for building buildCPUTime = timer.elapsedTime() # === Run simulation =========================================================== # run, measure computer time timer.start() # start timer on construction print("%d Running simulation in %s for %g ms (dt=%sms)." 
% (rank, simulator_name, simtime, dt)) run(simtime) print("Done") simCPUTime = timer.elapsedTime() # write data to file if save and not simulator_name=='neuroml': for pop in [EI_pop]: filename="ISN-%s-%s-%i.gdf"%(simulator_name, pop.label, rank) ff = open(filename, 'w') spikes = pop.get_data('spikes', gather=False) spiketrains = spikes.segments[0].spiketrains print('Saving data recorded for %i spiketrains in pop %s, indices: %s, ids: %s to %s'% \ (len(spiketrains), pop.label, [s.annotations['source_index'] for s in spiketrains], [s.annotations['source_id'] for s in spiketrains], filename)) for spiketrain_i in range(len(spiketrains)): spiketrain = spiketrains[spiketrain_i] source_id = spiketrain.annotations['source_id'] source_index = spiketrain.annotations['source_index'] #print("Writing spike data for cell %s[%s] (gid: %i): %i spikes: [%s,...,%s] "%(pop.label,source_index, source_id, len(spiketrain),spiketrain[0],spiketrain[-1])) for t in spiketrain: ff.write('%s\t%i\n'%(t.magnitude,spiketrain_i)) ff.close() vs = pop.get_data('v', gather=False) for segment in vs.segments: for i in range(len(segment.analogsignals[0].transpose())): filename="ISN-%s-%s-cell%i.dat"%(simulator_name, pop.label, i) print('Saving cell %i in %s to %s'%(i,pop.label,filename)) vm = segment.analogsignals[0].transpose()[i] tt = np.array([t*dt/1000. for t in range(len(vm))]) times_vm = np.array([tt, vm/1000.]).transpose() np.savetxt(filename, times_vm , delimiter = '\t', fmt='%s') spike_data = {} spike_data['senders'] = [] spike_data['times'] = [] index_offset = 1 for pop in [EI_pop]: if rank == 0: spikes = pop.get_data('spikes', gather=False) #print(spikes.segments[0].all_data) num_rec = len(spikes.segments[0].spiketrains) print("Extracting spike info (%i) for %i cells in %s"%(num_rec,pop.size,pop.label)) #assert(num_rec==len(spikes.segments[0].spiketrains)) for i in range(num_rec): ss = spikes.segments[0].spiketrains[i] for s in ss: index = i+index_offset #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index)) spike_data['senders'].append(index) spike_data['times'].append(s) index_offset+=pop.size print("Build time : %g s" % buildCPUTime) print("Simulation time : %g s" % simCPUTime) # === Clean up and quit ======================================================== end()
def run(self, params, verbose=True):
    tmpdir = tempfile.mkdtemp()
    timer = Timer()
    timer.start()  # start timer on construction

    # === Build the network ========================================================
    if verbose:
        print "Setting up simulation"
    sim.setup(timestep=params.simulation.dt,
              max_delay=params.simulation.syn_delay, debug=False)
    N = params.N
    #dc_generator
    current_source = sim.DCSource(amplitude=params.snr,
                                  start=params.simulation.simtime / 4,
                                  stop=params.simulation.simtime / 4 * 3)
    # internal noise model (NEST specific)
    noise = sim.Population(N, 'noise_generator',
                           {'mean': 0., 'std': params.noise_std})
    # target population
    output = sim.Population(N, sim.IF_cond_exp)

    # initialize membrane potential
    numpy.random.seed(params.simulation.kernelseed)
    V_rest, V_spike = -70., -53.
    output.tset('v_init', V_rest + numpy.random.rand(N, ) * (V_spike - V_rest))

    # Connecting the network
    conn = sim.OneToOneConnector(weights=params.weight)
    sim.Projection(noise, output, conn)
    for cell in output:
        cell.inject(current_source)
    output.record()

    # reads out time used for building
    buildCPUTime = timer.elapsedTime()

    # === Run simulation ===========================================================
    if verbose:
        print "Running simulation"
    timer.reset()  # restart timer for the simulation phase
    sim.run(params.simulation.simtime)
    simCPUTime = timer.elapsedTime()

    timer.reset()  # restart timer for writing the results
    output_filename = os.path.join(tmpdir, 'output.gdf')
    #print output_filename
    output.printSpikes(output_filename)
    output_DATA = load_spikelist(output_filename, N,
                                 t_start=0.0, t_stop=params.simulation.simtime)
    writeCPUTime = timer.elapsedTime()

    if verbose:
        print "\nFiber Network Simulation:"
        print "Number of Neurons : ", N
        print "Mean Output rate  : ", output_DATA.mean_rate(), \
            "Hz during ", params.simulation.simtime, "ms"
        print("Build time        : %g s" % buildCPUTime)
        print("Simulation time   : %g s" % simCPUTime)
        print("Writing time      : %g s" % writeCPUTime)
    os.remove(output_filename)
    os.rmdir(tmpdir)
    return output_DATA
def test_callback(data_input):
    global message
    message = data_input.actual.positions
    msg_list = list(message)
    #msg_list[0] = int(message[0].encode('hex'),16)
    #for i in
    #msg_list = int(message.encode('hex'),16)
    #print('============= Received image data.',message)
    rospy.loginfo('=====received data %r', msg_list[0])

    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt)  # 0.1ms

    pop_1 = p.Population(1, p.IF_curr_exp, {}, label="pop_1")
    #input = p.Population(1, p.SpikeSourceArray, {'spike_times': [[0,3,6]]}, label='input')
    input = p.Population(1, p.SpikeSourcePoisson, {'rate': (msg_list[0] + 1.6) * 100})
    stat_syn = p.StaticSynapse(weight=50.0, delay=1)
    input_proj = p.Projection(input, pop_1, p.OneToOneConnector(),
                              synapse_type=stat_syn, receptor_type='excitatory')
    pop_1.record(['v', 'spikes'])

    p.run(10)

    pop_1_data = pop_1.get_data()
    spikes = pop_1_data.segments[0].spiketrains[0]
    mean_rate = int(gaussian_convolution(spikes, dt))
    rospy.loginfo('=====mean_rate %r', mean_rate)
    # mean_rate = 64
    rate_command = mean_rate  # rate coding of the spike train
    '''
    pub = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist, queue_size=10)
    # construct the output command
    command = Twist()
    command.linear.x = rate_command*0.02
    command.angular.z = rate_command/50000.
    pub.publish(command)
    '''
    pub = rospy.Publisher('/arm_controller/follow_joint_trajectory/goal',
                          FollowJointTrajectoryActionGoal, queue_size=10)
    command = FollowJointTrajectoryActionGoal()
    command.header.stamp = rospy.Time.now()
    command.goal.trajectory.joint_names = ['elbow']
    point = JointTrajectoryPoint()
    point.positions = [rate_command / 10]
    point.time_from_start = rospy.Duration(1)
    command.goal.trajectory.points.append(point)
    pub.publish(command)
    rospy.loginfo('=====send command %r', command.goal.trajectory.points[0])

    fig_settings = {
        'lines.linewidth': 0.5,
        'axes.linewidth': 0.5,
        'axes.labelsize': 'small',
        'legend.fontsize': 'small',
        'font.size': 8
    }
    plt.rcParams.update(fig_settings)
    fig1 = plt.figure(1, figsize=(6, 8))

    def plot_spiketrains(segment):
        for spiketrain in segment.spiketrains:
            y = np.ones_like(spiketrain) * spiketrain.annotations['source_id']
            plt.plot(spiketrain, y, '.')
            plt.ylabel(segment.name)
            plt.setp(plt.gca().get_xticklabels(), visible=False)

    def plot_signal(signal, index, colour='b'):
        label = "Neuron %d" % signal.annotations['source_ids'][index]
        plt.plot(signal.times, signal[:, index], colour, label=label)
        plt.ylabel("%s (%s)" % (signal.name, signal.units._dimensionality.string))
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.legend()

    print("now plotting the network---------------")
    rospy.loginfo('--------now plotting---------------')
    n_panels = sum(a.shape[1] for a in pop_1_data.segments[0].analogsignalarrays) + 2
    plt.subplot(n_panels, 1, 1)
    plot_spiketrains(pop_1_data.segments[0])
    panel = 3
    for array in pop_1_data.segments[0].analogsignalarrays:
        for i in range(array.shape[1]):
            plt.subplot(n_panels, 1, panel)
            plot_signal(array, i, colour='bg'[panel % 2])
            panel += 1
    plt.xlabel("time (%s)" % array.times.units._dimensionality.string)
    plt.setp(plt.gca().get_xticklabels(), visible=True)
Brunel N (2000) Dynamics of sparsely connected networks of excitatory and
inhibitory spiking neurons. J Comput Neurosci 8:183-208

Andrew Davison, UNIC, CNRS
May 2006

"""

from pyNN.utility import get_script_args, Timer, ProgressBar
simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)
from pyNN.random import NumpyRNG, RandomDistribution

timer = Timer()

# === Define parameters ========================================================

downscale   = 50      # scale number of neurons down by this factor
                      # scale synaptic weights up by this factor to
                      # obtain similar dynamics independent of size
order       = 50000   # determines size of network:
                      # 4*order excitatory neurons
                      # 1*order inhibitory neurons
Nrec        = 50      # number of neurons to record from, per population
epsilon     = 0.1     # connectivity: proportion of neurons each neuron projects to

# Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1
# here: Case C, asynchronous irregular firing, ~35 Hz
eta         = 2.0     # rel rate of external input
def runBrunelNetwork(g=5., eta=2., dt=0.1, simtime=1000.0, delay=1.5, epsilon=0.1, order=2500, N_rec=50, N_rec_v=2, save=False, simulator_name='nest', jnml_simulator=None, extra={}): exec("from pyNN.%s import *" % simulator_name) in globals() timer = Timer() # === Define parameters ======================================================== downscale = 1 # scale number of neurons down by this factor # scale synaptic weights up by this factor to # obtain similar dynamics independent of size order = order # determines size of network: # 4*order excitatory neurons # 1*order inhibitory neurons Nrec = N_rec # number of neurons to record from, per population epsilon = epsilon # connectivity: proportion of neurons each neuron projects to # Parameters determining model dynamics, cf Brunel (2000), Figs 7, 8 and Table 1 # here: Case C, asynchronous irregular firing, ~35 Hz eta = eta # rel rate of external input g = g # rel strength of inhibitory synapses J = 0.1 # synaptic weight [mV] delay = delay # synaptic delay, all connections [ms] # single neuron parameters tauMem = 20.0 # neuron membrane time constant [ms] tauSyn = 0.1 # synaptic time constant [ms] tauRef = 2.0 # refractory time [ms] U0 = 0.0 # resting potential [mV] theta = 20.0 # threshold # simulation-related parameters simtime = simtime # simulation time [ms] dt = dt # simulation step length [ms] # seed for random generator used when building connections connectseed = 12345789 use_RandomArray = True # use Python rng rather than NEST rng # seed for random generator(s) used during simulation kernelseed = 43210987 # === Calculate derived parameters ============================================= # scaling: compute effective order and synaptic strength order_eff = int(float(order) / downscale) J_eff = J * downscale # compute neuron numbers NE = int(4 * order_eff) # number of excitatory neurons NI = int(1 * order_eff) # number of inhibitory neurons N = NI + NE # total number of neurons # compute synapse numbers CE = int(epsilon * NE) # number of excitatory synapses on neuron CI = int(epsilon * NI) # number of inhibitory synapses on neuron C = CE + CI # total number of internal synapses per n. Cext = CE # number of external synapses on neuron # synaptic weights, scaled for alpha functions, such that # for constant membrane potential, charge J would be deposited fudge = 0.00041363506632638 # ensures dV = J at V=0 # excitatory weight: JE = J_eff / tauSyn * fudge JE = (J_eff / tauSyn) * fudge # inhibitory weight: JI = - g * JE JI = -g * JE # threshold, external, and Poisson generator rates: nu_thresh = theta / (J_eff * CE * tauMem) nu_ext = eta * nu_thresh # external rate per synapse p_rate = 1000 * nu_ext * Cext # external input rate per neuron (Hz) # number of synapses---just so we know Nsyn = ( C + 1 ) * N + 2 * Nrec # number of neurons * (internal synapses + 1 synapse from PoissonGenerator) + 2synapses" to spike detectors # put cell parameters into a dict cell_params = { 'tau_m': tauMem, 'tau_syn_E': tauSyn, 'tau_syn_I': tauSyn, 'tau_refrac': tauRef, 'v_rest': U0, 'v_reset': U0, 'v_thresh': theta, 'cm': 0.001 } # (nF) # === Build the network ======================================================== # clear all existing network elements and set resolution and limits on delays. 
# For NEST, limits must be set BEFORE connecting any elements #extra = {'threads' : 2} rank = setup(timestep=dt, max_delay=delay, **extra) print("rank =", rank) np = num_processes() print("np =", np) import socket host_name = socket.gethostname() print("Host #%d is on %s" % (rank + 1, host_name)) if 'threads' in extra: print("%d Initialising the simulator with %d threads..." % (rank, extra['threads'])) else: print("%d Initialising the simulator with single thread..." % rank) # Small function to display information only on node 1 def nprint(s): if rank == 0: print(s) timer.start() # start timer on construction print("%d Setting up random number generator" % rank) rng = NumpyRNG(kernelseed, parallel_safe=True) print("%d Creating excitatory population with %d neurons." % (rank, NE)) celltype = IF_curr_alpha(**cell_params) celltype.default_initial_values[ 'v'] = U0 # Setting default init v, useful for NML2 export E_net = Population(NE, celltype, label="E_net") print("%d Creating inhibitory population with %d neurons." % (rank, NI)) I_net = Population(NI, celltype, label="I_net") print( "%d Initialising membrane potential to random values between %g mV and %g mV." % (rank, U0, theta)) uniformDistr = RandomDistribution('uniform', low=U0, high=theta, rng=rng) E_net.initialize(v=uniformDistr) I_net.initialize(v=uniformDistr) print("%d Creating excitatory Poisson generator with rate %g spikes/s." % (rank, p_rate)) source_type = SpikeSourcePoisson(rate=p_rate) expoisson = Population(NE, source_type, label="expoisson") print("%d Creating inhibitory Poisson generator with the same rate." % rank) inpoisson = Population(NI, source_type, label="inpoisson") # Record spikes print("%d Setting up recording in excitatory population." % rank) E_net.record('spikes') if N_rec_v > 0: E_net[0:min(NE, N_rec_v)].record('v') print("%d Setting up recording in inhibitory population." % rank) I_net.record('spikes') if N_rec_v > 0: I_net[0:min(NI, N_rec_v)].record('v') progress_bar = ProgressBar(width=20) connector = FixedProbabilityConnector(epsilon, rng=rng, callback=progress_bar) E_syn = StaticSynapse(weight=JE, delay=delay) I_syn = StaticSynapse(weight=JI, delay=delay) ext_Connector = OneToOneConnector(callback=progress_bar) ext_syn = StaticSynapse(weight=JE, delay=dt) print( "%d Connecting excitatory population with connection probability %g, weight %g nA and delay %g ms." % (rank, epsilon, JE, delay)) E_to_E = Projection(E_net, E_net, connector, E_syn, receptor_type="excitatory") print("E --> E\t\t", len(E_to_E), "connections") I_to_E = Projection(I_net, E_net, connector, I_syn, receptor_type="inhibitory") print("I --> E\t\t", len(I_to_E), "connections") input_to_E = Projection(expoisson, E_net, ext_Connector, ext_syn, receptor_type="excitatory") print("input --> E\t", len(input_to_E), "connections") print( "%d Connecting inhibitory population with connection probability %g, weight %g nA and delay %g ms." 
% (rank, epsilon, JI, delay)) E_to_I = Projection(E_net, I_net, connector, E_syn, receptor_type="excitatory") print("E --> I\t\t", len(E_to_I), "connections") I_to_I = Projection(I_net, I_net, connector, I_syn, receptor_type="inhibitory") print("I --> I\t\t", len(I_to_I), "connections") input_to_I = Projection(inpoisson, I_net, ext_Connector, ext_syn, receptor_type="excitatory") print("input --> I\t", len(input_to_I), "connections") # read out time used for building buildCPUTime = timer.elapsedTime() # === Run simulation =========================================================== # run, measure computer time timer.start() # start timer on construction print("%d Running simulation for %g ms (dt=%sms)." % (rank, simtime, dt)) run(simtime) print("Done") simCPUTime = timer.elapsedTime() # write data to file #print("%d Writing data to file." % rank) #(E_net + I_net).write_data("Results/brunel_np%d_%s.pkl" % (np, simulator_name)) if save and not simulator_name == 'neuroml': for pop in [E_net, I_net]: io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.gdf" % (simulator_name, pop.label, rank)) spikes = pop.get_data('spikes', gather=False) for segment in spikes.segments: io.write_segment(segment) io = PyNNTextIO(filename="brunel-PyNN-%s-%s-%i.dat" % (simulator_name, pop.label, rank)) vs = pop.get_data('v', gather=False) for segment in vs.segments: io.write_segment(segment) spike_data = {} spike_data['senders'] = [] spike_data['times'] = [] index_offset = 1 for pop in [E_net, I_net]: if rank == 0: spikes = pop.get_data('spikes', gather=False) #print(spikes.segments[0].all_data) num_rec = len(spikes.segments[0].spiketrains) print("Extracting spike info (%i) for %i cells in %s" % (num_rec, pop.size, pop.label)) #assert(num_rec==len(spikes.segments[0].spiketrains)) for i in range(num_rec): ss = spikes.segments[0].spiketrains[i] for s in ss: index = i + index_offset #print("Adding spike at %s in %s[%i] (cell %i)"%(s,pop.label,i,index)) spike_data['senders'].append(index) spike_data['times'].append(s) index_offset += pop.size #from IPython.core.debugger import Tracer #Tracer()() E_rate = E_net.mean_spike_count() * 1000.0 / simtime I_rate = I_net.mean_spike_count() * 1000.0 / simtime # write a short report nprint("\n--- Brunel Network Simulation ---") nprint("Nodes : %d" % np) nprint("Number of Neurons : %d" % N) nprint("Number of Synapses : %d" % Nsyn) nprint("Input firing rate : %g" % p_rate) nprint("Excitatory weight : %g" % JE) nprint("Inhibitory weight : %g" % JI) nprint("Excitatory rate : %g Hz" % E_rate) nprint("Inhibitory rate : %g Hz" % I_rate) nprint("Build time : %g s" % buildCPUTime) nprint("Simulation time : %g s" % simCPUTime) # === Clean up and quit ======================================================== end() if simulator_name == 'neuroml' and jnml_simulator: from pyneuroml import pynml lems_file = 'LEMS_Sim_PyNN_NeuroML2_Export.xml' print('Going to run generated LEMS file: %s on simulator: %s' % (lems_file, jnml_simulator)) if jnml_simulator == 'jNeuroML': results, events = pynml.run_lems_with_jneuroml( lems_file, nogui=True, load_saved_data=True, reload_events=True) elif jnml_simulator == 'jNeuroML_NEURON': results, events = pynml.run_lems_with_jneuroml_neuron( lems_file, nogui=True, load_saved_data=True, reload_events=True) spike_data['senders'] = [] spike_data['times'] = [] for k in events.keys(): values = k.split('/') index = int( values[1]) if values[0] == 'E_net' else NE + int(values[1]) n = len(events[k]) print( "Loading spikes for %s (index %i): [%s, ..., %s (n=%s)] sec" % (k, index, 
events[k][0] if n > 0 else '-', events[k][-1] if n > 0 else '-', n)) for t in events[k]: spike_data['senders'].append(index) spike_data['times'].append(t * 1000) #print spike_data return spike_data
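# A worked check of the derived-parameter formulas used in runBrunelNetwork
# above, evaluated for the default arguments (g=5, eta=2, epsilon=0.1,
# order=2500, J=0.1 mV, tauSyn=0.1 ms, tauMem=20 ms, theta=20 mV). This is an
# illustrative sketch added here, not part of the original script.
order_eff = 2500                              # downscale = 1
NE, NI = 4 * order_eff, 1 * order_eff         # 10000 excitatory, 2500 inhibitory cells
CE, CI = int(0.1 * NE), int(0.1 * NI)         # 1000 excitatory, 250 inhibitory synapses per cell
J_eff, tauSyn, tauMem, theta = 0.1, 0.1, 20.0, 20.0
fudge = 0.00041363506632638                   # ensures dV = J at V = 0 for alpha synapses

JE = (J_eff / tauSyn) * fudge                 # ~0.000414 nA, excitatory weight
JI = -5.0 * JE                                # ~-0.00207 nA, inhibitory weight (g = 5)
nu_thresh = theta / (J_eff * CE * tauMem)     # 20 / 2000 = 0.01
nu_ext = 2.0 * nu_thresh                      # eta * nu_thresh = 0.02
p_rate = 1000 * nu_ext * CE                   # 20000 Hz external Poisson rate per neuron
print(JE, JI, nu_thresh, p_rate)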