def test():
    if not HAVE_H5PY:
        raise SkipTest
    sim.setup()
    p1 = sim.Population(10,
                        sim.IF_cond_exp(v_rest=-65,
                                        tau_m=lambda i: 10 + 0.1 * i,
                                        cm=RD('normal', (0.5, 0.05))),
                        label="population_one")
    p2 = sim.Population(20,
                        sim.IF_curr_alpha(v_rest=-64,
                                          tau_m=lambda i: 11 + 0.1 * i),
                        label="population_two")
    prj = sim.Projection(p1, p2,
                         sim.FixedProbabilityConnector(p_connect=0.5),
                         synapse_type=sim.StaticSynapse(weight=RD('uniform', [0.0, 0.1]),
                                                        delay=0.5),
                         receptor_type='excitatory')
    net = Network(p1, p2, prj)

    export_to_sonata(net, "tmp_serialization_test", overwrite=True)
    net2 = import_from_sonata("tmp_serialization_test/circuit_config.json", sim)

    for orig_population in net.populations:
        imp_population = net2.get_component(orig_population.label)
        assert orig_population.size == imp_population.size
        for name in orig_population.celltype.default_parameters:
            assert_array_almost_equal(orig_population.get(name),
                                      imp_population.get(name), 12)

    w1 = prj.get('weight', format='array')
    prj2 = net2.get_component(asciify(prj.label).decode('utf-8') + "-0")
    w2 = prj2.get('weight', format='array')
    assert_array_almost_equal(w1, w2, 12)
def test_native_stdp_model():
    if not have_nest:
        raise SkipTest
    nest = pyNN.nest
    from pyNN.utility import init_logging
    init_logging(logfile=None, debug=True)
    nest.setup()

    p1 = nest.Population(10, nest.IF_cond_exp())
    p2 = nest.Population(10, nest.SpikeSourcePoisson())

    stdp_params = {'Wmax': 50.0, 'lambda': 0.015, 'weight': 0.001}
    stdp = nest.native_synapse_type("stdp_synapse")(**stdp_params)

    connector = nest.AllToAllConnector()
    prj = nest.Projection(p2, p1, connector,
                          receptor_type='excitatory',
                          synapse_type=stdp)
def connect(self):
    n_cells = self.parameters.config.n_cells
    n_segments = self.parameters.config.n_segments

    # connect populations
    pynn.Projection(self.distal_input, self.distal,
                    pynn.OneToOneConnector(weights=0.025))

    for i in range(self.parameters.config.n_columns):
        # get "compartments" for all cells in this column
        inhibitions = self.inhibitory[i * n_cells:(i + 1) * n_cells]
        somas = self.soma[i * n_cells:(i + 1) * n_cells]
        proximal_input = pynn.PopulationView(self.proximal_input, [i])

        # set up connections with columnar symmetry
        pynn.Projection(inhibitions, somas,
                        pynn.DistanceDependentProbabilityConnector('d>=1', weights=0.2),
                        target='SYN_2')  # 4
        pynn.Projection(proximal_input, somas,
                        pynn.AllToAllConnector(weights=0.08),
                        target='SYN_1')  # 1
        pynn.Projection(proximal_input, inhibitions,
                        pynn.AllToAllConnector(weights=0.042))  # 2

        for j in range(self.parameters.config.n_cells):
            # get "compartments" for this specific cell
            segments = self.distal[i * n_cells * n_segments + j * n_segments:
                                   i * n_cells * n_segments + (j + 1) * n_segments]
            inhibition = pynn.PopulationView(self.inhibitory, [i * n_cells + j])
            soma = pynn.PopulationView(self.soma, [i * n_cells + j])

            # set up connections with cellular symmetry
            pynn.Projection(segments, inhibition,
                            pynn.AllToAllConnector(weights=0.15),
                            target='inhibitory')  # 3
            pynn.Projection(segments, soma,
                            pynn.AllToAllConnector(weights=0.15),
                            target='SYN_3')  # 3
def test_projection(self):
    path = tempfile.mkstemp()[1]
    size_a = random.randint(1, 100)
    size_b = random.randint(1, 100)

    dist = pyhmf.RandomDistribution(rng=pyhmf.NativeRNG(1337))
    conn_pyhmf = pyhmf.AllToAllConnector(weights=dist, delays=42)
    proj_pyhmf = pyhmf.Projection(
        pyhmf.Population(size_a, pyhmf.IF_cond_exp),
        pyhmf.Population(size_b, pyhmf.IF_cond_exp),
        conn_pyhmf)
    proj_pyhmf.saveConnections(getattr(pyhmf, self.file_type)(path, 'w'))

    conn_pynn = pynn.FromFileConnector(getattr(pynn.recording.files, self.file_type)(path))
    proj_pynn = pynn.Projection(
        pynn.Population(size_a, pynn.IF_cond_exp),
        pynn.Population(size_b, pynn.IF_cond_exp),
        conn_pynn)

    numpy.testing.assert_equal(proj_pyhmf.getWeights(format='array'),
                               proj_pynn.getWeights(format='array'))
def presentStimuli(pres_duration, num_pres_per_stim, num_source, num_target,
                   bright_on_weights, bright_off_weights, bright_lat_weights,
                   dark_on_weights, dark_off_weights, dark_lat_weights,
                   is_repeated=False):
    """
    Present stimuli to the target network. A callback is used to switch between
    presentation rates.

    Arguments: pres_duration, num_pres_per_stim, num_source, num_target,
               the six weight arrays, is_repeated
    """
    num_stim = 2  # two stimuli: 'bright' and 'dark'
    total_duration = num_stim * num_pres_per_stim * pres_duration

    source_on_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(), label='source_on_pop')
    source_off_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(), label='source_off_pop')
    is_bright, random_on_rates, random_off_rates = getPresentationRatesForCallback(
        num_stim, num_source, num_pres_per_stim, is_repeated=is_repeated)

    bright_target_pop = pynn.Population(num_target, pynn.IF_cond_exp,
                                        {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
                                        label='target_pop')
    dark_target_pop = pynn.Population(num_target, pynn.IF_cond_exp,
                                      {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
                                      label='target_pop')

    bright_on_conn = pynn.Projection(source_on_pop, bright_target_pop,
                                     connector=pynn.AllToAllConnector(),
                                     synapse_type=pynn.StaticSynapse(weight=bright_on_weights),
                                     receptor_type='excitatory')
    bright_off_conn = pynn.Projection(source_off_pop, bright_target_pop,
                                      connector=pynn.AllToAllConnector(),
                                      synapse_type=pynn.StaticSynapse(weight=bright_off_weights),
                                      receptor_type='excitatory')
    bright_lat_conn = pynn.Projection(bright_target_pop, bright_target_pop,
                                      connector=pynn.AllToAllConnector(),
                                      synapse_type=pynn.StaticSynapse(weight=bright_lat_weights),
                                      receptor_type='inhibitory')
    dark_on_conn = pynn.Projection(source_on_pop, dark_target_pop,
                                   connector=pynn.AllToAllConnector(),
                                   synapse_type=pynn.StaticSynapse(weight=dark_on_weights),
                                   receptor_type='excitatory')
    dark_off_conn = pynn.Projection(source_off_pop, dark_target_pop,
                                    connector=pynn.AllToAllConnector(),
                                    synapse_type=pynn.StaticSynapse(weight=dark_off_weights),
                                    receptor_type='excitatory')
    dark_lat_conn = pynn.Projection(dark_target_pop, dark_target_pop,
                                    connector=pynn.AllToAllConnector(),
                                    synapse_type=pynn.StaticSynapse(weight=dark_lat_weights),
                                    receptor_type='inhibitory')

    source_on_pop.record('spikes')
    source_off_pop.record('spikes')
    bright_target_pop.record(['spikes'])
    dark_target_pop.record(['spikes'])

    pynn.run(total_duration,
             callbacks=[PoissonWeightVariation(source_on_pop, random_on_rates, pres_duration),
                        PoissonWeightVariation(source_off_pop, random_off_rates, pres_duration)])
    pynn.end()

    source_on_spikes = source_on_pop.get_data('spikes').segments[0].spiketrains
    source_off_spikes = source_off_pop.get_data('spikes').segments[0].spiketrains
    bright_spikes = bright_target_pop.get_data('spikes').segments[0].spiketrains
    dark_spikes = dark_target_pop.get_data('spikes').segments[0].spiketrains
    return is_bright, source_on_spikes, source_off_spikes, bright_spikes, dark_spikes
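A minimal usage sketch for the function above (not part of the original). The parameter values and the weight-array names are illustrative assumptions; the arrays would need shapes compatible with an AllToAllConnector between num_source and num_target cells.

# Hypothetical call; w_bright_on, w_bright_off, ... are assumed to be built elsewhere.
is_bright, on_spk, off_spk, bright_spk, dark_spk = presentStimuli(
    pres_duration=200.0, num_pres_per_stim=5, num_source=20, num_target=10,
    bright_on_weights=w_bright_on, bright_off_weights=w_bright_off,
    bright_lat_weights=w_bright_lat, dark_on_weights=w_dark_on,
    dark_off_weights=w_dark_off, dark_lat_weights=w_dark_lat)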
measureSpikes = stimSpikes + delayStimMeasure

stim = pynn.Population(1, pynn.SpikeSourceArray, {'spike_times': stimSpikes})
measure = pynn.Population(1, pynn.SpikeSourceArray, {'spike_times': measureSpikes})

# create neuron
neuron = pynn.Population(1, pynn.IF_cond_exp)

# init and import custom NEST synapse model
pynn.nest.SetDefaults(synapseModel, synapseParams)
stdpModel = pynn.NativeSynapseDynamics(synapseModel)

# connect stimuli
connStim = pynn.OneToOneConnector(weights=stimWeight)
connMeasure = pynn.OneToOneConnector(weights=measureWeight)
pynn.Projection(stim, neuron, connStim, target='excitatory')
prj = pynn.Projection(measure, neuron, connMeasure,
                      synapse_dynamics=stdpModel, target='excitatory')

# record spike times and membrane potential
neuron.record()
neuron.record_v()

connSTDP = pynn.nest.FindConnections(measure)

weightList = []
aCausalList = []
aAnticausalList = []
timeGrid = np.arange(0, runtime + timeStep / 2.0, timeStep)
poissonE = sim.Population((1,), cellclass=sim.SpikeSourcePoisson,
                          cellparams=poissonE_params, label='poissonE')
poissonI = sim.Population((1,), cellclass=sim.SpikeSourcePoisson,
                          cellparams=poissonI_params, label='poissonI')

myconn = sim.AllToAllConnector(weights=globalWeight, delays=dt)

### Connections ###
prjE_E = sim.Projection(poissonE, popE, method=myconn, target='excitatory')
prjI_E = sim.Projection(poissonI, popE, method=myconn, target='inhibitory')
prjE_I = sim.Projection(poissonE, popI, method=myconn, target='excitatory')
prjI_I = sim.Projection(poissonI, popI, method=myconn, target='inhibitory')

## Record the spikes ##
popE.record(to_file=False)
popE.record_v(to_file=False)
popI.record(to_file=False)
popE.record_gsyn(to_file=False)

t1 = time()
sim.run(tsim)
t2 = time()
print("Elapsed %f seconds." % (t2 - t1,))
def test_create_with_synapse_dynamics(self):
    prj = sim.Projection(self.p1, self.p2, self.all2all,
                         synapse_type=sim.TsodyksMarkramSynapse())
# Create the C2 layer and connect it to the single output neuron
training_spiketrains = [[s for s in st] for st in training_pair[1]]
C2_populations, compound_C2_population =\
    create_C2_populations(training_spiketrains)
out_p = sim.Population(1, sim.IF_curr_exp(tau_refrac=.1))
stdp_weight = 7 / s2_prototype_cells
stdp = sim.STDPMechanism(
    weight=stdp_weight,
    timing_dependence=sim.SpikePairRule(tau_plus=20.0, tau_minus=26.0,
                                        A_plus=stdp_weight / 5,
                                        A_minus=stdp_weight / 4.48),
    weight_dependence=sim.AdditiveWeightDependence(w_min=0.0,
                                                   w_max=15.8 * stdp_weight))
learn_proj = sim.Projection(compound_C2_population, out_p,
                            sim.AllToAllConnector(), stdp)
epoch = training_pair[0]
print('Simulating for epoch', epoch)

# Record the spikes for visualization purposes and to count the number of
# fired spikes
# compound_C2_population.record('spikes')
out_p.record(['spikes', 'v'])

# Let the simulation run to "fill" the layer pipeline with spikes
sim.run(40)

# Data structure for storing the computed STDP weights for this epoch
classifier_weights = []
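A hedged sketch (not from the original snippet) of how the learned weights of learn_proj could later be appended to classifier_weights; Projection.get('weight', format='array') is the standard PyNN accessor, but the bookkeeping shown is an assumption.

# Assumption: executed once the simulation has advanced past this epoch.
classifier_weights.append(learn_proj.get('weight', format='array'))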
                              / (2 * np.power(sig, 2.))))), 2)
        cann_connector.append((i, j, weights[i][j]))  # , delay_cann2cann))

print("Weight matrix:\n", weights)

spike_times = [1000., 2000.]

cann_pop = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="cann_pop")
inhib_pop = sim.Population(1, sim.IF_cond_alpha(**cell_params), label="inhib_pop")
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

spike_2_conn = sim.Projection(spike_source, cann_pop[spiky:spiky + 1],
                              sim.AllToAllConnector(),
                              sim.StaticSynapse(weight=0.002, delay=0.1))
cann_2_cann = sim.Projection(cann_pop, cann_pop,
                             sim.FromListConnector(cann_connector, column_names=["weight"]),
                             sim.StaticSynapse(weight=0.0001, delay=75))
cann_2_inh = sim.Projection(cann_pop, inhib_pop,
                            sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.02, delay=0.1),
                            receptor_type="excitatory")
inh_2_cann = sim.Projection(inhib_pop, cann_pop,
                            sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.2, delay=0.1),
                            receptor_type="inhibitory")

spike_source.record('spikes')
cann_pop.record(('v', 'spikes'))
inhib_pop.record(('v', 'spikes'))
    sim.IF_cond_alpha(**cell_params), label="inhib_pop")

spike_source_1 = sim.Population(
    1, sim.SpikeSourceArray(spike_times=spike_time_for_id_1))
spike_source_2 = sim.Population(
    1, sim.SpikeSourceArray(spike_times=spike_time_for_id_2))
spike_source_3 = sim.Population(
    1, sim.SpikeSourceArray(spike_times=spike_time_for_id_3))
spike_source_4 = sim.Population(
    1, sim.SpikeSourceArray(spike_times=spike_time_for_id_4))
spike_source_9 = sim.Population(
    1, sim.SpikeSourceArray(spike_times=spike_time_for_id_9))

spike_2_conn_for_1 = sim.Projection(spike_source_1, cann_pop[1:2],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_2 = sim.Projection(spike_source_2, cann_pop[2:3],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_3 = sim.Projection(spike_source_3, cann_pop[3:4],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_4 = sim.Projection(spike_source_4, cann_pop[4:5],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_9 = sim.Projection(spike_source_9, cann_pop[9:10],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
cann_2_cann = sim.Projection(
}

neuron1 = sim.create(sim.HH_cond_exp(**cellparams))
neuron1.record(["v"])
neuron1.initialize(v=cellparams["e_rev_leak"])

neuron2 = sim.create(sim.HH_cond_exp(**cellparams))
neuron2.record(["v"])
neuron2.initialize(v=cellparams["e_rev_leak"])

spike_times = [2.0]
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

sim.Projection(spike_source, neuron1, sim.OneToOneConnector(),
               sim.StaticSynapse(weight=0.076, delay=0.1),
               receptor_type='excitatory')
sim.Projection(spike_source, neuron2, sim.OneToOneConnector(),
               sim.StaticSynapse(weight=0.0755, delay=0.1),
               receptor_type='excitatory')

sim.run(tEnd)

data1 = neuron1.get_data()
data2 = neuron2.get_data()
signal_names = [s.name for s in data1.segments[0].analogsignalarrays]
stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)
stat_syn_rout = p.StaticSynapse(weight=0.0, delay=1)

######################
###### Connections #######
res_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['r2r'] = p.Projection(reservoir, reservoir, res_conn,
                                  synapse_type=stat_syn_res,
                                  receptor_type='excitatory')
connections['inp2r'] = p.Projection(input_neurons, reservoir, inp_conn,
                                    synapse_type=stat_syn_input,
                                    receptor_type='excitatory')
connections['r2rout'] = p.Projection(reservoir, readout_neurons, rout_conn,
                                     synapse_type=stat_syn_rout,
                                     receptor_type='excitatory')
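A short hedged sketch (not in the original): once these projections exist, the reservoir-to-readout weights could be read back with the standard PyNN accessor, e.g. for offline training of the readout.

# Sketch only; assumes a PyNN >= 0.8 backend where Projection.get is available.
w_rout = connections['r2rout'].get('weight', format='array')
print("readout weight matrix shape:", w_rout.shape)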
def test_callback(data_input):
    global message
    message = data_input.actual.positions
    msg_list = list(message)
    #msg_list[0] = int(message[0].encode('hex'), 16)
    #msg_list = int(message.encode('hex'), 16)
    #print('============= Received image data.', message)
    rospy.loginfo('=====received data %r', msg_list[0])

    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt)  # 0.1 ms

    pop_1 = p.Population(1, p.IF_curr_exp, {}, label="pop_1")
    #input = p.Population(1, p.SpikeSourceArray, {'spike_times': [[0, 3, 6]]}, label='input')
    input = p.Population(1, p.SpikeSourcePoisson, {'rate': (msg_list[0] + 1.6) * 100})
    stat_syn = p.StaticSynapse(weight=50.0, delay=1)
    input_proj = p.Projection(input, pop_1, p.OneToOneConnector(),
                              synapse_type=stat_syn, receptor_type='excitatory')

    pop_1.record(['v', 'spikes'])
    p.run(10)
    pop_1_data = pop_1.get_data()

    spikes = pop_1_data.segments[0].spiketrains[0]
    mean_rate = int(gaussian_convolution(spikes, dt))
    rospy.loginfo('=====mean_rate %r', mean_rate)
    # mean_rate = 64
    rate_command = mean_rate  # rate coding of the spike train
    '''
    pub = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist, queue_size=10)
    # construct the output command
    command = Twist()
    command.linear.x = rate_command*0.02
    command.angular.z = rate_command/50000.
    pub.publish(command)
    '''
    pub = rospy.Publisher('/arm_controller/follow_joint_trajectory/goal',
                          FollowJointTrajectoryActionGoal, queue_size=10)
    command = FollowJointTrajectoryActionGoal()
    command.header.stamp = rospy.Time.now()
    command.goal.trajectory.joint_names = ['elbow']
    point = JointTrajectoryPoint()
    point.positions = [rate_command / 10]
    point.time_from_start = rospy.Duration(1)
    command.goal.trajectory.points.append(point)
    pub.publish(command)
    rospy.loginfo('=====send command %r', command.goal.trajectory.points[0])

    fig_settings = {
        'lines.linewidth': 0.5,
        'axes.linewidth': 0.5,
        'axes.labelsize': 'small',
        'legend.fontsize': 'small',
        'font.size': 8
    }
    plt.rcParams.update(fig_settings)
    fig1 = plt.figure(1, figsize=(6, 8))

    def plot_spiketrains(segment):
        for spiketrain in segment.spiketrains:
            y = np.ones_like(spiketrain) * spiketrain.annotations['source_id']
            plt.plot(spiketrain, y, '.')
        plt.ylabel(segment.name)
        plt.setp(plt.gca().get_xticklabels(), visible=False)

    def plot_signal(signal, index, colour='b'):
        label = "Neuron %d" % signal.annotations['source_ids'][index]
        plt.plot(signal.times, signal[:, index], colour, label=label)
        plt.ylabel("%s (%s)" % (signal.name, signal.units._dimensionality.string))
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.legend()

    print("now plotting the network---------------")
    rospy.loginfo('--------now plotting---------------')
    n_panels = sum(a.shape[1] for a in pop_1_data.segments[0].analogsignalarrays) + 2
    plt.subplot(n_panels, 1, 1)
    plot_spiketrains(pop_1_data.segments[0])
    panel = 3
    for array in pop_1_data.segments[0].analogsignalarrays:
        for i in range(array.shape[1]):
            plt.subplot(n_panels, 1, panel)
            plot_signal(array, i, colour='bg'[panel % 2])
            panel += 1
    plt.xlabel("time (%s)" % array.times.units._dimensionality.string)
    plt.setp(plt.gca().get_xticklabels(), visible=True)
# from numpy import r

input = sim.Population(3, sim.SpikeSourceArray)

numpy.random.seed(12345)
input[0].spike_times = numpy.add.accumulate(
    numpy.random.exponential(1000.0 / 100.0, size=1000))
input[1].spike_times = numpy.add.accumulate(
    numpy.random.exponential(1000.0 / 20.0, size=1000))
input[2].spike_times = numpy.add.accumulate(
    numpy.random.exponential(1000.0 / 50.0, size=1000))

connector = sim.OneToOneConnector(weights=1.0, delays=0.5)
conn = [
    sim.Projection(input[0:1], cells, connector, target='AMPA_spikeinput'),
    sim.Projection(input[1:2], cells, connector, target='GABAa_spikeinput'),
    sim.Projection(input[2:3], cells, connector, target='GABAb_spikeinput')
]

cells._record('iaf_V')
cells._record('AMPA_g')
cells._record('GABAa_g')
cells._record('GABAb_g')
cells.record()

sim.run(100.0)

cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
cells.recorders['AMPA_g'].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
    1, sim.SpikeSourcePoisson(duration=simparams['duration'],
                              rate=simparams['input_rate']))
inp.label = 'input cell'
outp = sim.Population(1, sim.IF_curr_exp, cellparams=cellparams)
outp.label = 'output cell'

inp.record('spikes')
outp.record(['v', 'spikes'])

synapse = sim.StaticSynapse(weight=1, delay=simparams['delay'])
connector = sim.OneToOneConnector()
connection = sim.Projection(inp, outp, connector, synapse)


def report_time(t):
    print("Time: {}".format(t))
    return t + simparams['dt']


par = 'i_offset'
for p in [0.01]:
    outp.set(**{par: p})
    cellparams[par] = p
    outp.initialize(v=cellparams['v_rest'])
    sim.run(simparams['duration'], callbacks=[report_time])
    sim.reset(annotations={par: p})

inp_data = inp.get_data()
outp_data = outp.get_data()
for syn_type in ('exc', 'inh'):
    populations[syn_type] = [sim.Population(population_size, sim.IF_cond_exp,
                                            neuron_parameters)
                             for i in range(n_populations)]

# --- Connect the populations in a chain -------
connector_exc_exc = sim.AllToAllConnector(weights=weight_exc_exc, delays=delay)
connector_exc_inh = sim.AllToAllConnector(weights=weight_exc_inh, delays=delay)
connector_inh_exc = sim.AllToAllConnector(weights=weight_inh_exc, delays=delay)

for i in range(n_populations):
    j = (i + 1) % n_populations
    prj_exc_exc = sim.Projection(populations['exc'][i], populations['exc'][j],
                                 connector_exc_exc, target='excitatory')
    prj_exc_inh = sim.Projection(populations['exc'][i], populations['inh'][j],
                                 connector_exc_inh, target='excitatory')
    prj_inh_exc = sim.Projection(populations['inh'][i], populations['exc'][i],
                                 connector_inh_exc, target='inhibitory')

# --- Create and connect stimulus --------------
numpy.random.seed(rng_seed)
stim_spikes = numpy.random.normal(loc=stimulus_onset, scale=stimulus_sigma,
                                  size=population_size)
stim_spikes.sort()
stimulus = sim.Population(1, sim.SpikeSourceArray, {'spike_times': stim_spikes})
prj_stim_exc = sim.Projection(stimulus, populations['exc'][0],
stat_syn_inh = p.StaticSynapse(weight=20.0, delay=1)
stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)

######################
###### Connections #######
exc_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inh_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['e2e'] = p.Projection(reservoir_exc, reservoir_exc, exc_conn,
                                  synapse_type=stat_syn_exc,
                                  receptor_type='excitatory')
connections['e2i'] = p.Projection(reservoir_exc, reservoir_inh, exc_conn,
                                  synapse_type=stat_syn_exc,
                                  receptor_type='excitatory')
connections['i2e'] = p.Projection(reservoir_inh, reservoir_exc, inh_conn,
                                  synapse_type=stat_syn_inh,
                                  receptor_type='inhibitory')
connections['i2i'] = p.Projection(reservoir_inh, reservoir_inh, inh_conn,
#generate_spike_times = [[0], [500], [1000], [1500], [2000], [2500], [3000],
#                        [3500], [4000], [4500]]  #, [5000]]
#spike_source = sim.Population(n_neurons,
#                              sim.SpikeSourceArray(
#                                  spike_times=generate_spike_times))
spike_source = sim.Population(
    n_neurons, sim.SpikeSourceArray(spike_times=[0, 1000, 3000]))

# populations and projections
layer_1 = sim.Population(n_neurons, sim.IF_curr_alpha(), label="layer_1")
'''
layer_2 = sim.Population(n_neurons, sim.IF_curr_alpha(), label="layer_2")
'''
proj_src_2_l1 = sim.Projection(spike_source, layer_1, sim.OneToOneConnector())
'''
proj_l1_2_l2 = sim.Projection(layer_1, layer_2, sim.OneToOneConnector())
'''

# recording must be set up before the simulation is run
layer_1.record(('spikes', 'v'))
#layer_2.record(['spikes', 'v'])

sim.run(sim_time)

# plot the spikes
data_1 = layer_1.get_data().segments[0]
vm = data_1.filter(name="v")
Figure(Panel(vm, ylabel="Memb. Pot."),
# a sub-set of the inh_gamma_generators is silenced after a time "tinit"
myConnectorE_E_silenced = sim.FixedNumberPreConnector(NumOfConE_E,
                                                      weights=globalWeight, delays=0.1)
myConnectorE_I_silenced = sim.FixedNumberPreConnector(NumOfConE_I,
                                                      weights=globalWeight, delays=0.1)
#myConnectorI = sim.AllToAllConnector(weights=globalWeight, delays=0.1)

# InhGamma generators need "_S" (selective) type synapses.
# Passing this class to the Projection in a ComposedSynapseType object
# is how to get them:
sd = NativeSynapseType('static_synapse_S')
#prjE_E = sim.Projection(gammaE_E, popE, method=myConnectorE_E, target='excitatory', synapse_type=sd)
#prjE_I = sim.Projection(gammaE_I, popI, method=myConnectorE_I, target='excitatory', synapse_type=sd)
prjE_E = sim.Projection(gammaE_E, popE, method=myConnectorE_E, target='excitatory')
prjE_I = sim.Projection(gammaE_I, popI, method=myConnectorE_I, target='excitatory')

# silenced excitatory input
prjE_E_silenced = sim.Projection(gammaE_E_silenced, popE, method=myConnectorE_E_silenced,
                                 target='excitatory', synapse_type=sd)
prjE_I_silenced = sim.Projection(gammaE_I_silenced, popI, method=myConnectorE_I_silenced,
                                 target='excitatory', synapse_type=sd)

# return to default synapse type (non-selective)
sd = None
#prjI_E = sim.Projection(poissonI_E, popE, method=myConnectorI, target='inhibitory', synapse_type=sd)
#prjI_I = sim.Projection(poissonI_I, popI, method=myConnectorI, target='inhibitory', synapse_type=sd)
fc0 = sim.Population(w1.shape[0], fc_celltype)
fc0.initialize(**cellvalues)
fc0.set(v_thresh=vth0)

fc1 = sim.Population(w1.shape[0], fc_celltype)
fc1.initialize(**cellvalues)
fc1.set(v_thresh=vth1)
#fc1.set(i_offset=b1)

fc2 = sim.Population(10, fc_celltype)
fc2.initialize(**cellvalues)
fc2.set(v_thresh=vth2)
#fc2.set(i_offset=b2)

proj0 = sim.Projection(input_pop, fc0, connector=sim.FromListConnector(conns_0))
proj1 = sim.Projection(fc0, fc1, connector=sim.FromListConnector(conns_1))
proj2 = sim.Projection(fc1, fc2, connector=sim.FromListConnector(conns_2))

#input_pop.record('spikes')
fc0.record(['spikes', 'v'])
#fc1.record(['spikes', 'v'])
#fc2.record(['spikes', 'v'])
fc2.record(['spikes'])

sim.run(duration - dt)

#spiketrains_i = input_pop.get_data().segments[-1].spiketrains
#spiketrains_0 = fc0.get_data().segments[-1].spiketrains
#spiketrains_1 = fc1.get_data().segments[-1].spiketrains
    for i in range(weights[layer].shape[1]):
        for j in range(weights[layer].shape[0]):
            if float(weights[layer][j, i]) < 0.0:
                inh_synapses.append([i, j, -1.0 * weights[layer][j, i], delay])
            else:
                exc_synapses.append([i, j, weights[layer][j, i], delay])

    pops.append(sim.Population(weights[layer].shape[0], fc_celltype))
    vth = v_th - biases[layer]
    vth[vth < 0.0] = 0.001
    pops[layer + 1].set(v_thresh=vth)

    sim.Projection(pops[layer], pops[layer + 1],
                   connector=sim.FromListConnector(inh_synapses),
                   receptor_type='inhibitory')
    sim.Projection(pops[layer], pops[layer + 1],
                   connector=sim.FromListConnector(exc_synapses),
                   receptor_type='excitatory')

pops[-1].record(['spikes'])
pops[0].record(['spikes'])
pops[1].record(['spikes', 'v'])

num_to_test = 10
acc = 0.0
num_timesteps = duration / dt
def estimate_kb(cell_params_lif):
    cell_para = copy.deepcopy(cell_params_lif)
    random.seed(0)
    p.setup(timestep=1.0, min_delay=1.0, max_delay=16.0)
    run_s = 10.
    runtime = 1000. * run_s
    max_rate = 1000.
    ee_connector = p.OneToOneConnector(weights=1.0, delays=2.0)

    pop_list = []
    pop_output = []
    pop_source = []
    x = np.arange(0., 1.01, 0.1)
    count = 0
    trail = 10

    for i in x:
        for j in range(trail):  # trials for averaging
            pop_output.append(p.Population(1, p.IF_curr_exp, cell_para))
            poisson_spikes = mu.poisson_generator(i * max_rate, 0, runtime)
            pop_source.append(
                p.Population(1, p.SpikeSourceArray, {'spike_times': poisson_spikes}))
            p.Projection(pop_source[count], pop_output[count], ee_connector,
                         target='excitatory')
            pop_output[count].record()
            count += 1

    count = 0
    for i in x:
        cell_para['i_offset'] = i
        pop_list.append(p.Population(1, p.IF_curr_exp, cell_para))
        pop_list[count].record()
        count += 1
    pop_list[count - 1].record_v()

    p.run(runtime)

    rate_I = np.zeros(count)
    rate_P = np.zeros(count)
    rate_P_max = np.zeros(count)
    rate_P_min = np.ones(count) * 1000.
    for i in range(count):
        spikes = pop_list[i].getSpikes(compatible_output=True)
        rate_I[i] = len(spikes) / run_s
        for j in range(trail):
            spikes = pop_output[i * trail + j].getSpikes(compatible_output=True)
            spike_num = len(spikes) / run_s
            rate_P[i] += spike_num
            if spike_num > rate_P_max[i]:
                rate_P_max[i] = spike_num
            if spike_num < rate_P_min[i]:
                rate_P_min[i] = spike_num
        rate_P[i] /= trail
    '''
    #plot_spikes(spikes, 'Current = 10. mA')
    plt.plot(x, rate_I, label='current')
    plt.plot(x, rate_P, label='Poisson input')
    plt.fill_between(x, rate_P_min, rate_P_max, facecolor='green', alpha=0.3)
    '''
    x0 = np.where(rate_P > 1.)[0][0]
    x1 = 4
    k = (rate_P[x1] - rate_P[x0]) / (x[x1] - x[x0])
    '''
    plt.plot(x, k*(x-x[x0])+rate_P[x0], label='linear')
    plt.legend(loc='upper left', shadow=True)
    plt.grid('on')
    plt.show()
    '''
    p.end()
    return k, x[x0], rate_P[x0]
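A short usage sketch (not from the original), assuming cell_params_lif is an IF_curr_exp parameter dict defined elsewhere; it only illustrates how the returned slope and onset point could map a target output rate back to an equivalent input current.

# Hypothetical use of the fitted linear region returned by estimate_kb().
k, i_at_onset, rate_at_onset = estimate_kb(cell_params_lif)
target_rate = 50.0  # spikes/s, chosen for illustration
i_offset_needed = i_at_onset + (target_rate - rate_at_onset) / k
print("i_offset for %.1f Hz: %.3f nA" % (target_rate, i_offset_needed))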
def create_S2_layers(C1_layers: Dict[float, Sequence[Layer]], feature_size,
                     s2_prototype_cells, refrac_s2=.1, stdp=True,
                     inhibition=True) -> Dict[float, List[Layer]]:
    """
    Creates all prototype S2 layers for all sizes.

    Parameters:
        `C1_layers`: A dictionary containing for each size a list of C1 layers,
                     one for each feature
        `feature_size`: Edge length of the square S2 receptive field
        `s2_prototype_cells`: Number of S2 prototype cells
        `refrac_s2`: Refractory period of the S2 cells
        `stdp`: Whether to use plastic (STDP) connections

    Returns:
        A dictionary containing for each size a list of different S2 layers,
        one for each prototype.
    """
    f_s = feature_size
    initial_weight = 25 / (f_s * f_s)
    weight_rng = rnd.RandomDistribution('normal', mu=initial_weight,
                                        sigma=initial_weight / 20)
    i_offset_rng = rnd.RandomDistribution('normal', mu=.5, sigma=.45)
    weights = list(map(lambda x: weight_rng.next() * 1000, range(4 * f_s * f_s)))
    S2_layers = {}
    i_offsets = list(map(lambda x: i_offset_rng.next(), range(s2_prototype_cells)))
    ndicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    ondicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    omdicts = list(map(lambda x: {}, range(s2_prototype_cells)))

    for size, layers in C1_layers.items():
        n, m = how_many_squares_in_shape(layers[0].shape, (f_s, f_s), f_s)
        if stdp:
            l_i_offsets = [list(map(lambda x: rnd.RandomDistribution(
                               'normal', mu=i_offsets[i], sigma=.25).next(),
                               range(n * m)))
                           for i in range(s2_prototype_cells)]
        else:
            l_i_offsets = np.zeros((s2_prototype_cells, n * m))
        print('S2 Shape', n, m)
        layer_list = list(map(
            lambda i: Layer(
                sim.Population(n * m,
                               sim.IF_curr_exp(i_offset=l_i_offsets[i],
                                               tau_refrac=refrac_s2),
                               structure=space.Grid2D(aspect_ratio=m / n),
                               label=str(i)),
                (n, m)),
            range(s2_prototype_cells)))
        for S2_layer in layer_list:
            for C1_layer in layers:
                S2_layer.projections[C1_layer.population.label] =\
                    connect_layer_to_layer(C1_layer, S2_layer, (f_s, f_s), f_s,
                                           [[w] for w in weights[:f_s * f_s]],
                                           stdp=stdp,
                                           initial_weight=initial_weight,
                                           ndicts=ndicts, ondicts=ondicts,
                                           omdicts=omdicts)
        S2_layers[size] = layer_list

    # Set the labels of the shared connections
    if stdp:
        t = time.clock()
        print('Set shared labels')
        for s2_label_dicts in [ndicts, ondicts, omdicts]:
            for i in range(s2_prototype_cells):
                w_iter = weights.__iter__()
                for label, (source, target) in s2_label_dicts[i].items():
                    conns = nest.GetConnections(source=source, target=target)
                    nest.SetStatus(conns, {'label': label,
                                           'weight': w_iter.__next__()})
        print('Setting labels took', time.clock() - t)

    if inhibition:
        # Create inhibitory connections between the S2 cells
        # First between the neurons of the same layer...
        inh_weight = -10
        inh_delay = .1
        print('Create S2 self inhibitory connections')
        for layer_list in S2_layers.values():
            for layer in layer_list:
                sim.Projection(layer.population, layer.population,
                               sim.AllToAllConnector(allow_self_connections=False),
                               sim.StaticSynapse(weight=inh_weight, delay=inh_delay))
        # ...and between the layers
        print('Create S2 cross-scale inhibitory connections')
        for i in range(s2_prototype_cells):
            for layer_list1 in S2_layers.values():
                for layer_list2 in S2_layers.values():
                    if layer_list1[i] != layer_list2[i]:
                        sim.Projection(layer_list1[i].population,
                                       layer_list2[i].population,
                                       sim.AllToAllConnector(),
                                       sim.StaticSynapse(weight=inh_weight,
                                                         delay=inh_delay))

    if stdp:
        # Create the inhibition between different prototype layers
        print('Create S2 cross-prototype inhibitory connections')
        for layer_list in S2_layers.values():
            for layer1 in layer_list:
                for layer2 in layer_list:
                    if layer1 != layer2:
                        sim.Projection(layer1.population, layer2.population,
                                       sim.OneToOneConnector(),
                                       sim.StaticSynapse(weight=inh_weight - 1,
                                                         delay=inh_delay))
    return S2_layers
    for variable in s['variables']:
        if getattr(population, variable, None) is None:
            setattr(population, variable, s[label][variable])
    if label != 'InputLayer':
        population.set(i_offset=s[label]['i_offset'])
    layers.append(population)
print('assembly loaded')

# ------ Load weights and delays --------
for i in range(len(layers) - 1):
    print('Loading connections for layer ' + str(layers[i].label))
    filepath = os.path.join(layers_path, layers[i + 1].label)
    conn_list = read_from_file2list(filepath)
    exc_connector = sim.FromListConnector(conn_list[0])
    pro_exc = sim.Projection(layers[i], layers[i + 1], connector=exc_connector,
                             receptor_type='excitatory')
    if conn_list[1] != []:
        inh_connector = sim.FromListConnector(conn_list[1])
        pro_inh = sim.Projection(layers[i], layers[i + 1], connector=inh_connector,
                                 receptor_type='inhibitory')
print('connections loaded')

# -------- Cell initialization -----------
#vars_to_record = ['spikes', 'v']
#if 'spikes' in vars_to_record:
#    layers[0].record(['spikes'])  # Input layer has no 'v'
for k in range(1, len(layers)):
    layers[k].set(**cellparams)
def setup(self):
    # set up layers according to backend
    self.layers = []
    if self.backend == 'spinnaker':
        spin_sim.setup(timestep=1.0, time_scale_factor=1.0, min_delay=1.0, max_delay=None)
        # set neuron hyperparameters
        # tau_m should not be set too high, because it can easily lead to an overflow
        ctx_parameters = {'cm': 1.0, 'i_offset': 0.0, 'tau_m': 100.0, 'tau_refrac': 0.0,
                          'v_reset': 0.0, 'v_rest': 0.0, 'v_thresh': 1.0}
        tc_parameters = ctx_parameters.copy()
        neuron_type = extra_models.IFCurDelta(**tc_parameters)
        for l in range(0, len(self.weights)):
            self.layers.append(spin_sim.Population(self.architecture[l + 1], neuron_type,
                                                   initial_values={'v': 0.0}))
        # set output layer threshold: it must be high enough that output neurons do not
        # spike, but not so high that it causes an overflow
        self.layers[-1].set(v_thresh=10000.0)
    elif self.backend == 'nest':
        nest_sim.setup(timestep=1.0, min_delay=1.0)
        # set neuron type; currently only iaf_psc_delta with one set of hyperparameters is supported
        neuron_type = nest_sim.native_cell_type('iaf_psc_delta')
        for l in range(0, len(self.weights) - 1):
            self.layers.append(nest_sim.Population(
                self.architecture[l + 1],
                neuron_type(V_th=1.0, t_ref=0.0, V_min=-np.inf, C_m=1.0,
                            V_reset=0.0, tau_m=10.0 ** 10, E_L=0.0, I_e=0.0)))
            self.layers[l].initialize(V_m=0.0)
        # set up the output layer separately, with an infinite threshold
        self.layers.append(nest_sim.Population(
            self.architecture[-1],
            neuron_type(V_th=np.inf, t_ref=0.0, V_min=-np.inf, C_m=1.0,
                        V_reset=0.0, tau_m=10.0 ** 10, E_L=0.0, I_e=0.0)))
        self.layers[-1].initialize(V_m=0.0)
    elif self.backend == 'pynn':
        raise Exception('The current implementation does not support native PyNN cell types. '
                        'The code after this exception acts as an example of how native PyNN '
                        'types would be used and can be modified. The backend still needs to '
                        'be specified and is set to nest in the code below.')
        # the following commented-out lines show how a native pynn type would be used
        '''
        nest_sim.setup(timestep=1.0, time_scale_factor=1.0, min_delay=1.0, max_delay=None)
        # conductance-based exponential neuron as an example
        ctx_parameters = {'cm': 1.0, 'e_rev_E': 0.0, 'e_rev_I': -65.0, 'i_offset': 0.0,
                          'tau_m': 10000.0, 'tau_refrac': 0.0, 'tau_syn_E': 0.01,
                          'tau_syn_I': 0.01, 'v_reset': -65.0, 'v_rest': -65.0,
                          'v_thresh': -64.0}
        #tc_parameters = ctx_parameters.copy()
        neuron_type = sim.IF_cond_exp(**tc_parameters)
        for l in range(0, len(self.weights)):
            self.layers.append(sim.Population(self.architecture[l+1], neuron_type,
                                              initial_values={'v': 0.0}))
        # output layer should not spike
        self.layers[-1].set(v_thresh=np.inf)
        '''
    else:
        raise Exception('backend not supported')

    # connect layers (the input layer is connected to the first layer in simulate()
    # by setting offsets)
    for l in range(0, len(self.layers) - 1):
        excitatory_connections = []
        inhibitory_connections = []
        # 1 and 2
        for i in range(0, len(self.layers[l])):
            for j in range(0, len(self.layers[l + 1])):
                if self.weights[l + 1][i][j].detach().item() >= 0:
                    excitatory_connections.append((i, j, self.weights[l + 1][i][j].detach().item(), 1.0))
                else:
                    inhibitory_connections.append((i, j, self.weights[l + 1][i][j].detach().item(), 1.0))
        # when using NEST or native PyNN models, connect has to be called explicitly;
        # when using SpiNNaker, connecting happens automatically
        if self.backend == 'pynn' or self.backend == 'nest':
            excitatory_connector = nest_sim.FromListConnector(excitatory_connections,
                                                              column_names=["weight", "delay"])
            excitatory_projection = nest_sim.Projection(self.layers[l], self.layers[l + 1],
                                                        connector=excitatory_connector)
            excitatory_connector.connect(excitatory_projection)
        elif self.backend == 'spinnaker':
            excitatory_connector = spin_sim.FromListConnector(excitatory_connections,
                                                              column_names=["weight", "delay"])
            excitatory_projection = spin_sim.Projection(self.layers[l], self.layers[l + 1],
                                                        connector=excitatory_connector)

        # when using NEST or native PyNN models, connect has to be called explicitly;
        # when using SpiNNaker, connecting happens automatically
        if self.backend == 'pynn' or self.backend == 'nest':
            inhibitory_connector = nest_sim.FromListConnector(inhibitory_connections,
                                                              column_names=["weight", "delay"])
            inhibitory_projection = nest_sim.Projection(self.layers[l], self.layers[l + 1],
                                                        inhibitory_connector,
                                                        receptor_type='inhibitory')
            inhibitory_connector.connect(inhibitory_projection)
        elif self.backend == 'spinnaker':
            inhibitory_connector = spin_sim.FromListConnector(inhibitory_connections,
                                                              column_names=["weight", "delay"])
            inhibitory_projection = spin_sim.Projection(self.layers[l], self.layers[l + 1],
                                                        inhibitory_connector,
                                                        receptor_type='inhibitory')

    # set biases (constant input currents)
    for l in range(0, len(self.layers)):
        for i in range(0, len(self.layers[l])):
            # use i_offset on sPyNNaker instead of a DCSource
            offset = self.biases[l][i].detach().item()
            if self.backend == 'nest':
                self.layers[l][i:i + 1].set(I_e=offset)
            elif self.backend == 'pynn' or self.backend == 'spinnaker':
                self.layers[l][i:i + 1].set(i_offset=offset)

    # record the membrane potentials of the last layer
    if self.backend == 'nest' or self.backend == 'pynn':
        self.layers[-1].record('V_m')
    elif self.backend == 'spinnaker':
        self.layers[-1].record('v')
def test_create_simple(self):
    prj = sim.Projection(self.p1, self.p2, self.all2all, synapse_type=self.syn_a2a)
    pop = p.Population(
        num_output, p.SpikeSourcePoisson,
        {'rate': MIN_rate,  # test_x[i]
         'start': (epo * num_epo + i) * (dur_train + silence),
         'duration': dur_train})
    temp_popv = p.PopulationView(pop, np.nonzero(train_y[ind])[0])
    temp_popv.set('rate', teaching_rate)
    TeachingPoission.append(pop)

#print(ImagePoission[10].get('start'))
ee_connector = p.OneToOneConnector(weights=3.0)
for i in range(num_train * num_epo * 1):
    p.Projection(ImagePoission[i], pop_input, ee_connector, target='excitatory')

pop_output = p.Population(num_output, p.IF_curr_exp, cell_params_lif)

weight_max = 1.3
stdp_model = p.STDPMechanism(
    timing_dependence=p.SpikePairRule(tau_plus=10., tau_minus=10.0),
    weight_dependence=p.MultiplicativeWeightDependence(w_min=0.0, w_max=weight_max,
                                                       A_plus=0.01, A_minus=0.01)
    # AdditiveWeightDependence
)
'''
weight_distr = p.RandomDistribution(distribution='normal', parameters=[1, 0.1])
'''
def __init__(self, sim_params=None, cell_params=None, verbose=True):
    '''
    Parameters: Stimulus, Population, Synapses, Recording, Running
    '''
    self.verbose = verbose
    self.sim_params = sim_params
    self.cell_params = cell_params

    sim.setup()  # spike_precision='on_grid')  # timestep=.1)

    N_inh = int(sim_params['nb_neurons'] * sim_params['p'])  # total pop * proportion of inhib
    self.spike_source = sim.Population(
        N_inh, sim.SpikeSourcePoisson(rate=sim_params['input_rate'],
                                      duration=sim_params['simtime'] / 2))

    # orientation stimulus, see bottom section of notebook
    angle = 1. * np.arange(N_inh)
    rates = self.tuning_function(angle, sim_params['angle_input'] / 180. * N_inh,
                                 sim_params['b_input'], N_inh)
    rates /= rates.mean()
    rates *= sim_params['input_rate']
    for i, cell in enumerate(self.spike_source):
        cell.set_parameters(rate=rates[i])

    # neuron model selection
    if sim_params['neuron_model'] == 'IF_cond_alpha':
        model = sim.IF_cond_alpha  # LIF with alpha-shaped conductances
    else:
        model = sim.IF_cond_exp  # LIF with exponential conductances

    # populations
    E_neurons = sim.Population(
        N_inh, model(**cell_params),
        initial_values={'v': rnd('uniform', (sim_params['v_init_min'],
                                             sim_params['v_init_max']))},
        label="Excitateurs")
    I_neurons = sim.Population(
        int(sim_params['nb_neurons'] - N_inh), model(**cell_params),
        initial_values={'v': rnd('uniform', (sim_params['v_init_min'],
                                             sim_params['v_init_max']))},
        label="Inhibiteurs")

    # input to excitatory neurons
    input_exc = sim.Projection(
        self.spike_source, E_neurons, sim.OneToOneConnector(),
        sim.StaticSynapse(weight=sim_params['w_input_exc'],
                          delay=sim_params['s_input_exc']))

    # loop through connection types and use the associated params; can be a bit slow
    conn_types = ['exc_inh', 'inh_exc', 'exc_exc', 'inh_inh']  # connection types
    '''
    self.proj = self.set_synapses(conn_types=conn_types, sim_params=sim_params,
                                  E_neurons=E_neurons, I_neurons=I_neurons,
                                  N_inh=N_inh)
    '''
    # multithreading support -- DOES NOT WORK, LEAVE N_JOBS AT 1
    self.proj = Parallel(n_jobs=1, backend='multiprocessing')(
        delayed(self.set_synapses)(conn_type, sim_params=sim_params,
                                   E_neurons=E_neurons, I_neurons=I_neurons,
                                   N_inh=N_inh, conn_types=conn_types,
                                   verbose=verbose)
        for conn_type in range(len(conn_types)))
    if verbose:
        print('Done building synapses!')

    # record
    self.spike_source.record('spikes')
    E_neurons.record('spikes')
    I_neurons.record('spikes')

    # run
    if verbose:
        print('Running simulation..')
    sim.run(sim_params['simtime'])
    if verbose:
        print('Done running!')

    # get the spikes
    self.E_spikes = E_neurons  # .get_data().segments[0]
    self.I_spikes = I_neurons  # .get_data().segments[0]
    self.P_spikes = self.spike_source  # .get_data().segments[0]
def run_retina(params):
    """Run the retina using the specified parameters."""
    tmpdir = tempfile.mkdtemp()

    print("Setting up simulation")
    pyNN.Timer.start()  # start timer on construction
    pyNN.setup(timestep=params['dt'], max_delay=params['syn_delay'])
    pyNN.pynest.setDict([0], {'threads': params['threads']})
    pyNN.setRNGseeds(params['kernelseeds'])

    N = params['N']
    phr_ON = pyNN.Population((N, N), 'dc_generator')
    phr_OFF = pyNN.Population((N, N), 'dc_generator')
    noise_ON = pyNN.Population((N, N), 'noise_generator',
                               {'mean': 0., 'std': params['noise_std']})
    noise_OFF = pyNN.Population((N, N), 'noise_generator',
                                {'mean': 0., 'std': params['noise_std']})

    phr_ON.set({'start': params['simtime'] / 4,
                'stop': params['simtime'] / 4 * 3})
    phr_ON.tset('amplitude', params['amplitude'] * params['snr'])
    phr_OFF.set({'start': params['simtime'] / 4,
                 'stop': params['simtime'] / 4 * 3})
    phr_OFF.tset('amplitude', -params['amplitude'] * params['snr'])

    # target ON and OFF populations
    out_ON = pyNN.Population((N, N), 'iaf_sfa_neuron', params['parameters_gc'])
    out_OFF = pyNN.Population((N, N), 'iaf_sfa_neuron', params['parameters_gc'])

    #print("Connecting the network")
    retina_proj_ON = pyNN.Projection(phr_ON, out_ON, 'oneToOne')
    retina_proj_ON.setWeights(params['weight'])
    retina_proj_OFF = pyNN.Projection(phr_OFF, out_OFF, 'oneToOne')
    retina_proj_OFF.setWeights(params['weight'])
    noise_proj_ON = pyNN.Projection(noise_ON, out_ON, 'oneToOne')
    noise_proj_ON.setWeights(params['weight'])
    noise_proj_OFF = pyNN.Projection(noise_OFF, out_OFF, 'oneToOne')
    noise_proj_OFF.setWeights(params['weight'])

    out_ON_filename = os.path.join(tmpdir, 'out_on.gdf')
    out_OFF_filename = os.path.join(tmpdir, 'out_off.gdf')
    out_ON.record()
    out_OFF.record()

    # read out the time used for building
    buildCPUTime = pyNN.Timer.elapsedTime()

    print("Running simulation")
    pyNN.Timer.start()  # start timer on simulation
    pyNN.run(params['simtime'])
    simCPUTime = pyNN.Timer.elapsedTime()

    out_ON.printSpikes(out_ON_filename)
    out_OFF.printSpikes(out_OFF_filename)
    out_ON_DATA = tmpfile2spikelist(out_ON_filename, params['dt'])
    out_OFF_DATA = tmpfile2spikelist(out_OFF_filename, params['dt'])

    print("\nRetina Network Simulation:")
    print(params['description'])
    print("Number of Neurons : ", N ** 2)
    print("Output rate (ON)  : ", out_ON.meanSpikeCount(),
          "spikes/neuron in ", params['simtime'], "ms")
    print("Output rate (OFF) : ", out_OFF.meanSpikeCount(),
          "spikes/neuron in ", params['simtime'], "ms")
    print("Build time        : ", buildCPUTime, "s")
    print("Simulation time   : ", simCPUTime, "s")

    return out_ON_DATA, out_OFF_DATA