def setUp(self):
    sim.setup()
    self.p1 = sim.Population(7, sim.IF_cond_exp())
    self.p2 = sim.Population(4, sim.IF_cond_exp())
    self.p3 = sim.Population(5, sim.IF_curr_alpha())
    self.syn_rnd = sim.StaticSynapse(weight=0.123, delay=0.5)
    self.syn_a2a = sim.StaticSynapse(weight=0.456, delay=0.4)
    self.random_connect = sim.FixedNumberPostConnector(n=2)
    self.all2all = sim.AllToAllConnector()
def inhibitory_connect(layers, source, dest1, dest2, dest3, weight):
    """Connect a source layer one-to-one to three destination layers,
    all with the same static weight."""
    sim.Projection(layers[source].population, layers[dest1].population,
                   sim.OneToOneConnector(), sim.StaticSynapse(weight=weight))
    sim.Projection(layers[source].population, layers[dest2].population,
                   sim.OneToOneConnector(), sim.StaticSynapse(weight=weight))
    sim.Projection(layers[source].population, layers[dest3].population,
                   sim.OneToOneConnector(), sim.StaticSynapse(weight=weight))
def create_C2_layers(S2_layers: Dict[float, Sequence[Layer]],
                     s2_prototype_cells: int) -> List[sim.Population]:
    """
    Creates the populations of the C2 layer, one for each S2 prototype cell,
    containing only a single cell which max-pools the spikes of all layers of
    a prototype.

    Parameters:
        `S2_layers`: A dictionary containing for each scale a list of S2
                     layers, one for each prototype cell

        `s2_prototype_cells`: The number of S2 prototype cells

    Returns:
        A list of populations of size one, one population for each prototype
        cell
    """
    no_inh_w = 17.15            # synapse weight without S2 inhibitions
    with_inh_w = 4 * no_inh_w   # synapse weight with S2 inhibitions
    C2_populations = [sim.Population(1, sim.IF_curr_exp(), label=str(prot))
                      for prot in range(s2_prototype_cells)]
    total_connections = sum(map(lambda ll: ll[0].shape[0] * ll[0].shape[1],
                                S2_layers.values()))
    for s2ll in S2_layers.values():
        for prot in range(s2_prototype_cells):
            sim.Projection(s2ll[prot].population, C2_populations[prot],
                           sim.AllToAllConnector(),
                           sim.StaticSynapse(weight=with_inh_w / total_connections))
    return C2_populations
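# Hedged numeric sketch of the normalization above (the layer shapes are
# assumptions, not from the original): with two S2 scales of shape 8x8 and
# 4x4, total_connections = 64 + 16 = 80, so each synapse onto a C2 cell gets
# weight 4 * 17.15 / 80 = 0.8575.
example_total_connections = 8 * 8 + 4 * 4
print(4 * 17.15 / example_total_connections)  # 0.8575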
def getSynapseType(lat_conn_strength_params, use_stdp, tau_plus=20.0,
                   tau_minus=20.0, A_plus=0.01, A_minus=0.012, w_min=0,
                   w_max=1.0, static_weight=0.02):
    """
    For getting the required synapse type, fixed or STDP.

    Arguments:
        lat_conn_strength_params: parameters for the uniform distribution of
            initial STDP weights (the connections evolve during training anyway).
        use_stdp: flag selecting STDP; the other arguments are defaults.

    Return:
        the synapse_type to use
    """
    stdp_weight_distn = pynn.random.RandomDistribution(
        'uniform', lat_conn_strength_params)
    stdp = pynn.STDPMechanism(
        weight=stdp_weight_distn,
        timing_dependence=pynn.SpikePairRule(tau_plus=tau_plus,
                                             tau_minus=tau_minus,
                                             A_plus=A_plus, A_minus=A_minus),
        weight_dependence=pynn.AdditiveWeightDependence(w_min=w_min,
                                                        w_max=w_max))
    synapse_to_use = stdp if use_stdp else pynn.StaticSynapse(
        weight=static_weight)
    return synapse_to_use
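# Hedged usage sketch for getSynapseType() (the populations, the weight range
# (0.0, 0.02) and the `import pyNN.nest as pynn` alias are assumptions, not
# from the original): build an STDP synapse with uniformly drawn initial
# weights, or fall back to the fixed static weight when use_stdp is False.
import pyNN.nest as pynn
pynn.setup()
pre = pynn.Population(10, pynn.IF_curr_exp())
post = pynn.Population(10, pynn.IF_curr_exp())
lateral_syn = getSynapseType((0.0, 0.02), use_stdp=True)
lateral_proj = pynn.Projection(pre, post, pynn.AllToAllConnector(),
                               synapse_type=lateral_syn)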
def connect_layers(input_layer, output_layer, weights, i_s, j_s, i_e, j_e,
                   k_out, stdp=False, initial_weight=0, label_dicts=None):
    """
    Connects a neuron of an output layer to the corresponding square of an
    input layer. This is a helper function of connect_layer_to_layer().

    Returns:
        The created projection between the input and output layers
    """
    m = input_layer.shape[1]
    # Collect the flat indices of the input square [i_s, i_e) x [j_s, j_e)
    view_elements = []
    for i in range(i_s, i_e):
        for j in range(j_s, j_e):
            view_elements.append(m * i + j)
    if stdp:
        w_max = initial_weight * 15
        stdp_shared = sim.native_synapse_type('stdp_synapse')(
            Wmax=w_max * 1000, mu_plus=0.0, mu_minus=1.0)
        proj = sim.Projection(input_layer.population[view_elements],
                              output_layer.population[[k_out]],
                              sim.AllToAllConnector(), stdp_shared)
        ol = int(output_layer.population.label)
        il = input_layer.population.label
        out_neuron = output_layer.population[k_out]
        if label_dicts is None:
            for i in range(len(view_elements)):
                label = '{}_{}_{}'.format(ol, il, i)
                in_neuron = input_layer.population[view_elements[i]]
                conn = nest.GetConnections(source=[in_neuron],
                                           target=[out_neuron])
                nest.SetStatus(conn, {'label': label, 'weight': weights[i][0]})
        else:
            for i in range(len(view_elements)):
                label = '{}_{}_{}'.format(ol, il, i)
                if label not in label_dicts[ol]:
                    label_dicts[ol][label] = ([], [])
                in_neuron = input_layer.population[view_elements[i]]
                label_dicts[ol][label][0].append(in_neuron)
                label_dicts[ol][label][1].append(out_neuron)
    else:
        proj = sim.Projection(input_layer.population[view_elements],
                              output_layer.population[[k_out]],
                              sim.AllToAllConnector(),
                              sim.StaticSynapse(weight=weights))
    return proj
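# Hedged illustration of the index arithmetic above (m and the square bounds
# are assumed values, not from the original): the flat indices of a 2x2 square
# with rows 1..2 and columns 1..2 in a layer with m = 5 columns.
m_example = 5
square = [m_example * i + j for i in range(1, 3) for j in range(1, 3)]
print(square)  # [6, 7, 11, 12]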
def test_ticket244():
    nest = pyNN.nest
    nest.setup(threads=4)
    p1 = nest.Population(4, nest.IF_curr_exp())
    p1.record('spikes')
    poisson_generator = nest.Population(3, nest.SpikeSourcePoisson(rate=1000.0))
    conn = nest.OneToOneConnector()
    syn = nest.StaticSynapse(weight=1.0)
    nest.Projection(poisson_generator, p1.sample(3), conn, syn,
                    receptor_type="excitatory")
    nest.run(15)
    p1.get_data()
def test_ticket240():
    nest = pyNN.nest
    nest.setup(threads=4)
    parameters = {'Tau_m': 17.0}
    p1 = nest.Population(4, nest.IF_curr_exp())
    p2 = nest.Population(5, nest.native_cell_type("ht_neuron")(**parameters))
    conn = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=1.0)
    # This should be a nonstandard receptor type but I don't know of one to use.
    prj = nest.Projection(p1, p2, conn, syn, receptor_type='AMPA')
    connections = prj.get(('weight',), format='list')
    assert len(connections) > 0
def test_single_presynaptic_and_single_postsynaptic_neuron(self):
    prj = sim.Projection(self.p4, self.p4, sim.AllToAllConnector(),
                         synapse_type=sim.StaticSynapse(weight=0.123))
    assert prj.shape == (1, 1)
    weight = 0.456
    prj.set(weight=weight)
    self.assertEqual(prj.get("weight", format="array")[0][0], weight)
    weight_array = numpy.ones(prj.shape) * weight
    prj.set(weight=weight_array)
    self.assertTrue((weight_array == prj.get("weight", format="array")).all())
def create_corner_layer_for(input_layers):
    shape = input_layers[0].shape
    total_output_neurons = np.prod(shape)
    output_population = sim.Population(total_output_neurons, sim.IF_curr_exp(),
                                       label='corner')
    for layer in input_layers:
        sim.Projection(layer.population, output_population,
                       sim.OneToOneConnector(),
                       sim.StaticSynapse(weight=1., delay=0.5))
    return Layer(output_population, shape)
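# Hedged usage sketch for create_corner_layer_for() (the layer sizes are
# assumptions; it also assumes `sim`, `np` and the `Layer` wrapper used in
# these snippets are available and sim.setup() has been called): four input
# layers of the same shape are pooled one-to-one into one 'corner' layer.
input_layers = [Layer(sim.Population(9, sim.IF_curr_exp()), (3, 3))
                for _ in range(4)]
corner_layer = create_corner_layer_for(input_layers)
print(corner_layer.shape, corner_layer.population.size)  # (3, 3) 9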
def test_record_native_model():
    if not have_nest:
        raise SkipTest
    nest = pyNN.nest
    from pyNN.random import RandomDistribution
    init_logging(logfile=None, debug=True)

    nest.setup()

    parameters = {'tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron")(**parameters))
    p1.initialize(V_m=-70.0, Theta=-50.0)
    p1.set(theta_eq=-51.5)
    #assert_arrays_equal(p1.get('theta_eq'), -51.5*numpy.ones((10,)))
    assert_equal(p1.get('theta_eq'), -51.5)
    print(p1.get('tau_m'))
    p1.set(tau_m=RandomDistribution('uniform', low=15.0, high=20.0))
    print(p1.get('tau_m'))

    current_source = nest.StepCurrentSource(times=[50.0, 110.0, 150.0, 210.0],
                                            amplitudes=[0.01, 0.02, -0.02, 0.01])
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator")(rate=200.0))

    print("Setting up recording")
    p2.record('spikes')
    p1.record('V_m')

    connector = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=0.001)
    prj_ampa = nest.Projection(p2, p1, connector, syn, receptor_type='AMPA')

    tstop = 250.0
    nest.run(tstop)

    vm = p1.get_data().segments[0].analogsignals[0]
    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(vm.shape, (n_points, n_cells))
    assert vm.max() > 0.0  # should have some spikes
def create_local_inhibition(layers_dict):
    """
    Creates local inhibitory connections from a neuron to its neighbors within
    a fixed distance. The inhibition of neighboring neurons decreases linearly
    with distance, from 15% to 5%, as described in Masquelier's paper. Here we
    assume that a weight of -10 inhibits a neuron completely and take that as
    the starting point.
    """
    for size, layers in layers_dict.items():
        print('Create local inhibition for size', size)
        for layer in layers:
            sim.Projection(layer.population, layer.population,
                           sim.DistanceDependentProbabilityConnector(
                               'd < 5', allow_self_connections=False),
                           sim.StaticSynapse(weight='.25 * d - 1.75'),
                           space=space.Space(axes='xy'))
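# Hedged worked example of the distance-dependent weight expression used
# above: every connected pair at Euclidean distance d (with d < 5) gets the
# static weight 0.25 * d - 1.75, so the inhibition weakens with distance.
for d in (1.0, 2.0, 3.0, 4.0):
    print(d, 0.25 * d - 1.75)  # -1.5, -1.25, -1.0, -0.75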
def test():
    if not (HAVE_H5PY and HAVE_NEST):
        raise SkipTest
    sim.setup()
    p1 = sim.Population(10,
                        sim.IF_cond_exp(v_rest=-65,
                                        tau_m=lambda i: 10 + 0.1 * i,
                                        cm=RD('normal', (0.5, 0.05))),
                        label="population_one")
    p2 = sim.Population(20,
                        sim.IF_curr_alpha(v_rest=-64,
                                          tau_m=lambda i: 11 + 0.1 * i),
                        label="population_two")
    prj = sim.Projection(p1, p2,
                         sim.FixedProbabilityConnector(p_connect=0.5),
                         synapse_type=sim.StaticSynapse(
                             weight=RD('uniform', [0.0, 0.1]), delay=0.5),
                         receptor_type='excitatory')
    net = Network(p1, p2, prj)

    export_to_sonata(net, "tmp_serialization_test", overwrite=True)
    net2 = import_from_sonata("tmp_serialization_test/circuit_config.json", sim)

    for orig_population in net.populations:
        imp_population = net2.get_component(orig_population.label)
        assert orig_population.size == imp_population.size
        for name in orig_population.celltype.default_parameters:
            assert_array_almost_equal(orig_population.get(name),
                                      imp_population.get(name), 12)

    w1 = prj.get('weight', format='array')
    prj2 = net2.get_component(asciify(prj.label).decode('utf-8') + "-0")
    w2 = prj2.get('weight', format='array')
    assert_array_almost_equal(w1, w2, 12)
def set_synapses(self, conn_type, sim_params, E_neurons, I_neurons, N_inh,
                 conn_types, verbose):
    syn = {}
    proj = {}

    verbose = True
    if verbose:
        print('Building %s synapses..' % conn_types[conn_type])

    weight = sim_params['w_{}'.format(conn_types[conn_type])]
    delay = sim_params['s_{}'.format(conn_types[conn_type])]
    syn[conn_types[conn_type]] = sim.StaticSynapse(delay=delay)

    if conn_types[conn_type][:3] == 'exc':  # prefix: connection is FROM the excitatory population
        pre_neurons = E_neurons
        receptor_type = 'excitatory'
    else:  # FROM the inhibitory population
        pre_neurons = I_neurons
        receptor_type = 'inhibitory'

    if conn_types[conn_type][-3:] == 'exc':  # suffix: connection is TO the excitatory population
        post_neurons = E_neurons
    else:  # TO the inhibitory population
        post_neurons = I_neurons

    sparseness = sim_params['c_{}'.format(conn_types[conn_type])]
    proj[conn_types[conn_type]] = sim.Projection(
        pre_neurons, post_neurons,
        connector=sim.FixedProbabilityConnector(sparseness,
                                                rng=sim.NumpyRNG(seed=42)),
        synapse_type=syn[conn_types[conn_type]],
        receptor_type=receptor_type)

    bw = sim_params['b_{}'.format(conn_types[conn_type])]
    angle_pre = 1. * np.arange(proj[conn_types[conn_type]].pre.size)
    angle_post = 1. * np.arange(proj[conn_types[conn_type]].post.size)
    w_ij = self.tuning_function(angle_pre[:, np.newaxis],
                                angle_post[np.newaxis, :], bw, N_inh) * weight
    proj[conn_types[conn_type]].set(weight=w_ij)

    return proj
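# Hedged illustration of the string-slicing convention used in set_synapses():
# the prefix of a connection-type name selects the presynaptic population and
# the suffix the postsynaptic one (e.g. 'exc_inh' is excitatory -> inhibitory).
conn_name = 'exc_inh'
print(conn_name[:3], '->', conn_name[-3:])  # exc -> inh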
label="inhib_pop") spike_source_1 = sim.Population( 1, sim.SpikeSourceArray(spike_times=spike_time_for_id_1)) spike_source_2 = sim.Population( 1, sim.SpikeSourceArray(spike_times=spike_time_for_id_2)) spike_source_3 = sim.Population( 1, sim.SpikeSourceArray(spike_times=spike_time_for_id_3)) spike_source_4 = sim.Population( 1, sim.SpikeSourceArray(spike_times=spike_time_for_id_4)) spike_source_9 = sim.Population( 1, sim.SpikeSourceArray(spike_times=spike_time_for_id_9)) spike_2_conn_for_1 = sim.Projection(spike_source_1, cann_pop[1:2], sim.OneToOneConnector(), sim.StaticSynapse(weight=0.002, delay=0.1)) spike_2_conn_for_2 = sim.Projection(spike_source_2, cann_pop[2:3], sim.OneToOneConnector(), sim.StaticSynapse(weight=0.002, delay=0.1)) spike_2_conn_for_3 = sim.Projection(spike_source_3, cann_pop[3:4], sim.OneToOneConnector(), sim.StaticSynapse(weight=0.002, delay=0.1)) spike_2_conn_for_4 = sim.Projection(spike_source_4, cann_pop[4:5], sim.OneToOneConnector(), sim.StaticSynapse(weight=0.002, delay=0.1)) spike_2_conn_for_9 = sim.Projection(spike_source_9, cann_pop[9:10], sim.OneToOneConnector(), sim.StaticSynapse(weight=0.002, delay=0.1)) cann_2_cann = sim.Projection( cann_pop, cann_pop,
###### Neurons #######
input_neurons = p.Population(param.input_nr, p.SpikeSourcePoisson())
reservoir_exc = p.Population(param.res_exc_nr, p.IF_curr_exp, {},
                             label="reservoir_exc")
reservoir_inh = p.Population(param.res_inh_nr, p.IF_curr_exp, {},
                             label="reservoir_inh")
######################

###### Synapses #######
stat_syn_input = p.StaticSynapse(weight=dist_input, delay=1)
stat_syn_exc = p.StaticSynapse(weight=dist_exc, delay=1)
stat_syn_inh = p.StaticSynapse(weight=dist_inh, delay=1)
#stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)
######################

###### Connections #######
exc_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inh_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.FixedProbabilityConnector(0.5, rng=param.rng)
rout_conn = p.AllToAllConnector()

connections = {}
neuron1 = sim.create(sim.HH_cond_exp(**cellparams))
neuron1.record(["v"])
neuron1.initialize(v=cellparams["e_rev_leak"])

neuron2 = sim.create(sim.HH_cond_exp(**cellparams))
neuron2.record(["v"])
neuron2.initialize(v=cellparams["e_rev_leak"])

spike_times = [2.0]
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

sim.Projection(spike_source, neuron1, sim.OneToOneConnector(),
               sim.StaticSynapse(weight=0.076, delay=0.1),
               receptor_type='excitatory')
sim.Projection(spike_source, neuron2, sim.OneToOneConnector(),
               sim.StaticSynapse(weight=0.0755, delay=0.1),
               receptor_type='excitatory')

sim.run(tEnd)

data1 = neuron1.get_data()
data2 = neuron2.get_data()
signal_names = [s.name for s in data1.segments[0].analogsignalarrays]

fig = plt.figure(figsize=(cm2inch(12.4), cm2inch(7)))
###### Neurons #######
input_neurons = p.Population(param.input_nr, p.SpikeSourcePoisson())
readout_neurons = p.Population(param.readout_nr, p.IF_curr_exp, {},
                               label="readout")
reservoir = p.Population(param.reservoir_nr, p.IF_curr_exp, {},
                         label="reservoir")
######################

###### Synapses #######
stat_syn_res = p.StaticSynapse(weight=5.0, delay=1)
stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)
stat_syn_rout = p.StaticSynapse(weight=0.0, delay=1)
######################

###### Connections #######
res_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['r2r'] = p.Projection(reservoir, reservoir,
                                            label='Excitatory layer')
cortical_neurons_inh = simulator.Population(Ncell_inh**2, inhibitory_cell,
                                            structure=inhibitory_structure,
                                            label='Inhibitory layer')

#############################
# Add background noise
#############################

correlated = False
noise_rate = 5800  # Hz
g_noise = 0.00089 * 0  # Microsiemens (0.89 Nanosiemens)
noise_delay = 1  # ms
noise_model = simulator.SpikeSourcePoisson(rate=noise_rate)
noise_syn = simulator.StaticSynapse(weight=g_noise, delay=noise_delay)

if correlated:
    # If correlated is True all the cortical neurons receive noise from the same cell.
    noise_population = simulator.Population(1, noise_model,
                                            label='Background Noise')
    background_noise_to_exc = simulator.Projection(noise_population,
                                                   cortical_neurons_exc,
                                                   simulator.AllToAllConnector(),
                                                   noise_syn)
    background_noise_to_inh = simulator.Projection(noise_population,
                                                   cortical_neurons_inh,
                                                   simulator.AllToAllConnector(),
                                                   noise_syn)
else:
    # If correlated is False, all cortical neurons receive independent noise
    noise_population_exc = simulator.Population(
p.setup(timestep=param.dt)

dist_input = RandomDistribution('uniform', (1, 10), rng=param.rng)
dist_reservoir = RandomDistribution('uniform', (1, 10), rng=param.rng)
#print(distribution.next(5))

###### Neurons #######
input_neurons = p.Population(param.input_nr, p.SpikeSourcePoisson())
readout_neurons = p.Population(param.readout_nr, p.IF_curr_exp, {},
                               label="readout")
reservoir = p.Population(param.reservoir_nr, p.IF_curr_exp, {},
                         label="reservoir")
######################

###### Synapses #######
stat_syn_res = p.StaticSynapse(weight=dist_reservoir, delay=1)
stat_syn_input = p.StaticSynapse(weight=dist_input, delay=1)
stat_syn_rout = p.StaticSynapse(weight=0.0, delay=1)
######################

###### Connections #######
res_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['r2r'] = p.Projection(reservoir, reservoir, res_conn,
                                  synapse_type=stat_syn_res,
                                  receptor_type='excitatory')
        if dist[i][j] > n / 2:
            dist[i][j] = n - dist[i][j]
        weights[i][j] = round(weight_to_spike *
                              (1 / (sig * np.sqrt(2 * np.pi)) *
                               np.exp(-np.power(dist[i][j] - mu, 2.) /
                                      (2 * np.power(sig, 2.)))), 2)
        cann_connector.append((i, j, weights[i][j]))  #, delay_cann2cann))

print("Weight matrix:\n", weights)

spike_times = [1000., 2000.]
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

conn = sim.Projection(spike_source, cann_pop[spiky:spiky + 1],
                      sim.AllToAllConnector(),
                      sim.StaticSynapse(weight=0.002, delay=0.1))
n2n_conn = sim.Projection(
    cann_pop, cann_pop,
    sim.FromListConnector(cann_connector, column_names=["weight"]),
    sim.StaticSynapse(weight=0.0001, delay=100))

spike_source.record('spikes')
cann_pop.record(('v', 'spikes'))

sim.run(5000.0)

from pyNN.utility.plotting import Figure, Panel

data1 = cann_pop.get_data().segments[0]
vm = data1.filter(name="v")[0]
readout_neurons = p.Population(param.readout_nr, p.IF_curr_exp, {},
                               label="readout")
reservoir_exc = p.Population(param.res_exc_nr, p.IF_curr_exp, {},
                             label="reservoir_exc")
reservoir_inh = p.Population(param.res_inh_nr, p.IF_curr_exp, {},
                             label="reservoir_inh")
######################

###### Synapses #######
stat_syn_exc = p.StaticSynapse(weight=5.0, delay=1)
stat_syn_inh = p.StaticSynapse(weight=20.0, delay=1)
stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)
######################

###### Connections #######
exc_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inh_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['e2e'] = p.Projection(reservoir_exc, reservoir_exc,
                              sim.Izhikevich(**parameters_e),
                              initial_values=initialize_e, label="Ex")
population_i = sim.Population(n_neurons_i, sim.Izhikevich(**parameters_i),
                              initial_values=initialize_i, label="In")

# Assemble the entire population
population = sim.Assembly(population_e, population_i)

# Create synapses
syn_weight_e = max_syn_weight_e * np.random.rand(n_neurons_e, n_neurons)
synapses_e = sim.Projection(population_e, population,
                            sim.AllToAllConnector(allow_self_connections=False),
                            sim.StaticSynapse(weight=syn_weight_e, delay=1.0),
                            receptor_type='excitatory')
syn_weight_i = -max_syn_weight_i * np.random.rand(n_neurons_i, n_neurons)
synapses_i = sim.Projection(population_i, population,
                            sim.AllToAllConnector(allow_self_connections=False),
                            sim.StaticSynapse(weight=syn_weight_i, delay=1.0),
                            receptor_type='inhibitory')

# Create Gaussian noise generators
gauss_gen_e = sim.NoisyCurrentSource(**gauss_gen_e_params)
population_e.inject(gauss_gen_e)
gauss_gen_i = sim.NoisyCurrentSource(**gauss_gen_i_params)
population_i.inject(gauss_gen_i)

# Set spike times to be recorded
#V value initialization
sim.initialize(Neurons_RS, v=-65.0, gsyn_exc=0, gsyn_inh=0)  # v: mV, gsyn_exc: nS, gsyn_inh: nS
sim.initialize(Neurons_FS, v=-65.0, gsyn_exc=0, gsyn_inh=0)

## RECURRENT CONNECTIONS
# The two populations of neurons are randomly connected
# (internally and mutually) with a connectivity probability of 5%

# exc_exc
exc_exc = sim.Projection(Neurons_RS, Neurons_RS,
                         sim.FixedProbabilityConnector(
                             0.05, allow_self_connections=False),
                         receptor_type='excitatory',
                         synapse_type=sim.StaticSynapse(weight=0.001))
# exc_inh
exc_inh = sim.Projection(Neurons_RS, Neurons_FS,
                         sim.FixedProbabilityConnector(
                             0.05, allow_self_connections=False),
                         receptor_type='excitatory',
                         synapse_type=sim.StaticSynapse(weight=0.001))
# inh_exc
inh_exc = sim.Projection(Neurons_FS, Neurons_RS,
                         sim.FixedProbabilityConnector(
                             0.05, allow_self_connections=False),
                         receptor_type='inhibitory',
                         synapse_type=sim.StaticSynapse(weight=0.005))
# inh_inh
    'v_thresh': -60.0,  # (mV)
    'cm': 0.5
}

sim.setup()

neuron1 = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="neuron1")
neuron2 = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="neuron2")

spike_times = [1000., 2000.]
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

conn = sim.Projection(spike_source, neuron1[spiky:spiky + 1],
                      sim.AllToAllConnector(),
                      sim.StaticSynapse(weight=0.002, delay=1.))
n2n_conn = sim.Projection(neuron1, neuron2, sim.OneToOneConnector(),
                          sim.StaticSynapse(weight=0.002, delay=1.))

spike_source.record('spikes')
neuron1.record(('v', 'spikes'))
neuron2.record(('v', 'spikes'))

sim.run(5000.0)

#print neuron1.get_spike_counts()

from pyNN.utility.plotting import Figure, Panel

data1 = neuron1.get_data().segments[0]
data2 = neuron2.get_data().segments[0]
vm = data1.filter(name="v")[0]
def test_callback(data_input):
    global message
    message = data_input.actual.positions
    msg_list = list(message)
    #msg_list[0] = int(message[0].encode('hex'),16)
    #msg_list = int(message.encode('hex'),16)
    #print('============= Received image data.',message)
    rospy.loginfo('=====received data %r', msg_list[0])

    timer = Timer()
    dt = 0.1
    p.setup(timestep=dt)  # 0.1 ms

    pop_1 = p.Population(1, p.IF_curr_exp, {}, label="pop_1")
    #input = p.Population(1, p.SpikeSourceArray, {'spike_times': [[0,3,6]]}, label='input')
    input = p.Population(1, p.SpikeSourcePoisson,
                         {'rate': (msg_list[0] + 1.6) * 100})
    stat_syn = p.StaticSynapse(weight=50.0, delay=1)
    input_proj = p.Projection(input, pop_1, p.OneToOneConnector(),
                              synapse_type=stat_syn,
                              receptor_type='excitatory')

    pop_1.record(['v', 'spikes'])
    p.run(10)

    pop_1_data = pop_1.get_data()
    spikes = pop_1_data.segments[0].spiketrains[0]
    mean_rate = int(gaussian_convolution(spikes, dt))
    rospy.loginfo('=====mean_rate %r', mean_rate)
    rate_command = mean_rate  # rate coding of the spike train

    '''
    pub = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist, queue_size=10)
    # construct the output command
    command = Twist()
    command.linear.x = rate_command*0.02
    command.angular.z = rate_command/50000.
    pub.publish(command)
    '''

    pub = rospy.Publisher('/arm_controller/follow_joint_trajectory/goal',
                          FollowJointTrajectoryActionGoal, queue_size=10)
    command = FollowJointTrajectoryActionGoal()
    command.header.stamp = rospy.Time.now()
    command.goal.trajectory.joint_names = ['elbow']
    point = JointTrajectoryPoint()
    point.positions = [rate_command / 10]
    point.time_from_start = rospy.Duration(1)
    command.goal.trajectory.points.append(point)
    pub.publish(command)
    rospy.loginfo('=====send command %r', command.goal.trajectory.points[0])

    fig_settings = {
        'lines.linewidth': 0.5,
        'axes.linewidth': 0.5,
        'axes.labelsize': 'small',
        'legend.fontsize': 'small',
        'font.size': 8
    }
    plt.rcParams.update(fig_settings)
    fig1 = plt.figure(1, figsize=(6, 8))

    def plot_spiketrains(segment):
        for spiketrain in segment.spiketrains:
            y = np.ones_like(spiketrain) * spiketrain.annotations['source_id']
            plt.plot(spiketrain, y, '.')
            plt.ylabel(segment.name)
            plt.setp(plt.gca().get_xticklabels(), visible=False)

    def plot_signal(signal, index, colour='b'):
        label = "Neuron %d" % signal.annotations['source_ids'][index]
        plt.plot(signal.times, signal[:, index], colour, label=label)
        plt.ylabel("%s (%s)" % (signal.name, signal.units._dimensionality.string))
        plt.setp(plt.gca().get_xticklabels(), visible=False)
        plt.legend()

    print("now plotting the network---------------")
    rospy.loginfo('--------now plotting---------------')
    n_panels = sum(a.shape[1]
                   for a in pop_1_data.segments[0].analogsignalarrays) + 2
    plt.subplot(n_panels, 1, 1)
    plot_spiketrains(pop_1_data.segments[0])
    panel = 3
    for array in pop_1_data.segments[0].analogsignalarrays:
        for i in range(array.shape[1]):
            plt.subplot(n_panels, 1, panel)
            plot_signal(array, i, colour='bg'[panel % 2])
            panel += 1
    plt.xlabel("time (%s)" % array.times.units._dimensionality.string)
    plt.setp(plt.gca().get_xticklabels(), visible=True)
I_vec, u_vec, s_vec = LIF_CUBA(spikes=spikes_e)

# %% PyNN CUBA

# Setup
sim.setup(timestep=0.1, min_delay=0.1, max_delay=10.0)
IF_sim = sim.Population(1, sim.IF_curr_exp(), label="IF_curr_exp")
IF_sim.record('v')
spike_times = np.arange(50, 100, 10)
spike_input = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times),
                             label="Input spikes")

# Connections
w = 1
connections = sim.Projection(spike_input, IF_sim,
                             connector=sim.AllToAllConnector(),
                             synapse_type=sim.StaticSynapse(weight=w, delay=0.1),
                             receptor_type="excitatory")

# Running simulation (times in ms)
sim.run(100.0)

# Data
v_data = IF_sim.get_data()
data = IF_sim.get_data().segments[0]
v_cuba = data.filter(name="v")[0]

# Plotting
plt.plot(v_cuba)

# End
def __init__(self, sim_params=None, cell_params=None, verbose=True):
    '''
    Parameters : Stimulus, Population, Synapses, Recording, Running
    '''
    self.verbose = verbose
    self.sim_params = sim_params
    self.cell_params = cell_params

    sim.setup()  #spike_precision='on_grid')#timestep = .1)

    N_inh = int(sim_params['nb_neurons'] * sim_params['p'])  # total pop * proportion of inhibitory
    self.spike_source = sim.Population(
        N_inh,
        sim.SpikeSourcePoisson(rate=sim_params['input_rate'],
                               duration=sim_params['simtime'] / 2))

    # orientation stimulus, see bottom section of notebook
    angle = 1. * np.arange(N_inh)
    rates = self.tuning_function(angle,
                                 sim_params['angle_input'] / 180. * N_inh,
                                 sim_params['b_input'], N_inh)
    rates /= rates.mean()
    rates *= sim_params['input_rate']
    for i, cell in enumerate(self.spike_source):
        cell.set_parameters(rate=rates[i])

    # neuron model selection
    if sim_params['neuron_model'] == 'IF_cond_alpha':
        model = sim.IF_cond_alpha  # LIF with alpha-shaped conductances
    else:
        model = sim.IF_cond_exp  # LIF with exponential conductances

    # populations
    E_neurons = sim.Population(
        N_inh, model(**cell_params),
        initial_values={'v': rnd('uniform', (sim_params['v_init_min'],
                                             sim_params['v_init_max']))},
        label="Excitateurs")
    I_neurons = sim.Population(
        int(sim_params['nb_neurons'] - N_inh), model(**cell_params),
        initial_values={'v': rnd('uniform', (sim_params['v_init_min'],
                                             sim_params['v_init_max']))},
        label="Inhibiteurs")

    # input to excitatory neurons
    input_exc = sim.Projection(
        self.spike_source, E_neurons, sim.OneToOneConnector(),
        sim.StaticSynapse(weight=sim_params['w_input_exc'],
                          delay=sim_params['s_input_exc']))

    # loop through the connection types and use the associated params; can be a bit slow
    conn_types = ['exc_inh', 'inh_exc', 'exc_exc', 'inh_inh']  # connection types
    '''
    self.proj = self.set_synapses(conn_types = conn_types, sim_params =sim_params,
                                  E_neurons = E_neurons, I_neurons = I_neurons,
                                  N_inh = N_inh)
    '''
    # Multithreading support does not work; leave n_jobs at 1
    self.proj = Parallel(n_jobs=1, backend='multiprocessing')(
        delayed(self.set_synapses)(conn_type,
                                   sim_params=sim_params,
                                   E_neurons=E_neurons,
                                   I_neurons=I_neurons,
                                   N_inh=N_inh,
                                   conn_types=conn_types,
                                   verbose=verbose)
        for conn_type in range(len(conn_types)))
    if verbose:
        print('Done building synapses !')

    # record
    self.spike_source.record('spikes')
    E_neurons.record('spikes')
    I_neurons.record('spikes')

    # run
    if verbose:
        print('Running simulation..')
    sim.run(sim_params['simtime'])
    if verbose:
        print('Done running !')

    # get the spikes
    self.E_spikes = E_neurons  #.get_data().segments[0]
    self.I_spikes = I_neurons  #.get_data().segments[0]
    self.P_spikes = self.spike_source  #.get_data().segments[0]
def create_S2_layers(C1_layers: Dict[float, Sequence[Layer]], feature_size,
                     s2_prototype_cells, refrac_s2=.1, stdp=True,
                     inhibition=True) -> Dict[float, List[Layer]]:
    """
    Creates all prototype S2 layers for all sizes.

    Parameters:
        `C1_layers`: A dictionary containing for each size a list of C1
                     layers, one for each feature

        `feature_size`:

        `s2_prototype_cells`:

        `refrac_s2`:

        `stdp`:

    Returns:
        A dictionary containing for each size a list of different S2 layers,
        one for each prototype.
    """
    f_s = feature_size
    initial_weight = 25 / (f_s * f_s)
    weight_rng = rnd.RandomDistribution('normal', mu=initial_weight,
                                        sigma=initial_weight / 20)
    i_offset_rng = rnd.RandomDistribution('normal', mu=.5, sigma=.45)
    weights = list(map(lambda x: weight_rng.next() * 1000,
                       range(4 * f_s * f_s)))
    S2_layers = {}
    i_offsets = list(map(lambda x: i_offset_rng.next(),
                         range(s2_prototype_cells)))
    ndicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    ondicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    omdicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    for size, layers in C1_layers.items():
        n, m = how_many_squares_in_shape(layers[0].shape, (f_s, f_s), f_s)
        if stdp:
            l_i_offsets = [list(map(lambda x: rnd.RandomDistribution(
                                        'normal', mu=i_offsets[i],
                                        sigma=.25).next(),
                                    range(n * m)))
                           for i in range(s2_prototype_cells)]
        else:
            l_i_offsets = np.zeros((s2_prototype_cells, n * m))
        print('S2 Shape', n, m)
        layer_list = list(map(
            lambda i: Layer(
                sim.Population(n * m,
                               sim.IF_curr_exp(i_offset=l_i_offsets[i],
                                               tau_refrac=refrac_s2),
                               structure=space.Grid2D(aspect_ratio=m / n),
                               label=str(i)),
                (n, m)),
            range(s2_prototype_cells)))
        for S2_layer in layer_list:
            for C1_layer in layers:
                S2_layer.projections[C1_layer.population.label] =\
                    connect_layer_to_layer(C1_layer, S2_layer, (f_s, f_s), f_s,
                                           [[w] for w in weights[:f_s * f_s]],
                                           stdp=stdp,
                                           initial_weight=initial_weight,
                                           ndicts=ndicts, ondicts=ondicts,
                                           omdicts=omdicts)
        S2_layers[size] = layer_list
    # Set the labels of the shared connections
    if stdp:
        t = time.clock()
        print('Set shared labels')
        for s2_label_dicts in [ndicts, ondicts, omdicts]:
            for i in range(s2_prototype_cells):
                w_iter = weights.__iter__()
                for label, (source, target) in s2_label_dicts[i].items():
                    conns = nest.GetConnections(source=source, target=target)
                    nest.SetStatus(conns, {'label': label,
                                           'weight': w_iter.__next__()})
        print('Setting labels took', time.clock() - t)
    if inhibition:
        # Create inhibitory connections between the S2 cells
        # First between the neurons of the same layer...
        inh_weight = -10
        inh_delay = .1
        print('Create S2 self inhibitory connections')
        for layer_list in S2_layers.values():
            for layer in layer_list:
                sim.Projection(layer.population, layer.population,
                               sim.AllToAllConnector(allow_self_connections=False),
                               sim.StaticSynapse(weight=inh_weight,
                                                 delay=inh_delay))
        # ...and between the layers
        print('Create S2 cross-scale inhibitory connections')
        for i in range(s2_prototype_cells):
            for layer_list1 in S2_layers.values():
                for layer_list2 in S2_layers.values():
                    if layer_list1[i] != layer_list2[i]:
                        sim.Projection(layer_list1[i].population,
                                       layer_list2[i].population,
                                       sim.AllToAllConnector(),
                                       sim.StaticSynapse(weight=inh_weight,
                                                         delay=inh_delay))
        if stdp:
            # Create the inhibition between different prototype layers
            print('Create S2 cross-prototype inhibitory connections')
            for layer_list in S2_layers.values():
                for layer1 in layer_list:
                    for layer2 in layer_list:
                        if layer1 != layer2:
                            sim.Projection(layer1.population, layer2.population,
                                           sim.OneToOneConnector(),
                                           sim.StaticSynapse(weight=inh_weight - 1,
                                                             delay=inh_delay))
    return S2_layers
sim.setup(simparams['dt'])

inp = sim.Population(
    1, sim.SpikeSourcePoisson(duration=simparams['duration'],
                              rate=simparams['input_rate']))
inp.label = 'input cell'

outp = sim.Population(1, sim.IF_curr_exp, cellparams=cellparams)
outp.label = 'output cell'

inp.record('spikes')
outp.record(['v', 'spikes'])

synapse = sim.StaticSynapse(weight=1, delay=simparams['delay'])
connector = sim.OneToOneConnector()
connection = sim.Projection(inp, outp, connector, synapse)


def report_time(t):
    print("Time: {}".format(t))
    return t + simparams['dt']


par = 'i_offset'
for p in [0.01]:
    outp.set(**{par: p})
    cellparams[par] = p
    outp.initialize(v=cellparams['v_rest'])
    sim.run(simparams['duration'], callbacks=[report_time])
# ============= Validation ============= #
print('Constructing new network with the learned weights')
sim.setup(threads=args.threads, min_delay=.1)

# Create the validation network and connect the C2 neurons to it
validation_spiketrains = [[s for s in st] for st in validation_pair[1]]
C2_populations, compound_C2_population =\
    create_C2_populations(validation_spiketrains)
classifier_neurons = [sim.Population(1, sim.IF_curr_exp())
                      for cat in range(categories)]
for category in range(categories):
# for category in range(1):
    sim.Projection(compound_C2_population, classifier_neurons[category],
                   sim.AllToAllConnector(),
                   sim.StaticSynapse(weight=classifier_weights[category]))

# Record the spikes for visualization purposes
compound_C2_population.record('spikes')
for pop in classifier_neurons:
    pop.record(['spikes', 'v'])

# Let the simulation run to "fill" the layer pipeline with spikes
sim.run(40)
for pop in C2_populations:
    pop.get_data(clear=True)

predicted_labels = []
# Simulate and classify the images
for i in range(validation_image_count):