def run_test(w_list, cell_para, spike_source_data):
    pop_list = []
    p.setup(timestep=1.0, min_delay=1.0, max_delay=3.0)

    # input Poisson layer
    input_size = w_list[0].shape[0]
    pop_in = p.Population(input_size, p.SpikeSourceArray, {'spike_times': []})
    for j in range(input_size):
        pop_in[j].spike_times = spike_source_data[j]
    pop_list.append(pop_in)

    # hidden/output layers: split each weight matrix into its excitatory
    # (positive) and inhibitory (negative) parts
    for w in w_list:
        pos_w = np.copy(w)
        pos_w[pos_w < 0] = 0
        neg_w = np.copy(w)
        neg_w[neg_w > 0] = 0

        output_size = w.shape[1]
        pop_out = p.Population(output_size, p.IF_curr_exp, cell_para)
        p.Projection(pop_in, pop_out, p.AllToAllConnector(weights=pos_w),
                     target='excitatory')
        p.Projection(pop_in, pop_out, p.AllToAllConnector(weights=neg_w),
                     target='inhibitory')
        pop_list.append(pop_out)
        pop_in = pop_out

    pop_out.record()
    run_time = np.ceil(np.max(spike_source_data) / 1000.) * 1000
    p.run(run_time)
    spikes = pop_out.getSpikes(compatible_output=True)
    return spikes
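# --- Hedged sketch (not part of run_test above): the same positive/negative
# --- weight split expressed with the PyNN 0.8+ API, where weight arrays are
# --- set on the Projection after construction. The backend, population sizes
# --- and the weight matrix are illustrative assumptions; negative weights go
# --- to the inhibitory projection because the cells are current-based.
import numpy as np
import pyNN.nest as sim

sim.setup(timestep=1.0)
pre = sim.Population(4, sim.SpikeSourcePoisson(rate=20.0))
post = sim.Population(3, sim.IF_curr_exp())

w = np.random.uniform(-0.1, 0.1, (4, 3))   # mixed-sign weight matrix
pos_w = np.where(w > 0, w, 0.0)            # excitatory part
neg_w = np.where(w < 0, w, 0.0)            # inhibitory part (negative values)

exc = sim.Projection(pre, post, sim.AllToAllConnector(), sim.StaticSynapse(),
                     receptor_type='excitatory')
inh = sim.Projection(pre, post, sim.AllToAllConnector(), sim.StaticSynapse(),
                     receptor_type='inhibitory')
exc.set(weight=pos_w)
inh.set(weight=neg_w)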
def connect_layers(input_layer, output_layer, weights, i_s, j_s, i_e, j_e,
                   k_out, stdp=False, initial_weight=0, label_dicts=None):
    """Connects a neuron of an output layer to the corresponding square of an
    input layer. This is a helper function of connect_layer_to_layer().

    Returns:
        The created projection between the input and output layers
    """
    m = input_layer.shape[1]
    view_elements = []
    i = i_s
    while i < i_e:
        j = j_s
        while j < j_e:
            view_elements.append(m * i + j)
            j += 1
        i += 1

    if stdp:
        w_max = initial_weight * 15
        stdp_shared = sim.native_synapse_type('stdp_synapse')\
                         (Wmax=w_max * 1000, mu_plus=0.0, mu_minus=1.0)
        proj = sim.Projection(input_layer.population[view_elements],
                              output_layer.population[[k_out]],
                              sim.AllToAllConnector(), stdp_shared)
        ol = int(output_layer.population.label)
        il = input_layer.population.label
        out_neuron = output_layer.population[k_out]
        if label_dicts is None:
            for i in range(len(view_elements)):
                label = '{}_{}_{}'.format(ol, il, i)
                in_neuron = input_layer.population[view_elements[i]]
                conn = nest.GetConnections(source=[in_neuron],
                                           target=[out_neuron])
                nest.SetStatus(conn, {'label': label, 'weight': weights[i][0]})
        else:
            for i in range(len(view_elements)):
                label = '{}_{}_{}'.format(ol, il, i)
                if label not in label_dicts[ol]:
                    label_dicts[ol][label] = ([], [])
                in_neuron = input_layer.population[view_elements[i]]
                label_dicts[ol][label][0].append(in_neuron)
                label_dicts[ol][label][1].append(out_neuron)
    else:
        proj = sim.Projection(input_layer.population[view_elements],
                              output_layer.population[[k_out]],
                              sim.AllToAllConnector(),
                              sim.StaticSynapse(weight=weights))
    return proj
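# --- Hedged sketch (standalone): the row-major index arithmetic used by
# --- connect_layers() above, shown in isolation. `m` is the width of the
# --- flattened input layer; the bounds follow the i_s/i_e, j_s/j_e convention.
def patch_indices(m, i_s, i_e, j_s, j_e):
    """Return the flat indices of the rectangle [i_s, i_e) x [j_s, j_e)."""
    return [m * i + j for i in range(i_s, i_e) for j in range(j_s, j_e)]

# In a layer that is 5 cells wide, the 2x2 patch starting at row 1, column 2
# covers these flattened positions.
assert patch_indices(5, 1, 3, 2, 4) == [7, 8, 12, 13]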
def create_C2_layers(S2_layers: Dict[float, Sequence[Layer]],
                     s2_prototype_cells: int) -> List[sim.Population]:
    """Creates the populations of the C2 layer, one for each S2 prototype
    cell, containing only a single cell which max-pools the spikes of all
    layers of a prototype.

    Parameters:
        `S2_layers`: A dictionary containing for each scale a list of S2
                     layers, one for each prototype cell

        `s2_prototype_cells`: The number of S2 prototype cells

    Returns:
        A list of populations of size one, one population for each prototype
        cell
    """
    no_inh_w = 17.15           # synapse weight without S2 inhibitions
    with_inh_w = 4 * no_inh_w  # synapse weight with S2 inhibitions
    C2_populations = [sim.Population(1, sim.IF_curr_exp(), label=str(prot))
                      for prot in range(s2_prototype_cells)]
    total_connections = sum(map(lambda ll: ll[0].shape[0] * ll[0].shape[1],
                                S2_layers.values()))
    for s2ll in S2_layers.values():
        for prot in range(s2_prototype_cells):
            sim.Projection(s2ll[prot].population, C2_populations[prot],
                           sim.AllToAllConnector(),
                           sim.StaticSynapse(weight=with_inh_w / total_connections))
    return C2_populations
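# --- Worked example (hypothetical numbers) of the weight normalisation in
# --- create_C2_layers(): the per-synapse weight is the total drive divided by
# --- the number of incoming connections, so every C2 cell receives the same
# --- summed weight regardless of how many S2 cells feed it.
no_inh_w = 17.15
with_inh_w = 4 * no_inh_w                             # 68.6
total_connections = 8 * 8 + 4 * 4                     # two scales: 8x8 and 4x4
per_synapse_weight = with_inh_w / total_connections   # 0.8575
assert abs(per_synapse_weight * total_connections - with_inh_w) < 1e-9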
def test_set_array(self):
    weight = 0.123
    prj = sim.Projection(self.p1, self.p2, sim.AllToAllConnector())
    weight_array = np.ones(prj.shape) * weight
    prj.set(weight=weight_array)
    self.assertTrue((weight_array == prj.get("weight", format="array")).all())
def test_nest_projection_gaussian():
    p1 = pynn.Population(2, pynn.IF_cond_exp())
    p2 = pynn.Population(2, pynn.IF_cond_exp())
    c = pynn.Projection(p1, p2,
                        pynn.AllToAllConnector(allow_self_connections=False))
    c.set(weight=pynn.random.RandomDistribution('normal', mu=0.5, sigma=0.1))
    weights = c.get('weight', format='array')
    assert len(weights[weights == 0]) < 1
def connect(self):
    n_cells = self.parameters.config.n_cells
    n_segments = self.parameters.config.n_segments

    # connect populations
    pynn.Projection(self.distal_input, self.distal,
                    pynn.OneToOneConnector(weights=0.025))

    for i in range(self.parameters.config.n_columns):
        # get "compartments" for all cells in this column
        inhibitions = self.inhibitory[i * n_cells:(i + 1) * n_cells]
        somas = self.soma[i * n_cells:(i + 1) * n_cells]
        proximal_input = pynn.PopulationView(self.proximal_input, [i])

        # set up connections with columnar symmetry
        pynn.Projection(inhibitions, somas,
                        pynn.DistanceDependentProbabilityConnector('d>=1',
                                                                   weights=0.2),
                        target='SYN_2')  # 4
        pynn.Projection(proximal_input, somas,
                        pynn.AllToAllConnector(weights=0.08),
                        target='SYN_1')  # 1
        pynn.Projection(proximal_input, inhibitions,
                        pynn.AllToAllConnector(weights=0.042))  # 2

        for j in range(self.parameters.config.n_cells):
            # get "compartments" for this specific cell
            segments = self.distal[i * n_cells * n_segments + j * n_segments:
                                   i * n_cells * n_segments + (j + 1) * n_segments]
            inhibition = pynn.PopulationView(self.inhibitory, [i * n_cells + j])
            soma = pynn.PopulationView(self.soma, [i * n_cells + j])

            # set up connections with cellular symmetry
            pynn.Projection(segments, inhibition,
                            pynn.AllToAllConnector(weights=0.15),
                            target='inhibitory')  # 3
            pynn.Projection(segments, soma,
                            pynn.AllToAllConnector(weights=0.15),
                            target='SYN_3')  # 3
def setUp(self):
    sim.setup()
    self.p1 = sim.Population(7, sim.IF_cond_exp())
    self.p2 = sim.Population(4, sim.IF_cond_exp())
    self.p3 = sim.Population(5, sim.IF_curr_alpha())
    self.syn_rnd = sim.StaticSynapse(weight=0.123, delay=0.5)
    self.syn_a2a = sim.StaticSynapse(weight=0.456, delay=0.4)
    self.random_connect = sim.FixedNumberPostConnector(n=2)
    self.all2all = sim.AllToAllConnector()
def connect(self):
    """Set up connections between populations"""
    params = self.parameters.projections

    # generate weights with normally distributed jitter and set up stimulus
    w = params.stimulus.weight + np.random.normal(
        0, params.stimulus.jitter, len(self.columns))
    stimulus_connector = pynn.OneToOneConnector(weights=w)
    pynn.Projection(self.stimulus, self.columns, stimulus_connector)

    # projection to accumulate/count the number of active columns
    accumulation_connector = pynn.AllToAllConnector(
        weights=params.accumulation.weight)
    pynn.Projection(self.columns, self.kill_switch, accumulation_connector)

    # projection to inhibit all columns
    inhibition_connector = pynn.AllToAllConnector(
        weights=params.inhibition.weight)
    pynn.Projection(self.kill_switch, self.columns, inhibition_connector,
                    target='inhibitory')

    # forward inhibition
    forward_inhibition_connector = pynn.FixedProbabilityConnector(
        params.forward_inhibition.probability,
        weights=params.forward_inhibition.weight)
    pynn.Projection(self.stimulus, self.columns, forward_inhibition_connector,
                    target='inhibitory')

    # calculate connectivity matrix
    n_columns = self.parameters.populations.columns.size
    n_inputs = self.parameters.config.input_size
    self.connections = (np.random.uniform(0, 1, n_columns * n_inputs) > 0.60)\
        .reshape(len(self.columns), n_inputs).astype(np.int64)
    self.permanences = np.random.normal(.3, .05, n_columns * n_inputs)\
        .reshape(len(self.columns), n_inputs)
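# --- Hedged sketch (NumPy only, standalone): the connectivity and permanence
# --- matrices built at the end of connect() above. The 0.60 sparsity
# --- threshold and the normal(0.3, 0.05) permanences mirror the values in the
# --- method; n_columns and n_inputs are made-up sizes.
import numpy as np

n_columns, n_inputs = 16, 64
connections = (np.random.uniform(0, 1, (n_columns, n_inputs)) > 0.60).astype(np.int64)
permanences = np.random.normal(0.3, 0.05, (n_columns, n_inputs))

# Roughly 40% of the potential synapses end up marked as connected.
print(connections.mean())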
def test_ticket240():
    nest = pyNN.nest
    nest.setup(threads=4)
    parameters = {'Tau_m': 17.0}
    p1 = nest.Population(4, nest.IF_curr_exp())
    p2 = nest.Population(5, nest.native_cell_type("ht_neuron")(**parameters))
    conn = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=1.0)
    # This should be a nonstandard receptor type but I don't know of one to use.
    prj = nest.Projection(p1, p2, conn, syn, receptor_type='AMPA')
    connections = prj.get(('weight',), format='list')
    assert len(connections) > 0
def test_single_presynaptic_and_single_postsynaptic_neuron(self):
    prj = sim.Projection(self.p4, self.p4, sim.AllToAllConnector(),
                         synapse_type=sim.StaticSynapse(weight=0.123))
    assert prj.shape == (1, 1)
    weight = 0.456
    prj.set(weight=weight)
    self.assertEqual(prj.get("weight", format="array")[0][0], weight)
    weight_array = numpy.ones(prj.shape) * weight
    prj.set(weight=weight_array)
    self.assertTrue((weight_array == prj.get("weight", format="array")).all())
def create_brain():
    """Initializes PyNN with the minimal neuronal network"""
    sim.setup(timestep=0.1, min_delay=0.1, max_delay=20.0, threads=1,
              rng_seeds=[1234])

    # The following parameters were taken from the Husky Braitenberg brain
    # experiment (braitenberg.py)
    SENSORPARAMS = {
        'cm': 0.025,
        'v_rest': -60.5,
        'tau_m': 10.,
        'e_rev_E': 0.0,
        'e_rev_I': -75.0,
        'v_reset': -60.5,
        'v_thresh': -60.0,
        'tau_refrac': 10.0,
        'tau_syn_E': 2.5,
        'tau_syn_I': 2.5
    }
    SYNAPSE_PARAMS = {
        'weight': 0.5e-4,
        'delay': 20.0,
        'U': 1.0,
        'tau_rec': 1.0,
        'tau_facil': 1.0
    }

    cell_class = sim.IF_cond_alpha(**SENSORPARAMS)

    # Define the network structure: 2 neurons (1 sensor and 1 actor)
    population = sim.Population(size=2, cellclass=cell_class)
    synapse_type = sim.TsodyksMarkramSynapse(**SYNAPSE_PARAMS)
    connector = sim.AllToAllConnector()

    # Connect neurons
    sim.Projection(presynaptic_population=population[0:1],
                   postsynaptic_population=population[1:2],
                   connector=connector,
                   synapse_type=synapse_type,
                   receptor_type='excitatory')

    sim.initialize(population, v=population.get('v_rest'))
    return population
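# --- Hedged usage sketch for create_brain(), assuming the same `sim` backend
# --- as above; the 500 ms run time is arbitrary.
population = create_brain()
population.record(('spikes', 'v'))
sim.run(500.0)
segment = population.get_data().segments[0]
print(segment.spiketrains)
sim.end()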
def presentStimuli(pres_duration, num_pres_per_stim, num_source, num_target,
                   bright_on_weights, bright_off_weights, bright_lat_weights,
                   dark_on_weights, dark_off_weights, dark_lat_weights,
                   is_repeated=False):
    """For presenting stimuli to the target network. A callback is used to
    switch between presentation rates.

    Arguments:  num_source, num_target, num_pres_per_stim, pres_duration
    """
    num_stim = 2  # two stimuli: 'bright' and 'dark'
    total_duration = num_stim * num_pres_per_stim * pres_duration

    source_on_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(),
                                    label='source_on_pop')
    source_off_pop = pynn.Population(num_source, pynn.SpikeSourcePoisson(),
                                     label='source_off_pop')
    is_bright, random_on_rates, random_off_rates = getPresentationRatesForCallback(
        num_stim, num_source, num_pres_per_stim, is_repeated=is_repeated)

    bright_target_pop = pynn.Population(
        num_target, pynn.IF_cond_exp,
        {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
        label='bright_target_pop')
    dark_target_pop = pynn.Population(
        num_target, pynn.IF_cond_exp,
        {'i_offset': 0.11, 'tau_refrac': 3.0, 'v_thresh': -51.0},
        label='dark_target_pop')

    bright_on_conn = pynn.Projection(
        source_on_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=bright_on_weights),
        receptor_type='excitatory')
    bright_off_conn = pynn.Projection(
        source_off_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=bright_off_weights),
        receptor_type='excitatory')
    bright_lat_conn = pynn.Projection(
        bright_target_pop, bright_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=bright_lat_weights),
        receptor_type='inhibitory')
    dark_on_conn = pynn.Projection(
        source_on_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=dark_on_weights),
        receptor_type='excitatory')
    dark_off_conn = pynn.Projection(
        source_off_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=dark_off_weights),
        receptor_type='excitatory')
    dark_lat_conn = pynn.Projection(
        dark_target_pop, dark_target_pop, connector=pynn.AllToAllConnector(),
        synapse_type=pynn.StaticSynapse(weight=dark_lat_weights),
        receptor_type='inhibitory')

    source_on_pop.record('spikes')
    source_off_pop.record('spikes')
    bright_target_pop.record(['spikes'])
    dark_target_pop.record(['spikes'])

    pynn.run(total_duration,
             callbacks=[PoissonWeightVariation(source_on_pop, random_on_rates,
                                               pres_duration),
                        PoissonWeightVariation(source_off_pop, random_off_rates,
                                               pres_duration)])
    pynn.end()

    source_on_spikes = source_on_pop.get_data('spikes').segments[0].spiketrains
    source_off_spikes = source_off_pop.get_data('spikes').segments[0].spiketrains
    bright_spikes = bright_target_pop.get_data('spikes').segments[0].spiketrains
    dark_spikes = dark_target_pop.get_data('spikes').segments[0].spiketrains
    return is_bright, source_on_spikes, source_off_spikes, bright_spikes, dark_spikes
def getConnectorType(conn_type, ff_prob=None, lat_prob=None):
    """For getting the feed-forward and lateral connection types.

    Arguments:  conn_type, str, choices = ['all_to_all', 'fixed_prob', 'one_of_each']
                ff_prob, float, probability of connection for feed-forward
                lat_prob, float, probability of connection for lateral

    Returns:    ff_conn, lat_conn
    """
    if conn_type == 'all_to_all':
        ff_conn = pynn.AllToAllConnector()
        lat_conn = pynn.AllToAllConnector(allow_self_connections=False)
    elif conn_type == 'fixed_prob':
        if (ff_prob is None) or (lat_prob is None):
            print(dt.datetime.now().isoformat() + ' ERROR: ' +
                  'One of the connection probabilities is "None".')
            sys.exit(2)
        else:
            ff_conn = pynn.FixedProbabilityConnector(
                ff_prob, rng=pynn.random.NumpyRNG(seed=1798))
            lat_conn = pynn.FixedProbabilityConnector(
                lat_prob, allow_self_connections=False,
                rng=pynn.random.NumpyRNG(seed=1916))
    elif conn_type == 'one_of_each':
        if ff_prob is None:
            print(dt.datetime.now().isoformat() + ' ERROR: ' +
                  'The feed-forward connection probability is "None".')
            sys.exit(2)
        else:
            ff_conn = pynn.FixedProbabilityConnector(
                ff_prob, rng=pynn.random.NumpyRNG(seed=1798))
            lat_conn = pynn.AllToAllConnector(allow_self_connections=False)
    else:
        print(dt.datetime.now().isoformat() + ' ERROR: ' +
              'Unrecognised connection type.')
        sys.exit(2)
    return ff_conn, lat_conn
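# --- Hedged usage sketch for getConnectorType(), assuming `pynn` is the same
# --- PyNN backend used above and pynn.setup(...) has already been called.
# --- Probabilities, sizes and weights are illustrative.
ff_conn, lat_conn = getConnectorType('fixed_prob', ff_prob=0.1, lat_prob=0.05)
source_pop = pynn.Population(10, pynn.SpikeSourcePoisson(), label='source_pop')
target_pop = pynn.Population(10, pynn.IF_cond_exp(), label='target_pop')
pynn.Projection(source_pop, target_pop, ff_conn,
                synapse_type=pynn.StaticSynapse(weight=0.01),
                receptor_type='excitatory')
pynn.Projection(target_pop, target_pop, lat_conn,
                synapse_type=pynn.StaticSynapse(weight=0.01),
                receptor_type='inhibitory')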
def connect_layers(firstLayer, secondLayer):
    # === Parameters for the STDP mechanism ===

    # === Timing dependence ===
    # Time constant of the positive part of the STDP curve, in milliseconds.
    tau_plus = 20.0
    # Time constant of the negative part of the STDP curve, in milliseconds.
    tau_minus = 20.0
    # Amplitude of the positive part of the STDP curve.
    A_plus = 0.006
    # Amplitude of the negative part of the STDP curve.
    A_minus = 0.0055

    # === Weight dependence ===
    # Minimum synaptic weight
    w_min = 0.0
    # Maximum synaptic weight
    w_max = 0.1
    # Default weight
    w_default = 0.0

    # === Delay ===
    delay = 0.1

    # Synapses to use later for testing
    global synapses

    # The amplitude of the weight change is independent of the current weight.
    # If the new weight would be less than w_min it is set to w_min; if it
    # would be greater than w_max it is set to w_max.
    stdp_synapse = sim.STDPMechanism(
        timing_dependence=sim.SpikePairRule(tau_plus=tau_plus,
                                            tau_minus=tau_minus,
                                            A_plus=A_plus, A_minus=A_minus),
        weight_dependence=sim.AdditiveWeightDependence(w_min=w_min,
                                                       w_max=w_max),
        dendritic_delay_fraction=1.0,
        weight=w_default,
        delay=delay)

    # Save the projection onto a global variable
    synapses = sim.Projection(firstLayer, secondLayer, sim.AllToAllConnector(),
                              synapse_type=stdp_synapse)
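# --- Hedged end-to-end sketch around connect_layers(): a Poisson input layer
# --- drives a single output cell through the STDP projection, and the learned
# --- weights are read back afterwards. The backend, population sizes, rates,
# --- i_offset and run time are illustrative assumptions.
import pyNN.nest as sim

sim.setup(timestep=0.1)
inputLayer = sim.Population(16, sim.SpikeSourcePoisson(rate=30.0))
# A small constant drive so the output cell fires and pre/post pairs occur.
outputLayer = sim.Population(1, sim.IF_curr_exp(i_offset=5.0))

connect_layers(inputLayer, outputLayer)
sim.run(1000.0)

learned = synapses.get('weight', format='array')   # shape (16, 1)
print(learned)
sim.end()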
def test_record_native_model():
    if not have_nest:
        raise SkipTest
    nest = pyNN.nest
    from pyNN.random import RandomDistribution

    init_logging(logfile=None, debug=True)
    nest.setup()

    parameters = {'tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron")(**parameters))
    p1.initialize(V_m=-70.0, Theta=-50.0)
    p1.set(theta_eq=-51.5)
    #assert_arrays_equal(p1.get('theta_eq'), -51.5*numpy.ones((10,)))
    assert_equal(p1.get('theta_eq'), -51.5)
    print(p1.get('tau_m'))
    p1.set(tau_m=RandomDistribution('uniform', low=15.0, high=20.0))
    print(p1.get('tau_m'))

    current_source = nest.StepCurrentSource(times=[50.0, 110.0, 150.0, 210.0],
                                            amplitudes=[0.01, 0.02, -0.02, 0.01])
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator")(rate=200.0))

    print("Setting up recording")
    p2.record('spikes')
    p1.record('V_m')

    connector = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=0.001)
    prj_ampa = nest.Projection(p2, p1, connector, syn, receptor_type='AMPA')

    tstop = 250.0
    nest.run(tstop)

    vm = p1.get_data().segments[0].analogsignals[0]
    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(vm.shape, (n_points, n_cells))
    assert vm.max() > 0.0  # should have some spikes
def recognizer_weights_from(feature_np_array):
    """Builds a network from the firing rates of the given feature_np_array
    for the input neurons and learns the weights to recognize the image
    through STDP.
    """
    in_p = create_spike_source_layer_from(feature_np_array).population
    out_p = sim.Population(1, sim.IF_curr_exp(i_offset=5))
    synapse = sim.STDPMechanism(
        weight=-0.2,
        timing_dependence=sim.SpikePairRule(tau_plus=20.0, tau_minus=20.0,
                                            A_plus=0.01, A_minus=0.005),
        weight_dependence=sim.AdditiveWeightDependence(w_min=0, w_max=0.4))
    proj = sim.Projection(in_p, out_p, sim.AllToAllConnector(), synapse)
    sim.run(500)
    return proj.get('weight', 'array')
def test_record_native_model():
    nest = pyNN.nest
    from pyNN.random import RandomDistribution
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)
    nest.setup()

    parameters = {'Tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron"), parameters)
    p1.initialize('V_m', -70.0)
    p1.initialize('Theta', -50.0)
    p1.set('Theta_eq', -51.5)
    assert_equal(p1.get('Theta_eq'), [-51.5] * 10)
    print(p1.get('Tau_m'))
    p1.rset('Tau_m', RandomDistribution('uniform', [15.0, 20.0]))
    print(p1.get('Tau_m'))

    current_source = nest.StepCurrentSource({'times': [50.0, 110.0, 150.0, 210.0],
                                             'amplitudes': [0.01, 0.02, -0.02, 0.01]})
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator"),
                         {'rate': 200.0})

    print("Setting up recording")
    p2.record()
    p1._record('V_m')

    connector = nest.AllToAllConnector(weights=0.001)
    prj_ampa = nest.Projection(p2, p1, connector, target='AMPA')

    tstop = 250.0
    nest.run(tstop)

    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(p1.recorders['V_m'].get().shape, (n_points * n_cells, 3))
    id, t, v = p1.recorders['V_m'].get().T
    assert v.max() > 0.0  # should have some spikes
def test_native_stdp_model():
    nest = pyNN.nest
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)
    nest.setup()

    p1 = nest.Population(10, nest.IF_cond_exp())
    p2 = nest.Population(10, nest.SpikeSourcePoisson())

    stdp_params = {'Wmax': 50.0, 'lambda': 0.015, 'weight': 0.001}
    stdp = nest.native_synapse_type("stdp_synapse")(**stdp_params)

    connector = nest.AllToAllConnector()

    prj = nest.Projection(p2, p1, connector, receptor_type='excitatory',
                          synapse_type=stdp)
def test_native_stdp_model():
    nest = pyNN.nest
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)
    nest.setup()

    p1 = nest.Population(10, nest.IF_cond_exp)
    p2 = nest.Population(10, nest.SpikeSourcePoisson)

    stdp_params = {'Wmax': 50.0, 'lambda': 0.015}
    stdp = nest.NativeSynapseDynamics("stdp_synapse", stdp_params)

    connector = nest.AllToAllConnector(weights=0.001)

    prj = nest.Projection(p2, p1, connector, target='excitatory',
                          synapse_dynamics=stdp)
training_spiketrains = [[s for s in st] for st in training_pair[1]]
C2_populations, compound_C2_population =\
    create_C2_populations(training_spiketrains)
out_p = sim.Population(1, sim.IF_curr_exp(tau_refrac=.1))
stdp_weight = 7 / s2_prototype_cells
stdp = sim.STDPMechanism(
    weight=stdp_weight,
    timing_dependence=sim.SpikePairRule(tau_plus=20.0, tau_minus=26.0,
                                        A_plus=stdp_weight / 5,
                                        A_minus=stdp_weight / 4.48),
    weight_dependence=sim.AdditiveWeightDependence(w_min=0.0,
                                                   w_max=15.8 * stdp_weight))
learn_proj = sim.Projection(compound_C2_population, out_p,
                            sim.AllToAllConnector(), stdp)
epoch = training_pair[0]
print('Simulating for epoch', epoch)

# Record the spikes for visualization purposes and to count the number of
# fired spikes
# compound_C2_population.record('spikes')
out_p.record(['spikes', 'v'])

# Let the simulation run to "fill" the layer pipeline with spikes
sim.run(40)

# Data structure for storing the computed STDP weights for this epoch
classifier_weights = []
                                     sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_4 = sim.Projection(spike_source_4, cann_pop[4:5],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
spike_2_conn_for_9 = sim.Projection(spike_source_9, cann_pop[9:10],
                                    sim.OneToOneConnector(),
                                    sim.StaticSynapse(weight=0.002, delay=0.1))
cann_2_cann = sim.Projection(
    cann_pop, cann_pop,
    sim.FromListConnector(cann_connector, column_names=["weight"]),
    sim.StaticSynapse(weight=0.0001, delay=75))
cann_2_inh = sim.Projection(cann_pop, inhib_pop, sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.02, delay=0.1),
                            receptor_type="excitatory")
inh_2_cann = sim.Projection(inhib_pop, cann_pop, sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.2, delay=0.1),
                            receptor_type="inhibitory")

#spike_source.record('spikes')
cann_pop.record(('v', 'spikes'))
inhib_pop.record(('v', 'spikes'))

sim.run(5000.0)
                                         / (2 * np.power(sig, 2.))))), 2)
        cann_connector.append((i, j, weights[i][j]))  # , delay_cann2cann))

print("Weight matrix:\n", weights)

spike_times = [1000., 2000.]

cann_pop = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="cann_pop")
inhib_pop = sim.Population(1, sim.IF_cond_alpha(**cell_params), label="inhib_pop")
spike_source = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

spike_2_conn = sim.Projection(spike_source, cann_pop[spiky:spiky + 1],
                              sim.AllToAllConnector(),
                              sim.StaticSynapse(weight=0.002, delay=0.1))
cann_2_cann = sim.Projection(
    cann_pop, cann_pop,
    sim.FromListConnector(cann_connector, column_names=["weight"]),
    sim.StaticSynapse(weight=0.0001, delay=75))
cann_2_inh = sim.Projection(cann_pop, inhib_pop, sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.02, delay=0.1),
                            receptor_type="excitatory")
inh_2_cann = sim.Projection(inhib_pop, cann_pop, sim.AllToAllConnector(),
                            sim.StaticSynapse(weight=0.2, delay=0.1),
                            receptor_type="inhibitory")

spike_source.record('spikes')
cann_pop.record(('v', 'spikes'))
######################
###### Synapses ######
stat_syn_res = p.StaticSynapse(weight=5.0, delay=1)
stat_syn_input = p.StaticSynapse(weight=50.0, delay=1)
stat_syn_rout = p.StaticSynapse(weight=0.0, delay=1)

######################
#### Connections #####
res_conn = p.FixedProbabilityConnector(param.res_pconn, rng=param.rng)
inp_conn = p.AllToAllConnector()
rout_conn = p.AllToAllConnector()

connections = {}
connections['r2r'] = p.Projection(reservoir, reservoir, res_conn,
                                  synapse_type=stat_syn_res,
                                  receptor_type='excitatory')
connections['inp2r'] = p.Projection(input_neurons, reservoir, inp_conn,
                                    synapse_type=stat_syn_input,
                                    receptor_type='excitatory')
                   'start': 0.0,
                   'duration': tsim}
#poissonI_params = {'rate': rateI, 'start': 0.0, 'duration': tsim}

poissonE = sim.Population((1, ), cellclass=sim.SpikeSourcePoisson,
                          cellparams=poissonE_params, label='poissonE')
poissonI = sim.Population((1, ), cellclass=sim.SpikeSourcePoisson,
                          cellparams=poissonI_params, label='poissonI')

myconn = sim.AllToAllConnector(weights=globalWeight, delays=dt)

### Connections ###
prjE_E = sim.Projection(poissonE, popE, method=myconn, target='excitatory')
prjI_E = sim.Projection(poissonI, popE, method=myconn, target='inhibitory')
prjE_I = sim.Projection(poissonE, popI, method=myconn, target='excitatory')
prjI_I = sim.Projection(poissonI, popI, method=myconn, target='inhibitory')

## Record the spikes ##
popE.record(to_file=False)
popE.record_v(to_file=False)
popI.record(to_file=False)
popE.record_gsyn(to_file=False)
)
'''
weight_distr = p.RandomDistribution(distribution='normal', parameters=[1, 0.1])
'''
if run_i == 0:
    stdp_weight = 0.0
    directory = 'weight%d' % par_start
    if not os.path.exists(directory):
        os.makedirs(directory)
else:
    stdp_weight = np.load('%s/weight_%d.npy' % (directory, (run_i - 1)))

proj_stdp = p.Projection(
    #pop_input, pop_output, p.AllToAllConnector(weights=weight_distr),
    pop_input, pop_output, p.AllToAllConnector(weights=stdp_weight),
    synapse_dynamics=p.SynapseDynamics(slow=stdp_model))

for i in range(num_output):
    conn_list = list()
    for j in range(num_output):
        if i != j:
            conn_list.append((i, j, -18.1, 1.0))
    p.Projection(pop_output, pop_output, p.FromListConnector(conn_list),
                 target='inhibitory')

for i in range(num_train * num_epo):
    p.Projection(TeachingPoission[i], pop_output,
def create_S2_layers(C1_layers: Dict[float, Sequence[Layer]], feature_size,
                     s2_prototype_cells, refrac_s2=.1, stdp=True,
                     inhibition=True) -> Dict[float, List[Layer]]:
    """Creates all prototype S2 layers for all sizes.

    Parameters:
        `C1_layers`: A dictionary containing for each size a list of C1
                     layers, one for each feature

        `feature_size`:

        `s2_prototype_cells`:

        `refrac_s2`:

        `stdp`:

    Returns:
        A dictionary containing for each size a list of different S2 layers,
        one for each prototype.
    """
    f_s = feature_size
    initial_weight = 25 / (f_s * f_s)
    weight_rng = rnd.RandomDistribution('normal', mu=initial_weight,
                                        sigma=initial_weight / 20)
    i_offset_rng = rnd.RandomDistribution('normal', mu=.5, sigma=.45)
    weights = list(map(lambda x: weight_rng.next() * 1000,
                       range(4 * f_s * f_s)))
    S2_layers = {}
    i_offsets = list(map(lambda x: i_offset_rng.next(),
                         range(s2_prototype_cells)))
    ndicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    ondicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    omdicts = list(map(lambda x: {}, range(s2_prototype_cells)))
    for size, layers in C1_layers.items():
        n, m = how_many_squares_in_shape(layers[0].shape, (f_s, f_s), f_s)
        if stdp:
            l_i_offsets = [list(map(lambda x: rnd.RandomDistribution(
                                'normal', mu=i_offsets[i], sigma=.25).next(),
                                range(n * m)))
                           for i in range(s2_prototype_cells)]
        else:
            l_i_offsets = np.zeros((s2_prototype_cells, n * m))
        print('S2 Shape', n, m)
        layer_list = list(map(lambda i: Layer(
            sim.Population(n * m,
                           sim.IF_curr_exp(i_offset=l_i_offsets[i],
                                           tau_refrac=refrac_s2),
                           structure=space.Grid2D(aspect_ratio=m / n),
                           label=str(i)),
            (n, m)), range(s2_prototype_cells)))
        for S2_layer in layer_list:
            for C1_layer in layers:
                S2_layer.projections[C1_layer.population.label] =\
                    connect_layer_to_layer(C1_layer, S2_layer, (f_s, f_s), f_s,
                                           [[w] for w in weights[:f_s * f_s]],
                                           stdp=stdp,
                                           initial_weight=initial_weight,
                                           ndicts=ndicts, ondicts=ondicts,
                                           omdicts=omdicts)
        S2_layers[size] = layer_list

    # Set the labels of the shared connections
    if stdp:
        t = time.clock()
        print('Set shared labels')
        for s2_label_dicts in [ndicts, ondicts, omdicts]:
            for i in range(s2_prototype_cells):
                w_iter = weights.__iter__()
                for label, (source, target) in s2_label_dicts[i].items():
                    conns = nest.GetConnections(source=source, target=target)
                    nest.SetStatus(conns, {'label': label,
                                           'weight': w_iter.__next__()})
        print('Setting labels took', time.clock() - t)

    if inhibition:
        # Create inhibitory connections between the S2 cells
        # First between the neurons of the same layer...
        inh_weight = -10
        inh_delay = .1
        print('Create S2 self inhibitory connections')
        for layer_list in S2_layers.values():
            for layer in layer_list:
                sim.Projection(layer.population, layer.population,
                               sim.AllToAllConnector(allow_self_connections=False),
                               sim.StaticSynapse(weight=inh_weight,
                                                 delay=inh_delay))

        # ...and between the layers
        print('Create S2 cross-scale inhibitory connections')
        for i in range(s2_prototype_cells):
            for layer_list1 in S2_layers.values():
                for layer_list2 in S2_layers.values():
                    if layer_list1[i] != layer_list2[i]:
                        sim.Projection(layer_list1[i].population,
                                       layer_list2[i].population,
                                       sim.AllToAllConnector(),
                                       sim.StaticSynapse(weight=inh_weight,
                                                         delay=inh_delay))

    if stdp:
        # Create the inhibition between different prototype layers
        print('Create S2 cross-prototype inhibitory connections')
        for layer_list in S2_layers.values():
            for layer1 in layer_list:
                for layer2 in layer_list:
                    if layer1 != layer2:
                        sim.Projection(layer1.population, layer2.population,
                                       sim.OneToOneConnector(),
                                       sim.StaticSynapse(weight=inh_weight - 1,
                                                         delay=inh_delay))
    return S2_layers
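# --- Hedged minimal sketch (standalone) of the self-inhibition pattern used
# --- above: every cell in a population inhibits every other cell but not
# --- itself. The backend, population size and weight are illustrative; the
# --- weight is negative because the cells are current-based.
import pyNN.nest as sim

sim.setup(timestep=0.1)
pop = sim.Population(9, sim.IF_curr_exp())
sim.Projection(pop, pop,
               sim.AllToAllConnector(allow_self_connections=False),
               sim.StaticSynapse(weight=-10.0, delay=0.1),
               receptor_type='inhibitory')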
    'cm': 0.5
}

sim.setup()

neuron1 = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="neuron1")
neuron2 = sim.Population(n, sim.IF_cond_alpha(**cell_params), label="neuron2")

generate_spike_times = [0., 1020., 1040., 1060., 1080., 1100., 1120., 1140.,
                        1160., 2000.]
spike_source = sim.Population(
    n, sim.SpikeSourceArray(spike_times=generate_spike_times))

conn = sim.Projection(spike_source, neuron1, sim.AllToAllConnector(),
                      sim.StaticSynapse(weight=0.002, delay=1.))
n2n_conn = sim.Projection(neuron1, neuron2, sim.OneToOneConnector(),
                          sim.StaticSynapse(weight=0.002, delay=1.))

spike_source.record('spikes')
neuron1.record(('v', 'spikes'))
neuron2.record(('v', 'spikes'))

sim.run(5000.0)

#print neuron1.get_spike_counts()

from pyNN.utility.plotting import Figure, Panel

data1 = neuron1.get_data().segments[0]
data2 = neuron2.get_data().segments[0]
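# --- Hedged sketch of how data1/data2 above could be rendered with PyNN's
# --- plotting helpers; the title and output filename are made up.
Figure(
    Panel(data1.filter(name='v')[0], ylabel='neuron1 v (mV)', xticks=True),
    Panel(data2.filter(name='v')[0], ylabel='neuron2 v (mV)', xticks=True,
          xlabel='Time (ms)'),
    title='Membrane potentials',
).save('membrane_potentials.png')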
#seeds = numpy.arange(numberOfNodes) + int((time.time()*100)%2**32)

# seeds which are the same every time, different for each node
seeds = numpy.arange(numberOfNodes)
# bcast, as we can't be sure each node has the same time and therefore
# different seeds. This way, all nodes get the list from rank=0.
seeds = MPI.COMM_WORLD.bcast(seeds)
#rng = NumpyRNG(seed=seeds[rank], parallel_safe=False, rank=rank,
#               num_processes=numberOfNodes)
nest.SetKernelStatus({'rng_seeds': list(seeds)})

## Connections ##
#myConnectorE = sim.AllToAllConnector(weights=globalWeight, delays=0.1)
myConnectorI = sim.AllToAllConnector(weights=globalWeight, delays=0.1)

# Connectors which make the specified number of connections from pre to post.
# The inh_gamma_generator sends an independent realization to each post
# connection, so this is "as if" there were "num connection" independent
# inh_gamma_generators impinging on the target.
myConnectorE_E = sim.FixedNumberPreConnector(int(connectionsE_E) - NumOfConE_E,
                                             weights=globalWeight, delays=0.1)
myConnectorE_I = sim.FixedNumberPreConnector(int(connectionsE_I) - NumOfConE_I,
                                             weights=globalWeight, delays=0.1)

# a sub-set of the inh_gamma_generators are silenced after a time "tinit"
myConnectorE_E_silenced = sim.FixedNumberPreConnector(NumOfConE_E,
                                                      weights=globalWeight,
                                                      delays=0.1)
myConnectorE_I_silenced = sim.FixedNumberPreConnector(NumOfConE_I,
                                                      weights=globalWeight,
                                                      delays=0.1)
#myConnectorI = sim.AllToAllConnector(weights=globalWeight, delays=0.1)

# InhGamma Generators need "_S" (selective) type synapses.
# Passing this class to the Projection in a ComposedSynapseType object
sim.setup(timestep=0.1)

# --- Create populations of neurons ------------
populations = {'exc': [], 'inh': []}
for syn_type in ('exc', 'inh'):
    populations[syn_type] = [sim.Population(population_size, sim.IF_cond_exp,
                                            neuron_parameters)
                             for i in range(n_populations)]

# --- Connect the populations in a chain -------
connector_exc_exc = sim.AllToAllConnector(weights=weight_exc_exc, delays=delay)
connector_exc_inh = sim.AllToAllConnector(weights=weight_exc_inh, delays=delay)
connector_inh_exc = sim.AllToAllConnector(weights=weight_inh_exc, delays=delay)

for i in range(n_populations):
    j = (i + 1) % n_populations
    prj_exc_exc = sim.Projection(populations['exc'][i], populations['exc'][j],
                                 connector_exc_exc, target='excitatory')
    prj_exc_inh = sim.Projection(populations['exc'][i], populations['inh'][j],
                                 connector_exc_inh, target='excitatory')
    prj_inh_exc = sim.Projection(populations['inh'][i], populations['exc'][i],
                                 connector_inh_exc, target='inhibitory')

# --- Create and connect stimulus --------------
numpy.random.seed(rng_seed)
# %% CUBA
I_vec, u_vec, s_vec = LIF_CUBA(spikes=spikes_e)

# %% PyNN CUBA
# Setup
sim.setup(timestep=0.1, min_delay=0.1, max_delay=10.0)
IF_sim = sim.Population(1, sim.IF_curr_exp(), label="IF_curr_exp")
IF_sim.record('v')

spike_times = np.arange(50, 100, 10)
spike_input = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times),
                             label="Input spikes")

# Connections
w = 1
connections = sim.Projection(spike_input, IF_sim,
                             connector=sim.AllToAllConnector(),
                             synapse_type=sim.StaticSynapse(weight=w, delay=0.1),
                             receptor_type="excitatory")

# Run the simulation (duration in ms)
sim.run(100.0)

# Data
v_data = IF_sim.get_data()
data = IF_sim.get_data().segments[0]
v_cuba = data.filter(name="v")[0]

# Plotting
plt.plot(v_cuba)
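# --- Hedged follow-up: the Neo AnalogSignal carries its own time axis, so the
# --- trace can also be plotted against time in milliseconds instead of the
# --- sample index.
plt.plot(v_cuba.times, v_cuba.magnitude[:, 0])
plt.xlabel('Time (ms)')
plt.ylabel('v (mV)')
plt.show()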