# Plot the outgoing connections of the centre element of ``layer`` (targets in
# red), save the figure, then dump the layer's connections to a text file.
# NOTE(review): ``layer``, ``fig``, ``file_name`` and ``conn_dict`` are defined
# earlier in this script (outside this excerpt) — confirm before reuse.
tp.PlotTargets(tp.FindCenterElement(layer), layer, fig=fig, tgt_color='red')
pylab.savefig(file_name)
# pylab.show()
tp.DumpLayerConnections(layer, conn_dict['synapse_model'], file_name + ".dump")
positions = tp.GetTargetPositions(layer, layer)

l2d_specs = {"rows": 25, "columns": 25, "elements": 'iaf_neuron'}  # A simple 2D grid layer
layer_2d_grid = tp.CreateLayer(l2d_specs)

# 200 random positions in the unit cube and 2000 in the unit square, both
# centred on the origin (free, i.e. off-grid, layer placement).
positions_3d = [[
    np.random.uniform(-0.5, 0.5),
    np.random.uniform(-0.5, 0.5),
    np.random.uniform(-0.5, 0.5)
] for a in xrange(200)]
positions_2d = [[np.random.uniform(-0.5, 0.5),
                 np.random.uniform(-0.5, 0.5)] for b in xrange(2000)]
l3d_specs = {"elements": 'iaf_neuron', "positions": positions_3d}
l2d1_specs = {"elements": 'iaf_neuron', "positions": positions_2d}
layer_3d = tp.CreateLayer(l3d_specs)
# populations layerProps = { 'rows': Params['N'], 'columns': Params['N'], 'extent': [Params['visSize'], Params['visSize']], 'edge_wrap': True } # This dictionary does not yet specify the elements to put into the # layer, since they will differ from layer to layer. We will add them # below by updating the ``'elements'`` dictionary entry for each # population. # Retina # ------ layerProps.update({'elements': 'RetinaNode'}) retina = topo.CreateLayer(layerProps) # Original: Gabor retina input if Params['lambda_dg'] >= 0: # Now set phases of retinal oscillators; we use a list comprehension instead # of a loop. [ nest.SetStatus( [n], { "phase": phaseInit( topo.GetPosition([n])[0], Params["lambda_dg"], Params["phi_dg"]) }) for n in nest.GetLeaves(retina)[0] ] else:
ylim=ylim, xticks=xticks, yticks=yticks, xlabel='', ylabel='') fig.gca().grid(False) # ----------------------------------------------- # Simple connection #{ conn1 #} l = tp.CreateLayer({ 'rows': 11, 'columns': 11, 'extent': [11., 11.], 'elements': 'iaf_psc_alpha' }) conndict = { 'connection_type': 'divergent', 'mask': { 'rectangular': { 'lower_left': [-2., -1.], 'upper_right': [2., 1.] } } } tp.ConnectLayers(l, l, conndict) #{ end #} fig = plt.figure()
def test_RotatedBoxMaskByPolarAngle(self):
    """Test rotated box mask with polar angle."""
    # 5x5x5 cube of positions with integer coordinates in [-2, 2].
    grid = [[x * 1., y * 1., z * 1.]
            for x in range(-2, 3)
            for y in range(-2, 3)
            for z in range(-2, 3)]
    layer = topo.CreateLayer({'positions': grid,
                              'extent': [5., 5., 5.],
                              'elements': 'iaf_psc_alpha'})
    origin = [0., 0., 0.]

    def select(spec):
        # Build a box mask from ``spec`` and return the sorted GIDs it
        # selects around the origin.
        box = topo.CreateMask('box', spec)
        return sorted(topo.SelectNodesByMask(layer, origin, box))

    # First test without rotation.
    self.assertEqual(
        select({'lower_left': [-0.5, -1.0, -1.0],
                'upper_right': [0.5, 1.0, 1.0]}),
        [58, 59, 60, 63, 64, 65, 68, 69, 70])

    # Test with a polar angle of 90 degrees.
    self.assertEqual(
        select({'lower_left': [-0.5, -1.0, -1.0],
                'upper_right': [0.5, 1.0, 1.0],
                'polar_angle': 90.}),
        [34, 39, 44, 59, 64, 69, 84, 89, 94])

    # A polar angle of 180 degrees must reproduce the unrotated selection.
    self.assertEqual(
        select({'lower_left': [-0.5, -1.0, -1.0],
                'upper_right': [0.5, 1.0, 1.0],
                'polar_angle': 180.}),
        [58, 59, 60, 63, 64, 65, 68, 69, 70])

    # Test with a polar angle of 45 degrees.
    self.assertEqual(
        select({'lower_left': [-0.5, -1.5, -1.5],
                'upper_right': [0.5, 1.5, 1.5],
                'polar_angle': 45.}),
        [33, 38, 43, 59, 64, 69, 85, 90, 95])

    # 135 degrees: the selected GIDs are perpendicular to those obtained
    # with a polar angle of 45 degrees.
    self.assertEqual(
        select({'lower_left': [-0.5, -1.5, -1.5],
                'upper_right': [0.5, 1.5, 1.5],
                'polar_angle': 135.}),
        [35, 40, 45, 59, 64, 69, 83, 88, 93])

    # Two masks symmetric in x and z: rotating by 90 degrees must then
    # select exactly the same nodes as no rotation at all.
    unrotated = select({'lower_left': [-1., -0.5, -1.],
                        'upper_right': [1., 0.5, 1.]})
    self.assertEqual(unrotated, [38, 39, 40, 63, 64, 65, 88, 89, 90])
    rotated = select({'lower_left': [-1., -0.5, -1.],
                      'upper_right': [1., 0.5, 1.],
                      'polar_angle': 90.})
    self.assertEqual(rotated, [38, 39, 40, 63, 64, 65, 88, 89, 90])
    self.assertEqual(unrotated, rotated)
def test_RotatedRectangularMask(self):
    """Test rotated rectangular mask.

    We have:
        lower_left:  [-1., -0.5]
        upper_right: [ 1.,  0.5]

    So, if we have:

    layer:
    2  7  12  17  22
    3  8  13  18  23
    4  9  14  19  24
    5  10 15  20  25
    6  11 16  21  26

    and have azimuth_angle = 0, we should get gids 9, 14, 19 if we
    select GIDs by mask. If we have azimuth_angle = 90, we should get
    gids 13, 14, 15.
    """
    # Test 2D layer
    layer = topo.CreateLayer({'rows': 5, 'columns': 5,
                              'extent': [5., 5.],
                              'elements': 'iaf_psc_alpha'})
    cntr = [0., 0.]

    def gids_for(spec):
        # Return the GIDs selected around the centre by the given
        # rectangular mask specification.
        rect = topo.CreateMask('rectangular', spec)
        return topo.SelectNodesByMask(layer, cntr, rect)

    # First test without rotation.
    self.assertEqual(
        gids_for({'lower_left': [-1., -0.5], 'upper_right': [1., 0.5]}),
        (9, 14, 19,))

    # Rotating 90 degrees selects the horizontal row instead.
    self.assertEqual(
        gids_for({'lower_left': [-1., -0.5], 'upper_right': [1., 0.5],
                  'azimuth_angle': 90.0}),
        (13, 14, 15,))

    # Azimuth angle of 45 degrees: diagonal selection.
    self.assertEqual(
        gids_for({'lower_left': [-1.5, -0.5], 'upper_right': [1.5, 0.5],
                  'azimuth_angle': 45.0}),
        (10, 14, 18,))

    # Azimuth angle of 135 degrees: the other diagonal.
    self.assertEqual(
        gids_for({'lower_left': [-1.5, -0.5], 'upper_right': [1.5, 0.5],
                  'azimuth_angle': 135.0}),
        (8, 14, 20,))

    # A polar angle is invalid for a 2D mask and must raise an error.
    with self.assertRaises(nest.NESTError):
        mask = topo.CreateMask('rectangular',
                               {'lower_left': [-1.5, -0.5],
                                'upper_right': [1.5, 0.5],
                                'polar_angle': 45.0})
Create layer of 4x3 iaf_neurons, visualize BCCN Tutorial @ CNS*09 Hans Ekkehard Plesser, UMB ''' import nest import pylab import nest.topology as topo pylab.ion() nest.ResetKernel() l1 = topo.CreateLayer({ 'columns': 4, 'rows': 3, 'extent': [2.0, 1.5], 'elements': 'iaf_neuron' }) nest.PrintNetwork() nest.PrintNetwork(2) nest.PrintNetwork(2, l1) topo.PlotLayer(l1, nodesize=50) # beautify pylab.axis([-1.0, 1.0, -0.75, 0.75]) pylab.axes().set_aspect('equal', 'box') pylab.axes().set_xticks((-0.75, -0.25, 0.25, 0.75)) pylab.axes().set_yticks((-0.5, 0, 0.5)) pylab.grid(True)
def run(self, do_only_setup=False):
    """Run a ring-attractor simulation in NEST.

    Builds the excitatory/inhibitory populations as 1-row topology
    layers, wires noise, signal and recurrent connections, simulates,
    and post-processes the spike data into a bump-activity profile.

    :param do_only_setup: only do network setup, do not simulate. This
        can be useful for debugging or to run theory predictions.
    :return: ``True`` if ``do_only_setup``; the E->E weight matrix if
        ``return_w_mat`` is set; otherwise a dict with bump shape,
        directions, rates and spikes.
    """
    if self.params["gen"] == None:
        self.params["gen"] = gen_params_baserate
    print "Using network: %s (E: %s, I: %s)" % (
        self.handler.network, self.handler.network.e.neuron_model.name,
        self.handler.network.i.neuron_model.name)
    self.reset()
    nest.ResetKernel()
    n_e = int(self.handler.p_e[npr.C_E])
    n_i = int(self.handler.p_i[npr.C_I])
    n_threads = self.cores
    nest.SetKernelStatus({
        'print_time': True,
        'local_num_threads': n_threads
    })
    N_vp = nest.GetKernelStatus(['total_num_virtual_procs'])[0]
    # create models
    # excitatory neurons
    neuron_params = {
        'V_reset': self.handler.p_e[npr.VRES],
        # hardcoded in the model 'V_th': -50mV,
        't_ref': self.handler.p_e[npr.TAU_RP],
        'E_L': self.handler.p_e[npr.VL],
        'g_L': self.handler.p_e[npr.GM],
        'C_m': self.handler.p_e[npr.CM],
        'AMPA_g_peak': self.handler.p_e[npr.G_AMPA],
        'AMPA_Tau_1': self.handler.p_e[npr.TAU_AMPA],
        'GABA_g_peak': self.handler.p_e[npr.G_GABA],
        'GABA_Tau_1': self.handler.p_e[npr.TAU_GABA],
        # WE RESCALE THE NMDA CONDUCTANCE BY THE CONNECTIVITY,
        # multiplies everything and leaves weights as they are
        'NMDA_g_peak':
        self.handler.p_e[npr.G_NMDA] / self.params["gen"][mpr.P_EE],
        'NMDA_Tau_1': self.handler.p_e[npr.TAU_NMDA]
    }
    nest.CopyModel(self.handler.network.e.neuron_model.name,
                   'ht_neuron_ex', params=neuron_params)
    # inhibitory neurons
    neuron_params = {
        'V_reset': self.handler.p_i[npr.VRES],
        # hardcoded in the model 'V_th': -50mV,
        't_ref': self.handler.p_i[npr.TAU_RP],
        'E_L': self.handler.p_i[npr.VL],
        'g_L': self.handler.p_i[npr.GM],
        'C_m': self.handler.p_i[npr.CM],
        'AMPA_g_peak': self.handler.p_i[npr.G_AMPA],
        'AMPA_Tau_1': self.handler.p_i[npr.TAU_AMPA],
        'GABA_Tau_1': self.handler.p_i[npr.TAU_GABA],
        'GABA_g_peak': self.handler.p_i[npr.G_GABA],
        'NMDA_g_peak': self.handler.p_i[npr.G_NMDA],
        'NMDA_Tau_1': self.handler.p_i[npr.TAU_NMDA],
    }
    nest.CopyModel(self.handler.network.i.neuron_model.name,
                   'ht_neuron_in', params=neuron_params)
    # get receptor ID information, so we can connect to the
    # different synapses
    ex_receptors = nest.GetDefaults('ht_neuron_ex')['receptor_types']
    in_receptors = nest.GetDefaults('ht_neuron_in')['receptor_types']
    # CREATE NEURON POPULATIONS ----------------------- #
    # Both populations live on a 1-row ring (extent 2*pi, edge_wrap).
    ex_cells = tp.CreateLayer({
        'rows': 1,
        'columns': n_e,
        'elements': 'ht_neuron_ex',
        'extent': [2. * np.pi, 1.],
        'edge_wrap': True
    })
    in_cells = tp.CreateLayer({
        'rows': 1,
        'columns': n_i,
        'elements': 'ht_neuron_in',
        'extent': [2. * np.pi, 1.],
        'edge_wrap': True
    })
    nu_ext_e = self.handler.p_e[npr.NU_EXT] * 1e3
    nu_ext_i = self.handler.p_i[npr.NU_EXT] * 1e3
    # create Poisson generators
    ex_noise = nest.Create(
        'poisson_generator',
        1,
        params={'rate': nu_ext_e * self.handler.p_e[npr.C_EXT]})
    in_noise = nest.Create(
        'poisson_generator',
        1,
        params={'rate': nu_ext_i * self.handler.p_i[npr.C_EXT]})
    # ``gen * n`` replicates the generator id so every cell gets its
    # own one-to-one noise connection.
    nest.OneToOneConnect(ex_noise * n_e,
                         nest.GetNodes(ex_cells)[0],
                         params={
                             'weight': 1.,
                             'receptor_type': ex_receptors['AMPA']
                         },
                         model='static_synapse')
    nest.OneToOneConnect(in_noise * n_i,
                         nest.GetNodes(in_cells)[0],
                         params={
                             'weight': 1.,
                             'receptor_type': in_receptors['AMPA']
                         },
                         model='static_synapse')
    # signal initiators
    if self.params["gen"]["sig_len"] > 0:
        start = self.params["gen"]["sig_start"]
        stop = self.params["gen"]["sig_start"] + self.params["gen"][
            "sig_len"]
        print "Creating poisson signal from t=%i to t=%i" % (
            self.params["gen"]["sig_start"],
            self.params["gen"]["sig_start"] +
            self.params["gen"]["sig_len"])
        sig_len = int(self.params["gen"]["sig_width"] * n_e)
        sig_range = get_cue_neurons(self.params["gen"]["sig_width"],
                                    self.params["gen"]["sig_center"],
                                    n_e)
        print "Signal (width %g) will be sent to %i neurons centered at %g" % (
            self.params["gen"]["sig_width"], len(sig_range),
            self.params["gen"]["sig_center"])
        ex_neurons = nest.GetNodes(ex_cells)[0]
        # WATCH OUT, setting range to length. sometimes due to rounding
        # this was not equal
        sig_range = sig_range[:sig_len]
        if self.params["gen"]["sig_fade"]:
            # Fading cue: full rate for the first half, half rate for
            # the second half of the signal window.
            print "Using fading poisson signal."
            ex_signal = nest.Create(
                'poisson_generator',
                1,
                params={
                    'rate': self.params["gen"]["sig_rate"],
                    'start': start,
                    'stop': start + self.params["gen"]["sig_len"] / 2.
                })
            nest.OneToOneConnect(
                ex_signal * sig_len,
                [ex_neurons[i] for i in sig_range],
                params={
                    'weight': self.params["gen"]["sig_weight"],
                    'receptor_type': ex_receptors['AMPA']
                },
                model='static_synapse')
            ex_signal = nest.Create(
                'poisson_generator',
                1,
                params={
                    'rate': 0.5 * self.params["gen"]["sig_rate"],
                    'start': start + self.params["gen"]["sig_len"] / 2.,
                    'stop': stop
                })
            nest.OneToOneConnect(
                ex_signal * sig_len,
                [ex_neurons[i] for i in sig_range],
                params={
                    'weight': self.params["gen"]["sig_weight"],
                    'receptor_type': ex_receptors['AMPA']
                },
                model='static_synapse')
        else:
            print "Using non-fading poisson signal."
            ex_signal = nest.Create(
                'poisson_generator',
                1,
                params={
                    'rate': self.params["gen"]["sig_rate"],
                    'start': start,
                    'stop': start + self.params["gen"]["sig_len"]
                })
            nest.OneToOneConnect(
                ex_signal * sig_len,
                [ex_neurons[i] for i in sig_range],
                params={
                    'weight': self.params["gen"]["sig_weight"],
                    'receptor_type': ex_receptors['AMPA']
                },
                model='static_synapse')
    # STP CONNECTIONS
    if self.handler.network.i.nmda_synapse.is_stp:
        print "Using E->I stp (%s) with params: U=%f tau_r=%f rau_f=%f" % (
            tsodyks_model, self.handler.network.i.nmda_synapse.U,
            self.handler.network.i.nmda_synapse.tau_r,
            self.handler.network.i.nmda_synapse.tau_f)
        nest.CopyModel(
            tsodyks_model, 'NMDA_EI', {
                'U': self.handler.network.i.nmda_synapse.U,
                'tau_fac': self.handler.network.i.nmda_synapse.tau_f,
                'tau_rec': self.handler.network.i.nmda_synapse.tau_r,
                'receptor_type': in_receptors['NMDA']
            })
    # REGULAR CONNECTIONS
    else:
        print "Using E->I static synapses"
        nest.CopyModel('static_synapse', 'NMDA_EI',
                       {'receptor_type': in_receptors['NMDA']})
    # E->I all to all connected
    conndict_EI = {
        "connection_type": "divergent",
        "synapse_model": "NMDA_EI",
        "mask": {
            "grid": {
                "rows": 1,
                "columns": n_i
            }
        },
        "weights": 1.,
        "kernel": 1.,
        "allow_autapses": False
    }
    tp.ConnectLayers(ex_cells, in_cells, conndict_EI)
    # I->I all to all connected
    nest.CopyModel('static_synapse', 'GABA_II',
                   {'receptor_type': in_receptors['GABA']})
    conndict_II = {
        "connection_type": "divergent",
        "synapse_model": "GABA_II",
        "mask": {
            "grid": {
                "rows": 1,
                "columns": n_i
            }
        },
        "weights": 1.,
        "kernel": 1.,
        "allow_autapses": False
    }
    tp.ConnectLayers(in_cells, in_cells, conndict_II)
    # I->E all to all connected
    nest.CopyModel('static_synapse', 'GABA_IE',
                   {'receptor_type': ex_receptors['GABA']})
    conndict_IE = {
        "connection_type": "divergent",
        "synapse_model": "GABA_IE",
        "mask": {
            "grid": {
                "rows": 1,
                "columns": n_e
            }
        },
        "weights": 1.,
        "kernel": 1.,
        "allow_autapses": False
    }
    tp.ConnectLayers(in_cells, ex_cells, conndict_IE)
    # Fixed seeds so the wiring is reproducible across runs.
    nest.SetKernelStatus({
        'rng_seeds':
        range(self.params["gen"]["base_seed"] + N_vp + 1,
              self.params["gen"]["base_seed"] + 2 * N_vp + 1)
    })
    nest.SetKernelStatus(
        {'grng_seed': self.params["gen"]["base_seed"] + N_vp})
    print "Seed for connectivity (should stay the same): ", nest.GetKernelStatus(
    )["rng_seeds"]
    # noise on membrane parameters
    if self.params["gen"][mpr.EL_NOISE] > 0. or not self.params["gen"][
            mpr.EL_NOISE_ARRAY] == None:
        print "Using nonzero leak noise:"
        if not self.params["gen"][mpr.EL_NOISE_ARRAY] == None:
            # A pre-computed leak array takes precedence.
            leak_variance = self.params["gen"][mpr.EL_NOISE_ARRAY]
            print "Setting variable leak currents with given noise array, mean: %.2f, std: %.2f" % (
                np.mean(leak_variance), np.std(leak_variance))
            assert len(leak_variance) == n_e, "noise array has wrong size"
        else:
            np.random.seed(self.params["gen"]["base_seed"] + N_vp)
            self.params["gen"][
                mpr.EL_SEED] = self.params["gen"]["base_seed"] + N_vp
            leak_variance = np.random.normal(
                self.handler.p_e[npr.VL],
                self.params["gen"][mpr.EL_NOISE], n_e)
            self.params["gen"][mpr.EL_NOISE_ARRAY] = leak_variance
            print "Setting variable leak currents: mean %.1f, std %.1f (seed=%i)" % (
                np.mean(leak_variance), np.std(leak_variance),
                self.params["gen"][mpr.EL_SEED])
            print leak_variance[:10], "..."
        nest.SetStatus(nest.GetNodes(ex_cells)[0], "E_L", leak_variance)
    # EE CONNECTIONS ##################################
    # TO SAVE CONNECTIONS WE DO AMPA DIST DEPENDENT CONNS WITH THE SAME SEED
    if self.params["gen"]["return_w_mat"]:
        print "Using fake synapses for weight matrix output"
        nest.CopyModel('static_synapse', 'NMDA_EE',
                       {'receptor_type': ex_receptors['AMPA']})
    else:
        # STP CONNECTIONS
        if self.handler.network.e.nmda_synapse.is_stp:
            print "Using E->E stp (%s) with params: U=%f tau_r=%f rau_f=%f" % (
                tsodyks_model, self.handler.network.e.nmda_synapse.U,
                self.handler.network.e.nmda_synapse.tau_r,
                self.handler.network.e.nmda_synapse.tau_f)
            nest.CopyModel(
                tsodyks_model, 'NMDA_EE', {
                    'U': self.handler.network.e.nmda_synapse.U,
                    'tau_fac': self.handler.network.e.nmda_synapse.tau_f,
                    'tau_rec': self.handler.network.e.nmda_synapse.tau_r,
                    'receptor_type': ex_receptors['NMDA']
                })
        # REGULAR CONNECTIONS
        else:
            print "Using static synapses"
            nest.CopyModel('static_synapse', 'NMDA_EE',
                           {'receptor_type': ex_receptors['NMDA']})
    if self.params["gen"][mpr.P_EE] < 1.:
        print "Using connecticity p_ee: %f" % self.params["gen"][mpr.P_EE]
    if self.params["gen"][mpr.W_NOISE] > 0.:
        print "Using w noise with level (multiplied after by w1:%f): %f" % (
            self.params["gen"][mpr.W_NOISE], self.handler.p_e[npr.W_1])
    print "Using w parameters - w_j: %.2f --> w_0: %.2f, w_1: %.2f, w_sigma: %.2f" % (
        self.handler.p_mf[mpr.W_J], self.handler.p_e[npr.W_0],
        self.handler.p_e[npr.W_1], self.handler.p_e[npr.W_SIGMA])
    # Gaussian-profile recurrent E->E weights with multiplicative noise.
    conndict_EE = {
        "connection_type": "divergent",
        "synapse_model": "NMDA_EE",
        "mask": {
            "grid": {
                "rows": 1,
                "columns": n_e
            }
        },
        "weights": {
            "gaussian_noisy": {
                "c": self.handler.p_e[npr.W_0],
                "p_center": self.handler.p_e[npr.W_1],
                "sigma": self.handler.p_e[npr.W_SIGMA],
                "sigma_noise":
                self.params["gen"][mpr.W_NOISE] * self.handler.p_e[npr.W_1]
            }
        },
        "kernel": self.params["gen"][mpr.P_EE],
        "allow_autapses": False
    }
    # NOTE(review): this assignment is redundant — "kernel" is already
    # initialised to the same value above.
    if self.params["gen"][mpr.P_EE] < 1.:
        conndict_EE["kernel"] = self.params["gen"][mpr.P_EE]
    # Re-seed so the E->E wiring uses the same streams as above.
    nest.SetKernelStatus({
        'rng_seeds':
        range(self.params["gen"]["base_seed"] + N_vp + 1,
              self.params["gen"]["base_seed"] + 2 * N_vp + 1)
    })
    nest.SetKernelStatus(
        {'grng_seed': self.params["gen"]["base_seed"] + N_vp})
    tp.ConnectLayers(ex_cells, ex_cells, conndict_EE)
    if self.params["gen"]["return_w_mat"]:
        # Read back the freshly created connections into an n_e x n_e
        # matrix (indexed [post, pre]) and return early.
        w_mat = np.zeros((n_e, n_e))
        cids = nest.GetNodes(ex_cells)[0]
        conns = nest.FindConnections(cids, synapse_type="NMDA_EE")
        stats = nest.GetStatus(conns)
        id_min = min(cids)
        id_max = max(cids)
        assert id_max - id_min == n_e - 1, "something is wrong with weight estimation"
        for stat in stats:
            pre = stat['source'] - id_min
            post = stat['target'] - id_min
            weight = stat['weight']
            w_mat[post, pre] = weight
        return w_mat
    if self.params["gen"]["show_weights"]:
        # Diagnostic plot: outgoing weights of the first E cell against
        # the analytic Gaussian profile.
        target = []
        weight = []
        a = nest.GetNodes(ex_cells)[0][0]
        b = nest.FindConnections([a])
        for con in b:
            aha = nest.GetStatus([con])[0]
            if aha['target'] < n_e:
                target.append(aha['target'])
                weight.append(aha['weight'])
        print "Total weight is: %f" % (np.sum(weight) / float(n_e))
        pl.figure()
        pl.scatter((np.array(target) / float(n_e) * 360.), weight)
        tmp = np.zeros(n_e)
        for i in range(n_e):
            tmp[i] = self.handler.p_e[npr.W_0] + self.handler.p_e[
                npr.W_1] * np.exp(-((i - n_e - 2) /
                                    (1. * n_e / (2 * np.pi)))**2 / 2. /
                                  (self.handler.p_e[npr.W_SIGMA])**2)
        pl.plot((np.arange(n_e) / float(n_e) * 360.), tmp, 'r')
        pl.show()
        print "Targets (should stay constant)"
        print target
    # Independent seed for the simulation run itself.
    if self.params["gen"]["base_seed_run"] is not None:
        print "Using preset seed for run: %i" % self.params["gen"][
            "base_seed_run"]
        msd = self.params["gen"]["base_seed_run"]
    else:
        print "Using random seed for run: seeding by time."
        np.random.seed(int(time.time()))
        msd = np.random.randint(100000000000)
        self.params["gen"]["base_seed_run"] = msd
    nest.SetKernelStatus(
        {'rng_seeds': range(msd + N_vp + 1, msd + 2 * N_vp + 1)})
    nest.SetKernelStatus({'grng_seed': msd + N_vp})
    # READOUTS ----------------------- #
    # spike detectors: one per population plus one combined
    ex_spikes = nest.Create("spike_detector")
    nest.SetStatus(ex_spikes, [{
        "label": "ex",
        "withtime": True,
        "withgid": True
    }])
    nest.ConvergentConnect(nest.GetNodes(ex_cells)[0],
                           ex_spikes,
                           model="static_synapse")
    in_spikes = nest.Create("spike_detector")
    nest.SetStatus(in_spikes, [{
        "label": "in",
        "withtime": True,
        "withgid": True
    }])
    nest.ConvergentConnect(nest.GetNodes(in_cells)[0],
                           in_spikes,
                           model="static_synapse")
    spikes = nest.Create("spike_detector")
    nest.SetStatus(spikes, [{"withtime": True, "withgid": True}])
    nest.ConvergentConnect(nest.GetNodes(ex_cells)[0],
                           spikes,
                           model="static_synapse")
    nest.ConvergentConnect(nest.GetNodes(in_cells)[0],
                           spikes,
                           model="static_synapse")
    # write cell ids to object
    self.ex_cells = np.array(nest.GetNodes(ex_cells)[0])
    self.in_cells = np.array(nest.GetNodes(in_cells)[0])
    if do_only_setup:
        return True
    # SIMULATE ----------------------- #
    print "Seeds for run: ", nest.GetKernelStatus()["rng_seeds"]
    print "Running %.1f ms" % self.params["gen"]["tmax"]
    starttime = time.time()
    # COOLDOWN FIRST ###########
    if self.params["gen"]["do_bistab_pert"]:
        # Perturbation cue: four evenly spaced patches around the ring.
        # NOTE(review): uses ``ex_neurons``, which is only bound when
        # sig_len > 0 above — would raise NameError otherwise; confirm.
        ex_signal_init = nest.Create('poisson_generator',
                                     1,
                                     params={
                                         'rate': 1500.,
                                         'start': 1000.,
                                         'stop': 1500.
                                     })
        sig_range = []
        d = 4
        for i in range(d):
            sig_range += get_cue_neurons(.025, i / float(d), n_e)
        sig_len = len(sig_range)
        nest.OneToOneConnect(ex_signal_init * sig_len,
                             [ex_neurons[i] for i in sig_range],
                             params={
                                 'weight': 2.,
                                 'receptor_type': ex_receptors['AMPA']
                             },
                             model='static_synapse')
    # FULL SIM ###########
    nest.Simulate(self.params["gen"]["tmax"])
    print "Done (%.2fs): " % ((time.time() - starttime)),
    # Output --------------------------- #
    ex_ev = nest.GetStatus(ex_spikes, "events")[0]
    self.spikes["e"] = process_spikes(ex_ev, self.params["gen"]["tmax"])
    in_ev = nest.GetStatus(in_spikes, "events")[0]
    self.spikes["i"] = process_spikes(in_ev, self.params["gen"]["tmax"])
    all_ev = nest.GetStatus(spikes, "events")[0]
    self.spikes["all"] = process_spikes(all_ev,
                                        self.params["gen"]["tmax"])
    # Instantaneous rates per E cell via exponential-kernel smoothing.
    window = 100.
    ex_spiketrains = self.spikes["e"]["spike_trains"].spiketrains
    kernel, norm, m_idx = analysis.make_kernel('exp', window, 1)
    act_len = len(self.ex_cells)
    act = np.zeros((act_len, int(self.params["gen"]["tmax"])))
    for l, spktr in enumerate(ex_spiketrains):
        nrnid = np.where(self.ex_cells == spktr)[0]
        if len(nrnid) == 0:
            continue
        taxis, raxis = ex_spiketrains[spktr].instantaneous_rate(
            1, kernel, norm, m_idx, trim=False)
        act[nrnid, :] = raxis
    # end of bump activity only
    bump_t_end = self.params["gen"]["tmax"] - self.params["gen"][
        "bump_shape_enddist"]
    bump_t_start = self.params["gen"]["sig_start"] + self.params["gen"][
        "sig_len"] + 1000.  # 1s of buffer between start and avg
    if bump_t_end <= bump_t_start:
        bump_t_start_old = bump_t_start
        bump_t_start = self.params["gen"]["sig_start"] + self.params[
            "gen"]["sig_len"]
        print "Run is not long enough (t_max: %.1f, bump_t_start: %.1f, bump_t_end: %.1f). Setting bump_t_start = %.1f" % (
            self.params["gen"]["tmax"], bump_t_start_old, bump_t_end,
            bump_t_start)
    # Population vector decoding of the bump position per time step.
    dir_vec = np.exp(2. * 1.j * np.pi * np.arange(act_len, dtype=float) /
                     float(act_len))
    dirs = (np.angle(np.dot(np.transpose(act),
                            dir_vec))) / np.pi * act_len / 2. % act_len
    act_bump = act[:, int(bump_t_start):int(bump_t_end)]
    dirs_bump = dirs[int(bump_t_start):int(bump_t_end)]
    # rectify bump to center and fit gauss curve
    shift_n = (len(act_bump) / 2. - dirs_bump).round().astype(int)
    out_means = []
    points = np.arange(0, act_bump.shape[1], 20)
    for j in points:
        out_means.append(np.roll(act_bump[:, j], shift_n[j], 0))
    out_means = np.array(out_means).T
    if self.params["gen"]["show_results"] or self.params["gen"][
            "show_spikes"]:
        nest.raster_plot.from_device(ex_spikes, hist=True)
        pl.title("Exctitatory Population")
        pl.savefig('lastrun_spikes_e.pdf')
        nest.raster_plot.from_device(in_spikes, hist=True)
        pl.title("Inhibitory Population")
        pl.savefig('lastrun_spikes_i.pdf')
    return {
        "shape_mean": np.mean(out_means, 1),
        "shape_std": np.std(out_means, 1),
        "shape": out_means,
        "dirs": dirs,
        "bump_rate": act,
        "spikes": self.spikes,
        "pop_rate": self.get_rates(bump_t_start, bump_t_end)
    }
# Demonstrate NEST Topology layer creation, Gaussian-kernel connectivity
# and target plotting.
import nest
import nest.topology as tp
import math
import pylab

# 21x21 grid layer; divergent connections within a circular mask, with a
# Gaussian distance-dependent connection probability.
l = tp.CreateLayer({'rows': 21, 'columns': 21, 'elements': 'iaf_neuron'})
conndict = {'connection_type': 'divergent',
            'mask': {'circular': {'radius': 0.4}},
            'kernel': {'gaussian': {'p_center': 1.0, 'sigma': 0.15}}}
tp.ConnectLayers(l, l, conndict)

# Plot the layer and the targets of its centre element, overlaying the
# mask boundary and the kernel (green).
fig = tp.PlotLayer(l, nodesize=80)
ctr = tp.FindCenterElement(l)
tp.PlotTargets(ctr, l, fig=fig,
               mask=conndict['mask'], kernel=conndict['kernel'],
               src_size=250, tgt_color='red', tgt_size=20,
               kernel_color='green')

# Three 5x5 layers: one at the default centre, two shifted.
l1 = tp.CreateLayer({'rows': 5, 'columns': 5, 'elements': 'iaf_neuron'})
l2 = tp.CreateLayer({'rows': 5, 'columns': 5, 'elements': 'iaf_neuron',
                     'center': [-1.0, 1.0]})
l3 = tp.CreateLayer({'rows': 5, 'columns': 5, 'elements': 'iaf_neuron',
                     'center': [1.5, 0.5]})
# NOTE(review): ``mix_par`` is not defined in this excerpt — presumably a
# connection dictionary defined elsewhere; confirm before running.
tp.ConnectLayers(l1, l3, mix_par)
"V_th": theta } nest.CopyModel("iaf_psc_alpha", "exc", params=neuron_params) nest.CopyModel("iaf_psc_alpha", "inh", params=neuron_params) """ Part a: Create layers The following code distributes the neurons randomly across the layer. """ ex_pos = [[np.random.uniform(-extentX / 2., extentX / 2.),\ np.random.uniform(-extentY / 2., extentY / 2.)] for j in xrange(ne)] layer_dict.update({"positions": ex_pos, "elements": "exc"}) excNeurons = topo.CreateLayer(layer_dict) in_pos = [[np.random.uniform(-extentX / 2., extentX / 2.),\ np.random.uniform(-extentY / 2., extentY / 2.)] for j in xrange(ni)] layer_dict.update({"positions": in_pos, "elements": "inh"}) inhNeurons = topo.CreateLayer(layer_dict) """ Write your code here -------------------- Create a layer of inhibitory neurons and another of excitatory neurons. Use the funcion topo.CreateLayer() for that. """ """ Part b: Connect the layers """
# for layer connections some terminologies are important should be taken to consider: # Connection dictionary, Source, Target, Connection type, Convergent connection, Divergent connection # Driver, Pool, Displacement, Distance, Mask, Kernel, Autapase and Multapse # Connection type Driver Pool # Convergent Target Layer Source Layer # Divergent Source Layer Target Layer # A mask describes which area of the pool layer shall be searched for nodes to connect for any given node in the driver layer. # Any connection between layers through "ConnectLayers" (Connection dictionary should at least provide the # "connection_type" information l = tp.CreateLayer({ "rows": 11, "columns": 11, "extent": [11.0, 11.0], "elements": "iaf_neuron" }) conn_dic = { "connection_type": "divergent", "mask": { "rectangular": { "lower_left": [-2.0, -1.0], "upper_right": [2.0, 1.0] } } } tp.ConnectLayers(l, l, conn_dic) # the above layer connection model is divergent, so it takes all nodes of Layer 'l' and connects it to all neighbor nodes inside the rectangular # where the selected node is in center. (one by one)
ax.set_aspect('equal', 'box') ax.set_xticks(xticks) ax.set_yticks(yticks) ax.grid(True) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) return # -------------------------------------------------- nest.ResetKernel() #{ layer1 #} import nest.topology as tp l = tp.CreateLayer({'rows': 5, 'columns': 5, 'elements': 'iaf_neuron'}) #{ end #} fig = tp.PlotLayer(l, nodesize=50) beautify_layer(l, fig, xlabel='x-axis (columns)', ylabel='y-axis (rows)') ax = fig.gca() tx = [] for r in range(5): tx.append( ax.text(0.65, 0.4 - r * 0.2, str(r), horizontalalignment='center', verticalalignment='center')) tx.append( ax.text(-0.4 + r * 0.2,
element = [sensor_x[i], sensor_y[i]] if i < P.num_rods: rod_pos.append(element) elif P.num_rods <= i < P.num_rods + P.num_S_cones: S_cone_pos.append(element) else: M_cone_pos.append(element) RGC_pos = RGCs(P.num_RGCs) # set up layer---------------------------------------------------------------------------------------------------------- retina_rods = tp.CreateLayer({ 'positions': rod_pos, 'elements': 'iaf_psc_alpha', 'extent': [diam_retina, diam_retina] }) retina_S_cones = tp.CreateLayer({ 'positions': S_cone_pos, 'elements': 'iaf_psc_alpha', 'extent': [diam_retina, diam_retina] }) retina_M_cones = tp.CreateLayer({ 'positions': M_cone_pos, 'elements': 'iaf_psc_alpha', 'extent': [diam_retina, diam_retina] })
positions = topo.GetPosition(nest.GetLeaves(topologyLayer)[0]) def geometry_function(idx): return positions[idx] return geometry_function """ We create two layers that have 20x20 neurons of type `iaf_psc_alpha`. """ pop1 = topo.CreateLayer({ 'elements': 'iaf_psc_alpha', 'rows': 20, 'columns': 20 }) pop2 = topo.CreateLayer({ 'elements': 'iaf_psc_alpha', 'rows': 20, 'columns': 20 }) """ For each layer, we create a CSA-style geometry function and a CSA metric based on them. """ g1 = geometryFunction(pop1) g2 = geometryFunction(pop2) d = csa.euclidMetric2d(g1, g2)
def create_spike_times(Sim, NModel):
    """Build a jittered-grid NEST network, simulate it and save results.

    Places ``NModel.nr_neurons`` iaf_psc_alpha neurons on a jittered
    square grid, connects them with distance-dependent Tsodyks synapses,
    drives them with a Poisson generator, records membrane potentials,
    synaptic currents and spikes, and writes everything to .mat files in
    ``Sim.data_folder``.

    :param Sim: simulation settings (resolution, folder, overwrite
        policy, lengths, sample counts).
    :param NModel: network model settings (neuron counts, synapse and
        neuron parameters, rates).
    """
    # Honour the overwrite policy before touching the output folder.
    if Sim.overwrite_files == "no":
        if Sim.data_folder.is_dir() == True:
            print("You are not allowed to overwrite the files.")
            sys.exit("Error message")
        else:
            Sim.data_folder.mkdir(parents=True)
    elif Sim.overwrite_files == "yes":
        if Sim.data_folder.is_dir() == True:
            print("You have overwritten old files.")
        else:
            Sim.data_folder.mkdir(parents=True)
    else:
        # NOTE(review): an invalid option only prints and falls through
        # without creating the folder — confirm this is intended.
        print("You have two options for overwrite files")
    connections_path = Sim.data_folder / "connections.txt"
    spikes_path = Sim.data_folder / "spikes.mat"
    spikes_split_path = Sim.data_folder / "spikes_split.mat"
    multimeter_data_path = Sim.data_folder / "multimeter_data.mat"
    hyperparameters_path = Sim.data_folder / "hyperparameters.mat"
    position_path = Sim.data_folder / "position.mat"
    # Define important simulation parameters
    nest.ResetKernel()
    seed = 1008.0
    nest.SetKernelStatus({"resolution": Sim.resolution,
                          "print_time": True,
                          "overwrite_files": True,
                          "grng_seed": int(seed),
                          "rng_seeds": [int(seed)]})
    # Construct the position grid of the neural network (NN)
    jit = 0.03
    if NModel.nr_neurons == 100:
        xs = np.arange(-0.45, .451, 0.1)  # defines the size of the network
    elif NModel.nr_neurons == 16:
        xs = np.arange(-0.15, .151, 0.1)
    else:
        # NOTE(review): this only prints; ``xs`` stays unbound and the
        # code below would raise NameError — confirm intended.
        print("Current network can constitute of 16 or 100 neurons.")
    np.random.seed(int(seed))
    pos = [[x, y] for y in xs for x in xs]
    # Jitter each grid position uniformly by up to +/- jit in x and y.
    pos = [[p[0] + np.random.uniform(-jit, jit),
            p[1] + np.random.uniform(-jit, jit)] for p in pos]
    # Construct the neurons on the grid and establish connections
    # between them. The probability of connection varies with the
    # distance between the neurons.
    # Define synapse connections
    nest.SetDefaults("tsodyks_synapse",
                     {"delay": NModel.t_delay,  # 1.5 in Stetter's code
                      "tau_rec": NModel.tau_rec,
                      "tau_fac": 0.0,
                      "U": NModel.U})
    conn1 = {
        "connection_type": "divergent",
        "mask": {"circular": {"radius": 0.75}},
        "kernel": {"gaussian": {"p_center": 1., "sigma": 0.15}},  # 0.15 for 100 neurons
        "allow_autapses": False,
        "synapse_model": "tsodyks_synapse",
        "weights": NModel.alpha_int
    }
    # specify the neural model
    neuron_param = {
        # "I_e": 0.0,
        "C_m": 1.0,
        "tau_m": NModel.tau_m,
        "t_ref": NModel.tau_s,  # refractory period in ms; 2.0 is default
        "E_L": 0.0,
        "V_th": NModel.V_thres,
        "V_m": 0.0,
        "V_reset": 0.0
    }
    nest.SetDefaults("iaf_psc_alpha", neuron_param)
    layer_dict_ex = {"positions": pos,
                     "extent": [1.1, 1.1],
                     "elements": "iaf_psc_alpha"}
    layer = topp.CreateLayer(layer_dict_ex)
    topp.ConnectLayers(layer, layer, conn1)
    # Plot layer
    topp.PlotLayer(layer)
    # change the seed for different Poisson spike trains
    nest.SetKernelStatus({
        'grng_seed': int(seed),
        'rng_seeds': [int(seed)]
    })
    # Creation of a poisson generator
    nest.CopyModel('poisson_generator', 'PG',
                   params={'rate': NModel.poisson_spike_rate})
    # 1.6 in the paper, I don't know why they changed it in the programm
    pg = topp.CreateLayer({'rows': 1, 'columns': 1, 'elements': 'PG'})
    cdict_stim = {'connection_type': 'divergent',
                  'weights': NModel.alpha_ext}
    topp.ConnectLayers(pg, layer, cdict_stim)
    # create multimeter
    nrns = nest.GetLeaves(layer, local_only=True)[0]
    multimeter = nest.Create("multimeter", NModel.nr_neurons)
    nest.SetStatus(multimeter,
                   {"withtime": True,
                    "record_from": ["V_m", "I_syn_ex"],
                    "interval": Sim.interval})  # , "input_currents_ex","input_currents_in"
    nest.Connect(multimeter, nrns, "one_to_one")
    # Create spike detector
    sd1 = nest.Create('spike_detector')
    nest.SetStatus(sd1, {'precise_times': True})
    nest.Connect(nrns, sd1)
    # Simulate
    nest.Simulate(Sim.sim_length + Sim.interval)
    # Retrieve the generated data
    # NOTE(review): ``time`` here shadows any module of the same name in
    # this function's scope.
    [potential, currents_ex, spikes, time] = get_data(
        multimeter, sd1, NModel.nr_neurons, Sim.nr_samples)
    # Save the spikes to a file
    spikesdict = {'N1': spikes}
    sio.savemat(str(spikes_path), spikesdict)
    # Optionally split the time series into P parts.
    if Sim.P > 1:
        potential_fin = time_series_split(potential, Sim.P)[0]
        currents_ex_fin = time_series_split(currents_ex, Sim.P)[0]
        spikesdict_split = spike_times_split(spikes, Sim.sim_length, Sim.P)
    else:
        potential_fin = potential
        currents_ex_fin = currents_ex
        spikesdict_split = {'N1': spikes}
    # Pass important hyperparameters to
    # create_fluorescent_data_from_spike_times.py
    hyperdict = {
        'P': Sim.P,
        'length_ts': Sim.length_ts,
        'nr_samples': Sim.nr_samples,
        'nr_neurons': NModel.nr_neurons,
        'interval': Sim.interval
    }
    sio.savemat(str(hyperparameters_path), hyperdict)
    # save the position
    sio.savemat(str(position_path), {'position': pos})
    # save the results
    sio.savemat(str(multimeter_data_path),
                mdict={
                    'potential': potential_fin,
                    'input_currents_ex': currents_ex_fin
                })
    sio.savemat(str(spikes_split_path), spikesdict_split)
    topp.DumpLayerConnections(layer, 'tsodyks_synapse',
                              str(connections_path))
import pylab pylab.ion() import nest import nest.topology as topo nest.ResetKernel() nest.set_verbosity('M_WARNING') # create two test layers a = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_neuron', 'edge_wrap': True }) b = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_neuron', 'edge_wrap': True }) topo.ConnectLayers( a, b, { 'connection_type': 'convergent', 'mask': {
''' import nest import nest.topology as topo import pylab import random pylab.ion() nest.ResetKernel() nest.CopyModel('iaf_psc_alpha', 'pyr') nest.CopyModel('iaf_psc_alpha', 'in') ctx = topo.CreateLayer({ 'columns': 4, 'rows': 3, 'extent': [2.0, 1.5], 'elements': ['pyr', 'in'] }) nest.PrintNetwork() nest.PrintNetwork(2) nest.PrintNetwork(2, ctx) # ctx_leaves is a work-around until NEST 3.0 is released ctx_leaves = nest.GetLeaves(ctx)[0] # extract position information ppyr = pylab.array( tuple(
def test_GetTargetNodesPositions(self):
    """Interface check for finding targets.

    Builds a 3x3 composite layer with two models per grid position
    (iaf_neuron and iaf_psc_alpha), connects it to itself with a 2x2
    grid mask twice (iaf_neuron sources via static_synapse, then
    iaf_psc_alpha -> iaf_psc_alpha via stdp_synapse), and verifies the
    counts returned by GetTargetNodes / GetTargetPositions under the
    tgt_model and syn_model filters.
    """
    ldict = {
        'elements': ['iaf_neuron', 'iaf_psc_alpha'],  # two nodes per position
        'rows': 3,
        'columns': 3,
        'extent': [2., 2.],
        'edge_wrap': True
    }
    cdict = {
        'connection_type': 'divergent',
        'mask': {
            'grid': {
                'rows': 2,
                'columns': 2
            }
        }
    }
    nest.ResetKernel()
    l = topo.CreateLayer(ldict)
    # Split the layer's leaves by model for targeted connection specs.
    ian = [
        gid for gid in nest.GetLeaves(l)[0]
        if nest.GetStatus([gid], 'model')[0] == 'iaf_neuron'
    ]
    ipa = [
        gid for gid in nest.GetLeaves(l)[0]
        if nest.GetStatus([gid], 'model')[0] == 'iaf_psc_alpha'
    ]

    # connect ian -> all using static_synapse
    cdict.update({
        'sources': {
            'model': 'iaf_neuron'
        },
        'synapse_model': 'static_synapse'
    })
    topo.ConnectLayers(l, l, cdict)
    # Restore cdict for the next connection spec.
    for k in ['sources', 'synapse_model']:
        cdict.pop(k)

    # connect ipa -> ipa using stdp_synapse
    cdict.update({
        'sources': {
            'model': 'iaf_psc_alpha'
        },
        'targets': {
            'model': 'iaf_psc_alpha'
        },
        'synapse_model': 'stdp_synapse'
    })
    topo.ConnectLayers(l, l, cdict)
    for k in ['sources', 'targets', 'synapse_model']:
        cdict.pop(k)

    # Single source: one entry, positions are 2-D coordinates.
    t = topo.GetTargetNodes(ian[:1], l)
    self.assertEqual(len(t), 1)

    p = topo.GetTargetPositions(ian[:1], l)
    self.assertEqual(len(p), 1)
    self.assertTrue(all([len(pp) == 2 for pp in p[0]]))

    t = topo.GetTargetNodes(ian, l)
    self.assertEqual(len(t), len(ian))
    self.assertTrue(all(
        [len(g) == 8
         for g in t]))  # 2x2 mask x 2 neurons / element -> eight targets

    p = topo.GetTargetPositions(ian, l)
    self.assertEqual(len(p), len(ian))

    # Filtering by target model halves the eight targets.
    t = topo.GetTargetNodes(ian, l, tgt_model='iaf_neuron')
    self.assertEqual(len(t), len(ian))
    self.assertTrue(all([len(g) == 4
                         for g in t]))  # 2x2 mask -> four targets

    t = topo.GetTargetNodes(ian, l, tgt_model='iaf_psc_alpha')
    self.assertEqual(len(t), len(ian))
    self.assertTrue(all([len(g) == 4
                         for g in t]))  # 2x2 mask -> four targets

    # ipa sources only made stdp connections to ipa targets.
    t = topo.GetTargetNodes(ipa, l)
    self.assertEqual(len(t), len(ipa))
    self.assertTrue(all([len(g) == 4
                         for g in t]))  # 2x2 mask -> four targets

    t = topo.GetTargetNodes(ipa, l, syn_model='static_synapse')
    self.assertEqual(len(t), len(ipa))
    self.assertTrue(all([len(g) == 0 for g in t]))  # no static syns

    t = topo.GetTargetNodes(ipa, l, syn_model='stdp_synapse')
    self.assertEqual(len(t), len(ipa))
    self.assertTrue(all([len(g) == 4
                         for g in t]))  # 2x2 mask -> four targets
def geometryFunction(topologyLayer): positions = topo.GetPosition(nest.GetLeaves(topologyLayer)[0]) def geometry_function(idx): return positions[idx] return geometry_function """ We create two layers that have 20x20 neurons of type `iaf_neuron`. """ pop1 = topo.CreateLayer({'elements': 'iaf_neuron', 'rows': 20, 'columns': 20}) pop2 = topo.CreateLayer({'elements': 'iaf_neuron', 'rows': 20, 'columns': 20}) """ For each layer, we create a CSA-style geometry function and a CSA metric based on them. """ g1 = geometryFunction(pop1) g2 = geometryFunction(pop2) d = csa.euclidMetric2d(g1, g2) """ The connection set ``cs`` describes a Gaussian connectivity profile with sigma = 0.2 and cutoff at 0.5, and two values (10000.0 and 1.0) used as weight and delay, respectively. """
"positions": poss, "elements": "iaf_psc_alpha", "extent": [1.1, 1.1] } my_layer_dict_on_grid = { "rows": 11, "columns": 11, "extent": [11.0, 11.0], "elements": 'iaf_psc_alpha' } # connectivity specifications with a mask conndict = { 'connection_type': 'divergent', 'mask': { 'rectangular': { 'lower_left': [-2.0, -1.0], 'upper_right': [2.0, 1.0] } } } my_layer = topp.CreateLayer(my_layer_dict_on_grid) topp.PlotLayer(my_layer) topp.ConnectLayers(my_layer, my_layer, conndict) nest.PrintNetwork(depth=1) topp.PlotTargets([5], my_layer) plt.show()
BCCN Tutorial @ CNS*09 Hans Ekkehard Plesser, UMB ''' import nest import nest.topology as topo import pylab pylab.ion() nest.ResetKernel() # create two test layers a = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_psc_alpha' }) b = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_psc_alpha' }) conndict = { 'connection_type': 'divergent', 'mask': { 'circular': { 'radius': 0.5 }
Hans Ekkehard Plesser, 2010-11-03 """ import nest import nest.topology as topo import os nest.sli_run('M_ERROR setverbosity') nest.SetKernelStatus({'total_num_virtual_procs': 4}) l1 = topo.CreateLayer({ 'rows': 50, 'columns': 40, 'elements': ['iaf_neuron', 2], 'edge_wrap': True }) l2 = topo.CreateLayer({ 'rows': 50, 'columns': 40, 'elements': ['iaf_neuron', 2], 'edge_wrap': True }) topo.ConnectLayers( l1, l2, { 'connection_type': 'convergent', 'mask': { 'circular': {
def main(args, rt, sim_num, var_id):
    """Build and simulate a tonotopic stim->pyr<->inh network.

    For every stimulus frequency, Poisson generators around that
    frequency are activated with a triangular tuning profile, the
    network is simulated for ``sim_time`` ms, and the firing rate of a
    random sample of pyramidal and inhibitory neurons is recorded.

    Parameters
    ----------
    args : dict of network/simulation parameters (see reads below)
    rt : progress reporter; only ``rt.live_update`` is called here
    sim_num, var_id : identifiers forwarded to ``rt.live_update``

    Returns
    -------
    dict with keys 'pyr' and 'inh'; each value is a list (one entry per
    sampled neuron) of per-frequency firing rates in Hz.
    """
    ###########################################
    ####      PARAMETERS                #######
    ###########################################
    freq_num = args['freq_num']  # number of auditory frequencies
    amp_factor = args[
        'amp_factor']  # strength of signal coming from generators
    sim_time = args['sim_time']  # duration of simulation (ms)
    grid_size = args['grid_size']  # side lengths of topological layers (nm)
    base_stim_rate = args['base_stim_rate']  # stimulus rate (Hz)
    tun_rad = args['tun_rad']  # broadness of tuning curve
    neuron_mod = args['neuron_mod']  # NEST neuron model name

    # One column per frequency in every layer; rows set population size.
    stim_layer_param = {
        'extent': grid_size,  # size of layer (nm^2)
        'rows': amp_factor,  # strength of signal amplification
        'columns': freq_num,  # one column per frequency
        'elements': 'poisson_generator'
    }
    pyr_layer_param = {
        'extent': grid_size,  # size of layer (nm^2)
        'rows': args['pyr_layer_num'],  # neurons per frequency
        'columns': freq_num,  # one column per frequency
        'elements': neuron_mod
    }
    inh_layer_param = {
        'extent': grid_size,  # size of layer (nm^2)
        'rows': args['inh_layer_num'],  # neurons per frequency
        'columns': freq_num,  # one column per frequency
        'elements': neuron_mod
    }

    # Connection dictionaries: circular mask scaled by the grid size,
    # Gaussian distance-dependent probability and weight.
    stim_conn_param = {
        'connection_type': 'divergent',  # connection based on target layer
        'mask': {
            'circular': {
                'radius': grid_size[0] * args['stim_conn_rad']
            }
        },
        'kernel': {
            'gaussian': {  # connection probability based on distance
                'p_center': args['stim_conn_p_center'],
                'sigma': args['stim_conn_p_sigma']
            }
        },
        'weights': {
            'gaussian': {  # weight of connection based on distance
                'p_center': args['stim_conn_weight_center'],
                'sigma': args['stim_conn_weight_sigma'],
                'min': 0.0  # excitatory: clip weights at zero
            }
        }
    }
    pyr_conn_param = {
        'connection_type': 'divergent',  # connection based on target layer
        'mask': {
            'circular': {
                'radius': grid_size[0] * args['pyr_conn_rad']
            }
        },
        'kernel': {
            'gaussian': {  # connection probability based on distance
                'p_center': args['pyr_conn_p_center'],
                'sigma': args['pyr_conn_p_sigma']
            }
        },
        'weights': {
            'gaussian': {  # weight of connection based on distance
                'p_center': args['pyr_conn_weight_center'],
                'sigma': args['pyr_conn_weight_sigma'],
                'min': 0.0
            }
        }
    }
    inh_conn_param = {
        'connection_type': 'divergent',  # connection based on target layer
        'mask': {
            'circular': {
                'radius': grid_size[0] * args['inh_conn_rad']
            }
        },
        'kernel': {
            'gaussian': {  # connection probability based on distance
                'p_center': args['inh_conn_p_center'],
                'sigma': args['inh_conn_p_sigma']
            }
        },
        'weights': {
            'gaussian': {  # weight of connection based on distance
                # negated center: inhibitory connections
                'p_center': -1 * args['inh_conn_weight_center'],
                'sigma': args['inh_conn_weight_sigma'],
                'max': 0.0  # inhibitory: clip weights at zero from above
            }
        }
    }
    pypy_conn_param = {
        'connection_type': 'divergent',  # connection based on target layer
        'mask': {
            'circular': {
                'radius': grid_size[0] * args['pypy_conn_rad']
            }
        },
        'kernel': {
            'gaussian': {  # connection probability based on distance
                'p_center': args['pypy_conn_p_center'],
                'sigma': args['pypy_conn_p_sigma']
            }
        },
        'weights': {
            'gaussian': {  # weight of connection based on distance
                'p_center': args['pypy_conn_weight_center'],
                'sigma': args['pypy_conn_weight_sigma'],
                'min': 0.0
            }
        }
    }

    sample_size = args['sample_size']  # number of neurons to randomly sample
    np.random.seed(args['seed'])  # set numpy seed for reproducability
    nest.ResetKernel()  # reset NEST
    nest.SetKernelStatus({'local_num_threads': 3})  # threading for efficiency

    ###########################################
    ####      NETWORK SETUP             #######
    ###########################################
    # Create layers
    layers = {
        'stim': topp.CreateLayer(stim_layer_param),
        'pyr': topp.CreateLayer(pyr_layer_param),
        'inh': topp.CreateLayer(inh_layer_param)
    }

    # Connect layers: stim -> pyr -> inh -> pyr, plus pyr -> pyr recurrence.
    topp.ConnectLayers(layers['stim'], layers['pyr'], stim_conn_param)
    topp.ConnectLayers(layers['pyr'], layers['inh'], pyr_conn_param)
    topp.ConnectLayers(layers['inh'], layers['pyr'], inh_conn_param)
    topp.ConnectLayers(layers['pyr'], layers['pyr'], pypy_conn_param)

    # Connect spike detectors to random recording neurons
    spk_det = {
        'pyr': nest.Create('spike_detector'),
        'inh': nest.Create('spike_detector')
    }
    rec_neurons = {
        'pyr':
        np.random.choice(nest.GetNodes(layers['pyr'])[0],
                         size=sample_size,
                         replace=False).tolist(),
        'inh':
        np.random.choice(nest.GetNodes(layers['inh'])[0],
                         size=sample_size,
                         replace=False).tolist()
    }
    for n in spk_det.keys():
        nest.Connect(rec_neurons[n], spk_det[n])

    ###########################################
    ####      SIMULATION                #######
    ###########################################
    # Initialize dictionary of firing rates (one list per sampled neuron).
    firing_rates = {
        'pyr': [[] for i in range(sample_size)],
        'inh': [[] for i in range(sample_size)]
    }

    for freq in range(freq_num):
        # Fresh network state per frequency; clock reset to 0.
        nest.ResetNetwork()
        nest.SetKernelStatus({'time': 0.0})
        rt.live_update(freq, sim_num, var_id)

        # Set rate for stim_layer neurons based on frequency of stimulus:
        # triangular tuning curve of half-width tun_rad around `freq`.
        for row in range(amp_factor):
            for col in range(max(0, freq - tun_rad),
                             min(freq_num, freq + tun_rad + 1)):
                rate_fac = max(0.0,
                               (tun_rad - abs(freq - col)) / float(tun_rad))
                nest.SetStatus(topp.GetElement(layers['stim'], [col, row]),
                               {'rate': rate_fac * base_stim_rate})

        # Simulate and record event data from spike detectors
        nest.Simulate(sim_time)

        # Store firing rate data for each set of neurons (spikes -> Hz).
        for n in spk_det.keys():
            sender_fires = [0] * sample_size
            for i in nest.GetStatus(spk_det[n])[0]['events']['senders']:
                sender_fires[rec_neurons[n].index(i)] += 1
            for i in range(sample_size):
                firing_rates[n][i].append(1000 * sender_fires[i] / sim_time)

        # Reset rates for stim_layer neurons before the next frequency.
        for row in range(amp_factor):
            for col in range(max(0, freq - tun_rad),
                             min(freq_num, freq + tun_rad + 1)):
                nest.SetStatus(topp.GetElement(layers['stim'], [col, row]),
                               {'rate': 0.0})

    return firing_rates
def test_ConnectWithRotatedRectangleMask(self): """Test connection with rotated rectangle mask. We have: lower_left = [-1.5, -0.5] upper_right = [ 1.5, 0.5] azimuth_angle = 45 degrees Each source node should then connect to: - The node in the same position in target layer - The node above the node to the right of that position - The node below the node to the left of the position. So, if we have sources: targets: 2 7 12 17 22 28 33 38 43 48 3 8 13 18 23 29 34 39 44 49 4 9 14 19 24 30 35 40 45 50 5 10 15 20 25 31 36 41 46 51 6 11 16 21 26 32 37 42 47 52 some example connections will be: ______ / / 2 -> / 28 / / / /_______ / _______ / 44 / 14 -> / 40 / / 36 / /_______ / """ source = topo.CreateLayer({'rows': 5, 'columns': 5, 'extent': [5., 5.], 'elements': 'iaf_psc_alpha'}) target = topo.CreateLayer({'rows': 5, 'columns': 5, 'extent': [5., 5.], 'elements': 'iaf_psc_alpha'}) conndict = {'connection_type': 'divergent', 'mask': {'rectangular': {'lower_left': [-1.5, -0.5], 'upper_right': [1.5, 0.5], 'azimuth_angle': 45.}}} topo.ConnectLayers(source, target, conndict) ref = [[2, 28], [3, 29], [3, 33], [4, 30], [4, 34], [5, 31], [5, 35], [6, 32], [6, 36], [7, 29], [7, 33], [8, 30], [8, 34], [8, 38], [9, 31], [9, 35], [9, 39], [10, 32], [10, 36], [10, 40], [11, 37], [11, 41], [12, 34], [12, 38], [13, 35], [13, 39], [13, 43], [14, 36], [14, 40], [14, 44], [15, 37], [15, 41], [15, 45], [16, 42], [16, 46], [17, 39], [17, 43], [18, 40], [18, 44], [18, 48], [19, 41], [19, 45], [19, 49], [20, 42], [20, 46], [20, 50], [21, 47], [21, 51], [22, 44], [22, 48], [23, 45], [23, 49], [24, 46], [24, 50], [25, 47], [25, 51], [26, 52]] connections = nest.GetConnections() for conn, conn_ref in zip(connections, ref): conn_list = [conn[0], conn[1]] self.assertEqual(conn_list, conn_ref)
print('Building layers') layerCreateStart = time.time() layerBase = { 'rows': N_rows, 'columns': N_columns, 'extent': [(N_columns * 1.0), (N_rows * 1.0)] } layerBase.update({ 'elements': [ 'exf_iaf_neuron', N_exf, 'exb_iaf_neuron', N_exb, 'inh_iaf_neuron', N_inh ] }) ly = tp.CreateLayer(layerBase) lyNoiseExf = tp.CreateLayer({ 'rows': 1, 'columns': 1, 'elements': ['noise_exf', ExtNeu] }) lyNoiseExb = tp.CreateLayer({ 'rows': 1, 'columns': 1, 'elements': ['noise_exb', ExtNeu] }) lyNoiseInh = tp.CreateLayer({ 'rows': 1, 'columns': 1, 'elements': ['noise_inh', ExtNeu]
Create two layers of 30x30 elements and connect them using a Gaussian probabilistic kernel, visualize. BCCN Tutorial @ CNS*09 Hans Ekkehard Plesser, UMB ''' import pylab import nest import nest.topology as topo pylab.ion() nest.ResetKernel() # create two test layers a = topo.CreateLayer({'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_neuron'}) b = topo.CreateLayer({'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': 'iaf_neuron'}) conndict = {'connection_type': 'divergent', 'mask': {'circular': {'radius': 3.0}}, 'kernel': {'gaussian': {'p_center': 1.0, 'sigma': 0.5}}, 'weights': 1.0, 'delays': 1.0} topo.ConnectLayers(a, b, conndict) # plot targets of neurons in different grid locations # first, clear existing figure, get current figure pylab.clf() fig = pylab.gcf()
def __init__(self): nest.ResetKernel() nest.SetKernelStatus({ "resolution": 0.1, "print_time": True, "overwrite_files": True, "local_num_threads": 8 }) nest.CopyModel('iaf_psc_alpha', 'exci') nest.CopyModel('iaf_psc_alpha', 'inhi') nest.CopyModel('static_synapse', 'exc', {'weight': 5.0}) nest.CopyModel('static_synapse', 'inh', {'weight': -5.0}) self.l = tp.CreateLayer({ 'rows': 90, 'columns': 90, 'elements': ['exci', 5, 'inhi', 5], 'edge_wrap': False }) cdict = { 'connection_type': 'divergent', 'mask': { 'circular': { 'radius': 0.2 } }, 'kernel': { 'gaussian': { 'p_center': 0.8, 'sigma': 0.075 } }, 'delays': { 'linear': { 'c': 2.0, 'a': 0.02 } }, 'sources': { 'model': 'exci' }, 'targets': { 'model': 'inhi' }, 'synapse_model': 'exc' } tp.ConnectLayers(self.l, self.l, cdict) self.rec_ex = tp.CreateLayer({ 'rows': 1, 'columns': 1, 'elements': 'spike_detector' }) cdict_rec_ex = { 'connection_type': 'convergent', 'sources': { 'model': "exci" } } tp.ConnectLayers(self.l, self.rec_ex, cdict_rec_ex) # Background stimulation stim = tp.CreateLayer({ 'rows': 1, 'columns': 1, 'elements': 'poisson_generator' }) stim_i = nest.GetLeaves(stim, local_only=True)[0] nest.SetStatus(stim_i, {'rate': 30000.}) background_stim_dict = { 'connection_type': 'divergent', 'mask': { 'grid': { 'rows': 90, 'columns': 90 } }, 'synapse_model': 'exc' } tp.ConnectLayers(stim, self.l, background_stim_dict) nest.Simulate(2000.) rec_ex_true = nest.GetLeaves(self.rec_ex, local_only=True)[0] self.events_ex = nest.GetStatus(rec_ex_true, "events")[0]
from mpl_toolkits.mplot3d import Axes3D pylab.ion() nest.ResetKernel() # generate list of 1000 (x,y,z) triplets pos = [[ random.uniform(-0.5, 0.5), random.uniform(-0.5, 0.5), random.uniform(-0.5, 0.5) ] for j in range(1000)] l1 = topo.CreateLayer({ 'extent': [1.5, 1.5, 1.5], # must specify 3d extent AND center 'center': [0., 0., 0.], 'positions': pos, 'elements': 'iaf_psc_alpha' }) # visualize # xext, yext = nest.GetStatus(l1, 'topology')[0]['extent'] # xctr, yctr = nest.GetStatus(l1, 'topology')[0]['center'] # l1_children is a work-around until NEST 3.0 is released l1_children = nest.hl_api.GetChildren(l1)[0] # extract position information, transpose to list of x, y and z positions xpos, ypos, zpos = zip(*topo.GetPosition(l1_children)) fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.scatter(xpos, ypos, zpos, s=15, facecolor='b', edgecolor='none')
# import nest import pylab as pl import numpy as np import nest.topology as tp fig, ax = pl.subplots(1, figsize=(6, 5)) nc, nr = 5, 3 d = 0.1 layer = tp.CreateLayer({"columns": nc, "rows": nr, "elements": "iaf_psc_alpha", "extent": [nc * d, nr * d], "center": [nc* d / 2., 0.]}) # tp.PlotLayer([layer1[0], layer2[0]], nodecolor="k", nodesize=50, fig=fig) tp.PlotLayer(layer, nodecolor="k", nodesize=50, fig=fig) ax.set_xlabel("x") ax.set_ylabel("y") ax.set_aspect('equal', 'box') ax.set_xticks(np.arange(0,0.6,0.1)) fig.savefig("figs/02.png") # pl.show()
import nest import nest.topology as topo import pylab pylab.ion() nest.ResetKernel() nest.set_verbosity('M_WARNING') # create two test layers nest.CopyModel('iaf_neuron', 'pyr') nest.CopyModel('iaf_neuron', 'in') a = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': ['pyr', 'in'] }) b = topo.CreateLayer({ 'columns': 30, 'rows': 30, 'extent': [3.0, 3.0], 'elements': ['pyr', 'in'] }) topo.ConnectLayers( a, b, { 'connection_type': 'divergent', 'sources': { 'model': 'pyr' },
print("Building network") ''' Configuration of the model `iaf_psc_alpha` and `poisson_generator` using SetDefaults(). This function expects the model to be the inserted as a string and the parameter to be specified in a dictionary. All instances of theses models created after this point will have the properties specified in the dictionary by default. ''' nest.SetDefaults("iaf_psc_alpha", neuron_params) ''' Creation of the topology layers for excitatory and inhibitory neurons. GIDs and neuron positions are written to file. ''' layer_in = tp.CreateLayer(layerdict_in) layer_ex = tp.CreateLayer(layerdict_ex) layer_stim = tp.CreateLayer(layerdict_stim) tp.DumpLayerNodes(layer_ex, os.path.join(spike_output_path, label_positions + '-0.dat')) tp.DumpLayerNodes(layer_in, os.path.join(spike_output_path, label_positions + '-1.dat')) tp.DumpLayerNodes(layer_stim, os.path.join(spike_output_path, label_positions + '-2.dat')) nodes_ex = nest.GetChildren(layer_ex)[0] # nodes of ex/in neurons nodes_in = nest.GetChildren(layer_in)[0] nodes_stim = nest.GetChildren(layer_stim)[0] ''' Distribute initial membrane voltages.