def test_local_vps(self):
    num_procs = nest.NumProcesses()
    n_vp = 3 * num_procs
    nest.SetKernelStatus({'total_num_virtual_procs': n_vp})

    local_vps = list(nest.GetLocalVPs())

    # Use thread-vp mapping of neurons to check mapping in kernel
    nrns = nest.GetLocalNodeCollection(
        nest.Create('iaf_psc_delta', 2 * n_vp))

    for n in nrns:
        thrd = n.get('thread')
        vp = n.get('vp')
        assert vp == local_vps[thrd]
def test_sigmoid_growth_curve(self):
    beta_ca = 0.0001
    tau_ca = 10000.0
    growth_rate = 0.0001
    eps = 0.10
    psi = 0.10
    local_nodes = nest.GetLocalNodeCollection(self.pop)
    local_nodes.set({
        'beta_Ca': beta_ca,
        'tau_Ca': tau_ca,
        'synaptic_elements': {
            'se': {
                'growth_curve': 'sigmoid',
                'growth_rate': growth_rate,
                'eps': eps,
                'psi': psi,
                'z': 0.0
            }
        }
    })
    self.se_integrator.append(
        SigmoidNumericSEI(tau_ca=tau_ca, beta_ca=beta_ca,
                          eps=eps, psi=psi,
                          growth_rate=growth_rate))
    self.simulate()

    # check that we got the same values from one run to another
    # expected = self.se_nest[:, 30]
    # print(self.se_nest[:, 30].__repr__())
    expected = numpy.array([
        0.07798757689720627, 0.07796809230928879, 0.07796745199672085,
        0.07807166878406996, 0.07794925570454732, 0.0780381869323308,
        0.0780054060483019, 0.0779518888224286, 0.07792681014092591,
        0.07798540508673037
    ])

    local_pop_as_list = list(local_nodes)
    for count, n in enumerate(self.pop):
        loc = self.se_nest[local_pop_as_list.index(n), 30]
        ex = expected[count]
        testing.assert_allclose(loc, ex, rtol=self.rtol)
def test_consistent_local_vps():
    """
    Test local_vps field of kernel status.

    This test ensures that the PyNEST-generated local_vps information
    agrees with the thread-VP mappings in the kernel.
    """
    n_vp = 3 * nest.num_processes
    nest.total_num_virtual_procs = n_vp

    local_vps = list(nest.GetLocalVPs())

    # Use thread-vp mapping of neurons to check mapping in kernel
    nrns = nest.GetLocalNodeCollection(nest.Create('iaf_psc_delta', 2 * n_vp))

    vp_direct = list(nrns.vp)
    vp_indirect = [local_vps[t] for t in nrns.thread]
    assert vp_direct == vp_indirect
def test_sigmoid_growth_curve(self):
    beta_ca = 0.0001
    tau_ca = 10000.0
    growth_rate = 0.0001
    eps = 0.10
    psi = 0.10
    local_nodes = nest.GetLocalNodeCollection(self.pop)
    local_nodes.set({
        'beta_Ca': beta_ca,
        'tau_Ca': tau_ca,
        'synaptic_elements': {
            'se': {
                'growth_curve': 'sigmoid',
                'growth_rate': growth_rate,
                'eps': eps,
                'psi': psi,
                'z': 0.0
            }
        }
    })
    self.se_integrator.append(
        SigmoidNumericSEI(tau_ca=tau_ca, beta_ca=beta_ca,
                          eps=eps, psi=psi,
                          growth_rate=growth_rate))
    self.simulate()

    # check that we got the same values from one run to another
    # expected = self.se_nest[:, 30]
    # print(self.se_nest[:, 30].__repr__())
    expected = numpy.array([
        0.07801164, 0.07796841, 0.07807825, 0.07797382, 0.07802574,
        0.07805961, 0.07808139, 0.07794451, 0.07799474, 0.07794458
    ])

    local_pop_as_list = list(local_nodes)
    for count, n in enumerate(self.pop):
        loc = self.se_nest[local_pop_as_list.index(n), 30]
        ex = expected[count]
        testing.assert_almost_equal(loc, ex, decimal=5)
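# ---------------------------------------------------------------------------
# Hedged reference sketch (not part of the original test file).  The tests
# above compare NEST against a SigmoidNumericSEI integrator defined elsewhere
# in the test module.  The class below only illustrates how such a reference
# integrator could look, assuming the sigmoidal growth rule
# dz/dt = growth_rate * (2 / (1 + exp((Ca - eps) / psi)) - 1) and a calcium
# trace that decays with time constant tau_Ca and is incremented by beta_Ca
# per spike.  The class name, method names and update order are assumptions,
# not the implementation used by the test suite.
import math


class SigmoidGrowthSketch:
    """Forward-Euler integrator for a single synaptic-element count z."""

    def __init__(self, tau_ca, beta_ca, eps, psi, growth_rate, z0=0.0):
        self.tau_ca = tau_ca
        self.beta_ca = beta_ca
        self.eps = eps
        self.psi = psi
        self.growth_rate = growth_rate
        self.ca = 0.0   # calcium trace
        self.z = z0     # synaptic-element count

    def step(self, dt, spiked=False):
        # exponential decay of the calcium trace ...
        self.ca -= dt * self.ca / self.tau_ca
        # ... plus a fixed increment whenever the neuron spiked in this step
        if spiked:
            self.ca += self.beta_ca
        # sigmoidal dependence of element growth on the calcium trace
        dz = self.growth_rate * (
            2.0 / (1.0 + math.exp((self.ca - self.eps) / self.psi)) - 1.0)
        self.z += dt * dz
        return self.z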
def build_network(logger):
    """Builds the network including setting of simulation and neuron
    parameters, creation of neurons and connections.

    Requires an instance of Logger as argument.
    """

    tic = time.time()  # start timer on construction

    # unpack a few variables for convenience
    NE = brunel_params['NE']
    NI = brunel_params['NI']
    model_params = brunel_params['model_params']
    stdp_params = brunel_params['stdp_params']

    # set global kernel parameters
    nest.SetKernelStatus({
        'total_num_virtual_procs': params['nvp'],
        'resolution': params['dt'],
        'overwrite_files': True})

    nest.SetDefaults('iaf_psc_alpha', model_params)

    nest.message(M_INFO, 'build_network', 'Creating excitatory population.')
    E_neurons = nest.Create('iaf_psc_alpha', NE)

    nest.message(M_INFO, 'build_network', 'Creating inhibitory population.')
    I_neurons = nest.Create('iaf_psc_alpha', NI)

    if brunel_params['randomize_Vm']:
        nest.message(M_INFO, 'build_network',
                     'Randomizing membrane potentials.')

        random_vm = nest.random.normal(brunel_params['mean_potential'],
                                       brunel_params['sigma_potential'])
        nest.GetLocalNodeCollection(E_neurons).V_m = random_vm
        nest.GetLocalNodeCollection(I_neurons).V_m = random_vm

    # number of incoming excitatory connections
    CE = int(1. * NE / params['scale'])
    # number of incoming inhibitory connections
    CI = int(1. * NI / params['scale'])

    nest.message(M_INFO, 'build_network',
                 'Creating excitatory stimulus generator.')

    # Convert synapse weight from mV to pA
    conversion_factor = convert_synapse_weight(
        model_params['tau_m'], model_params['tau_syn_ex'],
        model_params['C_m'])
    JE_pA = conversion_factor * brunel_params['JE']

    nu_thresh = model_params['V_th'] / (
        CE * model_params['tau_m'] / model_params['C_m'] *
        JE_pA * np.exp(1.) * tau_syn)
    nu_ext = nu_thresh * brunel_params['eta']

    E_stimulus = nest.Create('poisson_generator', 1, {
        'rate': nu_ext * CE * 1000.})

    nest.message(M_INFO, 'build_network',
                 'Creating excitatory spike recorder.')

    if params['record_spikes']:
        recorder_label = os.path.join(
            brunel_params['filestem'],
            'alpha_' + str(stdp_params['alpha']) + '_spikes')
        E_recorder = nest.Create('spike_recorder', params={
            'record_to': 'ascii',
            'label': recorder_label
        })

    BuildNodeTime = time.time() - tic

    logger.log(str(BuildNodeTime) + ' # build_time_nodes')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_nodes')

    tic = time.time()

    nest.SetDefaults('static_synapse_hpc', {'delay': brunel_params['delay']})
    nest.CopyModel('static_synapse_hpc', 'syn_std')
    nest.CopyModel('static_synapse_hpc', 'syn_ex',
                   {'weight': JE_pA})
    nest.CopyModel('static_synapse_hpc', 'syn_in',
                   {'weight': brunel_params['g'] * JE_pA})

    stdp_params['weight'] = JE_pA
    nest.SetDefaults('stdp_pl_synapse_hom_hpc', stdp_params)

    nest.message(M_INFO, 'build_network', 'Connecting stimulus generators.')

    # Connect Poisson generator to neuron
    nest.Connect(E_stimulus, E_neurons, {'rule': 'all_to_all'},
                 {'synapse_model': 'syn_ex'})
    nest.Connect(E_stimulus, I_neurons, {'rule': 'all_to_all'},
                 {'synapse_model': 'syn_ex'})

    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> excitatory population.')

    nest.Connect(E_neurons, E_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CE,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'stdp_pl_synapse_hom_hpc'})

    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> excitatory population.')

    nest.Connect(I_neurons, E_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CI,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_in'})

    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> inhibitory population.')

    nest.Connect(E_neurons, I_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CE,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_ex'})

    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> inhibitory population.')

    nest.Connect(I_neurons, I_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CI,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_in'})

    if params['record_spikes']:
        if params['nvp'] != 1:
            local_neurons = nest.GetLocalNodeCollection(E_neurons)
            # GetLocalNodeCollection returns a stepped composite
            # NodeCollection, which cannot be sliced. In order to allow
            # slicing it later on, we're creating a new regular NodeCollection
            # from the plain node IDs.
            local_neurons = nest.NodeCollection(local_neurons.tolist())
        else:
            local_neurons = E_neurons

        if len(local_neurons) < brunel_params['Nrec']:
            nest.message(
                M_ERROR, 'build_network',
                """Spikes can only be recorded from local neurons, but the
                number of local neurons is smaller than the number of neurons
                spikes should be recorded from. Aborting the simulation!""")
            exit(1)

        nest.message(M_INFO, 'build_network', 'Connecting spike recorders.')
        nest.Connect(local_neurons[:brunel_params['Nrec']], E_recorder,
                     'all_to_all', 'static_synapse_hpc')

    # read out time used for building
    BuildEdgeTime = time.time() - tic

    logger.log(str(BuildEdgeTime) + ' # build_edge_time')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_edges')

    return E_recorder if params['record_spikes'] else None
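# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of build_network itself).  It shows roughly
# how the function above would be driven in the benchmark script: build the
# network, simulate, and log wall-clock times and memory.  Logger,
# memory_thisjob() and the 'simtime'/'log_file' entries of params are assumed
# to be defined elsewhere in the script; treat them as placeholders.
def run_simulation():
    """Build the network, simulate it and log timings."""
    with Logger(params['log_file']) as logger:  # assumed context-manager Logger
        nest.ResetKernel()
        logger.log(str(memory_thisjob()) + ' # virt_mem_0')

        spike_recorder = build_network(logger)

        tic = time.time()
        nest.Simulate(params['simtime'])  # 'simtime' assumed to be in params
        logger.log(str(time.time() - tic) + ' # sim_time')
        logger.log(str(memory_thisjob()) + ' # virt_mem_after_sim')

        if spike_recorder is not None:
            logger.log(str(spike_recorder.n_events) + ' # n_local_spikes')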