def simulate_example(hz_1=0., hz_2=100., load=True): global I_E global NEURON_MODELS global N_GPE global N_SEL global N_MSN global N_STN global MSN_RATE_BASE global STN_RATE_BASE global SNAME global SPATH global SYNAPSE_MODELS global SEL_ONSET N_EXP = 200 RATE_BASE = 25 # Base rate RATE_SELE_1 = hz_1 RATE_SELE_2 = hz_2 # Selection rate SAVE_AT = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl' SEL_TIME_1 = 500. SEL_TIME_2 = 200. sim_time = SEL_TIME_1 + SEL_TIME_2 + SEL_ONSET + 500. SNAME_NB = hz_1 + hz_2 + 1000 EXPERIMENTS = range(N_EXP) MODEL_LIST = models() my_nest.ResetKernel() my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS) my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED) my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND) GPE_list = [] # GPE input for each experiment for i_exp in EXPERIMENTS: GPE = MyPoissonInput(n=N_GPE, sd=True) GPE_list.append(GPE) MSN_list = [] # MSN input for each experiment for i_exp in EXPERIMENTS: MSN = MyPoissonInput(n=N_MSN, sd=True) MSN_list.append(MSN) STN_list = [] # MSN input for each experiment for i_exp in EXPERIMENTS: STN = MyPoissonInput(n=N_STN, sd=True) STN_list.append(STN) SNR_list = [] # SNR groups for each synapse for i_syn in SYNAPSE_MODELS_TESTED: I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + I_E SNR = MyGroup(NEURON_MODELS[0], n=N_EXP, params={'I_e': I_e}, sd=True, mm=False, mm_dt=.1, record_from=['']) SNR_list.append(SNR) if not load: for i_exp in EXPERIMENTS: GPE = GPE_list[i_exp] MSN = MSN_list[i_exp] STN = STN_list[i_exp] # Set spike times # Base rate MSN for id in MSN[:]: MSN.set_spike_times(id=id, rates=[MSN_RATE_BASE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate for id in GPE[0:N_GPE - N_SEL]: GPE.set_spike_times(id=id, rates=[RATE_BASE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate STN for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_RATE_BASE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Selection for id in GPE[N_GPE - N_SEL:N_GPE]: rates = [RATE_BASE, RATE_SELE_1, RATE_SELE_2, RATE_BASE] times = [ 1, SEL_ONSET, SEL_ONSET + SEL_TIME_1, SEL_ONSET + SEL_TIME_1 + SEL_TIME_2 ] t_stop = sim_time GPE.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop, seed=int(numpy.random.random() * 10000.0)) # Connect for i, syn in enumerate(SYNAPSE_MODELS_TESTED): target = SNR_list[i][i_exp] my_nest.ConvergentConnect(GPE[:], [target], model=syn) my_nest.ConvergentConnect(MSN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect(STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate(sim_time) for GPE in GPE_list: GPE.get_signal('s') for SNR in SNR_list: SNR.get_signal('s') misc.pickle_save([GPE_list, SNR_list], SAVE_AT) elif load: GPE_list, SNR_list = misc.pickle_load(SAVE_AT) pre_ref = str(SNR_list[0].signals['spikes'].mean_rate( SEL_ONSET - 500, SEL_ONSET)) pre_dyn = str(SNR_list[1].signals['spikes'].mean_rate( SEL_ONSET - 500, SEL_ONSET)) statusSynapes = [] for syn in SYNAPSE_MODELS_TESTED: statusSynapes.append(my_nest.GetDefaults(syn)) s = '\n' s = s + 'Example:\n' s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(EXPERIMENTS)), '#') s = s + ' %s %5s %3s \n' % ('N GPEs:', str(N_GPE), '#') s = s + ' %s %5s %3s \n' % ('Base rate:', str(RATE_BASE), 'spikes/s') s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE_1), 'spikes/s') s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME_1), 'ms') s = s + ' %s %5s %3s \n' % ('Selection rate:', 
str(RATE_SELE_2), 'spikes/s') s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME_2), 'ms') s = s + ' %s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref[0:4], 'spikes/s') s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'spikes/s') for ss in statusSynapes: s = s + '\n' s = s + ' %s %10s\n' % ('Synapse', ss['synapsemodel']) s = s + ' %s %5s %3s\n' % ('Weight', str(round(ss['weight'], 1)), 'nS') return GPE_list, SNR_list, s
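
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original code): the set_spike_times
# calls above rely on piecewise-constant ("inhomogeneous") Poisson spike
# trains, where rates[i] holds from times[i] until times[i+1] (or t_stop for
# the last segment), times in ms and rates in spikes/s. A minimal numpy-only
# version of that generator could look as follows; the helper name and its
# exact signature are assumptions for illustration, not the project's API.
def _sketch_piecewise_poisson(rates, times, t_stop, seed=None):
    rng = numpy.random.RandomState(seed)
    edges = list(times) + [t_stop]
    spikes = []
    for rate, t0, t1 in zip(rates, edges[:-1], edges[1:]):
        if rate <= 0.:
            continue
        t = t0
        while True:
            # exponential inter-spike interval; rate in spikes/s, time in ms
            t += rng.exponential(1000. / rate)
            if t >= t1:
                break
            spikes.append(t)
    return numpy.array(spikes)
# Example: 25 spikes/s baseline with a 100 spikes/s burst between 500 and 700 ms.
# _sketch_piecewise_poisson([25., 100., 25.], [1., 500., 700.], t_stop=1200., seed=0)
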
def simulate_example(hz=0, load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_SEL
    global N_MSN
    global N_STN
    global MSN_RATE_BASE
    global STN_BASE_RATE
    global SNAME
    global SPATH
    global SYNAPSE_MODELS
    global SEL_ONSET
    global GPE_BASE_RATE

    #n_exp = 20
    n_exp = 200

    RATE_SELE = hz  # Selection rate
    save_at = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl'

    sim_time = SEL_TIME + SEL_ONSET + 500.
    SNAME_NB = hz + 1000

    experiments = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    GPE_list = []  # GPE input for each experiment
    for i_exp in experiments:
        GPE = MyPoissonInput(n=N_GPE, sd=True, spath=SPATH,
                             sname_nb=SNAME_NB + i_exp)
        GPE_list.append(GPE)

    MSN_list = []  # MSN input for each experiment
    for i_exp in experiments:
        MSN = MyPoissonInput(n=N_MSN, sd=False)
        MSN_list.append(MSN)

    STN_list = []  # STN input for each experiment
    for i_exp in experiments:
        STN = MyPoissonInput(n=N_STN, sd=False)
        STN_list.append(STN)

    SNR_list = []  # SNR groups for each synapse
    I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
    for i_syn in range(len(SYNAPSE_MODELS_TESTED)):
        SNR = MyGroup(NEURON_MODELS[0], n=n_exp, params={'I_e': I_e}, sd=True)
        SNR_list.append(SNR)

    if not load:
        for i_exp in experiments:
            GPE = GPE_list[i_exp]
            MSN = MSN_list[i_exp]
            STN = STN_list[i_exp]

            # Set spike times
            # Base rate MSN
            for id in MSN[:]:
                MSN.set_spike_times(id=id, rates=[MSN_RATE_BASE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Set spike times
            # Base rate GPE
            for id in GPE[0:N_GPE - N_SEL]:
                GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in GPE[N_GPE - N_SEL:N_GPE]:
                rates = [GPE_BASE_RATE, RATE_SELE, GPE_BASE_RATE]
                times = [1, SEL_ONSET, SEL_TIME + SEL_ONSET]
                t_stop = sim_time
                GPE.set_spike_times(id=id, rates=rates, times=times,
                    t_stop=t_stop, seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                target = SNR_list[i_syn][i_exp]
                my_nest.ConvergentConnect(GPE[:], [target], model=syn)
                my_nest.ConvergentConnect(MSN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[0])
                my_nest.ConvergentConnect(STN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[1])

        my_nest.MySimulate(sim_time)

        for GPE in GPE_list:
            GPE.get_signal('s')
        for SNR in SNR_list:
            SNR.get_signal('s')

        misc.pickle_save([GPE_list, SNR_list], save_at)

    elif load:
        GPE_list, SNR_list = misc.pickle_load(save_at)

    # Pre-selection rates over the 500 ms preceding selection onset
    pre_ref = str(SNR_list[0].signals['spikes'].mean_rate(SEL_ONSET - 500,
                                                          SEL_ONSET))
    pre_dyn = str(SNR_list[1].signals['spikes'].mean_rate(SEL_ONSET - 500,
                                                          SEL_ONSET))

    statusSynapes = []
    for syn in SYNAPSE_MODELS_TESTED:
        statusSynapes.append(my_nest.GetDefaults(syn))

    s = '\n'
    s = s + 'Example:\n'
    s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(experiments)), '#')
    s = s + ' %s %5s %3s \n' % ('N GPEs:', str(N_GPE), '#')
    s = s + ' %s %5s %3s \n' % ('Base rate:', str(GPE_BASE_RATE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME), 'ms')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref[0:4], 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'spikes/s')

    for ss in statusSynapes:
        s = s + '\n'
        s = s + ' %s %10s\n' % ('Synapse', ss['synapsemodel'])
        s = s + ' %s %5s %3s\n' % ('Weight', str(round(ss['weight'], 1)), 'nS')

    return GPE_list, SNR_list, s
def simulate_example(MSN_hz=20, GPE_hz=0, load=True, n_gpe_sel=3,
                     sel_time_GPE=500):
    global GPE_BASE_RATE
    global STN_BASE_RATE
    global MSN_BASE_RATE
    global MSN_BURST_TIME
    global NEURON_MODELS
    global N_GPE
    global N_STN
    global N_MSN
    global N_MSN_BURST
    global SNAME
    global SPATH
    global SYNAPSE_MODELS
    global SEL_ONSET
    global SNR_INJECTED_CURRENT

    n_exp = 200

    msn_rate_sel = MSN_hz  # Selection rate MSN
    gpe_sel_rate = GPE_hz  # Selection rate GPE
    sel_time_MSN = MSN_BURST_TIME
    sim_time = sel_time_MSN + SEL_ONSET + 500.

    # Assumed save path (not defined in the original function); mirrors the
    # other simulate_example variants in this file.
    save_at = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl'

    EXPERIMENTS = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    MSN_list = []  # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput(n=N_MSN + N_MSN_BURST, sd=True)
        MSN_list.append(MSN)

    GPE_list = []  # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput(n=N_GPE + n_gpe_sel, sd=True)
        GPE_list.append(GPE)

    STN_list = []  # STN input for each experiment
    for i_exp in EXPERIMENTS:
        STN = MyPoissonInput(n=N_STN, sd=True)
        STN_list.append(STN)  # original appended GPE here by mistake

    SNR_list = []  # SNR groups for each synapse
    for i, SNR_i_c in enumerate(SNR_INJECTED_CURRENT):
        I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + SNR_i_c
        SNR = MyGroup(NEURON_MODELS[0], n=n_exp, params={'I_e': I_e},
                      sd=True, mm=False, mm_dt=.1, record_from=[''])
        SNR_list.append(SNR)

    if not load:
        for i_exp in EXPERIMENTS:

            # MSN
            MSN = MSN_list[i_exp]

            # Set spike times
            # Base rate
            for id in MSN[0:N_MSN]:
                MSN.set_spike_times(id=id, rates=[MSN_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Selection MSN
            for id in MSN[N_MSN:N_MSN + N_MSN_BURST]:
                rates = [MSN_BASE_RATE, msn_rate_sel, MSN_BASE_RATE]
                times = [1, SEL_ONSET, sel_time_MSN + SEL_ONSET]
                t_stop = sim_time
                MSN.set_spike_times(id=id, rates=rates, times=times,
                    t_stop=t_stop, seed=int(numpy.random.random() * 10000.0))

            # GPE
            GPE = GPE_list[i_exp]

            # Set spike times
            # Base rate
            for id in GPE[:]:
                GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Selection GPE
            for id in GPE[N_GPE:N_GPE + n_gpe_sel]:
                rates = [GPE_BASE_RATE, gpe_sel_rate, GPE_BASE_RATE]
                # If GPe excited smaller selection time
                times = [1, SEL_ONSET, sel_time_GPE + SEL_ONSET]
                t_stop = sim_time
                GPE.set_spike_times(id=id, rates=rates, times=times,
                    t_stop=t_stop, seed=int(numpy.random.random() * 100000.0))

            # STN (the original never retrieved STN from STN_list here)
            STN = STN_list[i_exp]

            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            idx_MSN_s = range(0, N_MSN - N_MSN_BURST)
            idx_MSN_s.extend(range(N_MSN, N_MSN + N_MSN_BURST))
            idx_GPE_s = range(0, N_GPE - n_gpe_sel)
            idx_GPE_s.extend(range(N_GPE, N_GPE + n_gpe_sel))

            # Connect with MSN burst
            target = SNR_list[0][i_exp]
            my_nest.ConvergentConnect(MSN[idx_MSN_s], [target],
                                      model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[0:N_GPE], [target],
                                      model=SYNAPSE_MODELS[1])
            my_nest.ConvergentConnect(STN[:], [target],
                                      model=SYNAPSE_MODELS_BACKGROUND[0])

            # With GPe pause
            target = SNR_list[1][i_exp]
            my_nest.ConvergentConnect(MSN[0:N_MSN], [target],
                                      model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[idx_GPE_s], [target],
                                      model=SYNAPSE_MODELS[1])
            my_nest.ConvergentConnect(STN[:], [target],
                                      model=SYNAPSE_MODELS_BACKGROUND[0])

            # With MSN burst and GPe pause
            target = SNR_list[2][i_exp]
            my_nest.ConvergentConnect(MSN[idx_MSN_s], [target],
                                      model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[idx_GPE_s], [target],
                                      model=SYNAPSE_MODELS[1])
            my_nest.ConvergentConnect(STN[:], [target],
                                      model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        for MSN in MSN_list:
            MSN.get_signal('s')
        for GPE in GPE_list:
            GPE.get_signal('s')
        for SNR in SNR_list:
            SNR.get_signal('s')

        misc.pickle_save([MSN_list, GPE_list, SNR_list], save_at)

    if load:
        MSN_list, GPE_list, SNR_list = misc.pickle_load(save_at)

    pre_dyn_MSN = str(SNR_list[0].signals['spikes'].mean_rate(SEL_ONSET - 500,
                                                              SEL_ONSET))
    pre_dyn_GPE = str(SNR_list[1].signals['spikes'].mean_rate(SEL_ONSET - 500,
                                                              SEL_ONSET))

    s = '\n'
    s = s + 'Example:\n'
    s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(EXPERIMENTS)), '#')
    s = s + ' %s %5s %3s \n' % ('N MSN:', str(N_MSN), '#')
    s = s + ' %s %5s %3s \n' % ('N GPE:', str(N_GPE), '#')
    s = s + ' %s %5s %3s \n' % ('Base rate MSN:', str(MSN_BASE_RATE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Sel rate MSN:', str(msn_rate_sel), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Sel time MSN:', str(sel_time_MSN), 'ms')
    s = s + ' %s %5s %3s \n' % ('Base rate GPe:', str(GPE_BASE_RATE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Sel rate GPe:', str(gpe_sel_rate), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Sel time GPe:', str(sel_time_GPE), 'ms')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn MSN:', pre_dyn_MSN[0:4], 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn GPe:', pre_dyn_GPE[0:4], 'spikes/s')

    return MSN_list, GPE_list, SNR_list, s
def simulate_get_rates(msn_burst_rate=20, load=True, len_ms=500.): n_exp = 50 sim_time = DP['SEL_TIME'] + DP['SEL_ONSET'] + 1000. experiments = range(n_exp) model_list = models() my_nest.ResetKernel() my_nest.MyLoadModels(model_list, DP['NEURON_MODELS']) my_nest.MyLoadModels(model_list, DP['SYNAPSE_MODELS_TESTED']) my_nest.MyLoadModels(model_list, DP['SYNAPSE_MODELS_BACKGROUND']) MSN_list = [] # MSN input for each experiment for i_exp in experiments: MSN = MyPoissonInput(n=DP['N_MSN'], sd=True) MSN_list.append(MSN) GPE_list = [] # GPE input for each experiment for i_exp in experiments: GPE = MyPoissonInput(n=DP['N_GPE'], sd=True) GPE_list.append(GPE) SNR_list = [] # SNR groups for each synapse for i_syn, syn in enumerate(DP['SYNAPSE_MODELS_TESTED']): I_e = my_nest.GetDefaults( DP['NEURON_MODELS'][0])['I_e'] + DP['SNR_INJECTED_CURRENT'][i_syn] SNR = MyGroup(DP['NEURON_MODELS'][0], n=n_exp, sd=True, params={'I_e': I_e}) SNR_list.append(SNR) for i_exp in experiments: MSN = MSN_list[i_exp] GPE = GPE_list[i_exp] # Set spike times # Base rate for id in MSN[1:DP['N_MSN']]: MSN.set_spike_times(id=id, rates=[DP['MSN_BASE_RATE']], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Set spike times # Base rate for id in GPE[:]: GPE.set_spike_times(id=id, rates=[DP['GPE_BASE_RATE']], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Selection for id in MSN[DP['N_MSN'] - DP['N_MSN_BURST'] - 50:DP['N_MSN'] - 50]: rates = [DP['MSN_BASE_RATE'], msn_burst_rate, DP['MSN_BASE_RATE']] times = [1, DP['SEL_ONSET'], DP['SEL_TIME'] + DP['SEL_ONSET']] t_stop = sim_time MSN.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop, seed=int(numpy.random.random() * 10000.0)) # Connect for i_syn, syn in enumerate(DP['SYNAPSE_MODELS_TESTED']): target = SNR_list[i_syn][i_exp] my_nest.ConvergentConnect(MSN[:], [target], model=syn) my_nest.ConvergentConnect(GPE[:], [target], model=DP['SYNAPSE_MODELS_BACKGROUND'][0]) my_nest.MySimulate(sim_time) for SNR in SNR_list: SNR.get_signal('s') rate_ref_1 = str(SNR_list[0].signals['spikes'].mean_rate( DP['SEL_ONSET'], DP['SEL_ONSET'] + len_ms)) rate_ref_2 = str(SNR_list[1].signals['spikes'].mean_rate( DP['SEL_ONSET'], DP['SEL_ONSET'] + len_ms)) rate_dyn = str(SNR_list[2].signals['spikes'].mean_rate( DP['SEL_ONSET'], DP['SEL_ONSET'] + len_ms)) return [rate_ref_1, rate_ref_2, rate_dyn]
def simulate_rate_first_and_second_bursts( selection_intervals=[0.0, 500.0, 1000., 1500.], load=True): global SNR_INJECTED_CURRENT global NEURON_MODELS global N_GPE global N_MSN_BURST global N_MSN global N_STN global MSN_BASE_RATE global GPE_BASE_RATE global STN_BASE_RATE global FILE_NAME global OUTPUT_PATH global SYNAPSE_MODELS_TESTED global SEL_ONSET #n_exp=20 n_exp = 200 msn_burst_rate = 20 n_msn_burst = N_MSN_BURST transient_stop = selection_intervals[2] - selection_intervals[1] save_result_at = (OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_' + str(transient_stop) + 'ms.pkl') save_header_at = (OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_' + str(transient_stop) + 'ms_header') burst_time = 500. sim_time = SEL_ONSET + selection_intervals[3] + 500. EXPERIMENTS = range(n_exp) model_list = models() my_nest.ResetKernel(threads=1) my_nest.MyLoadModels(model_list, NEURON_MODELS) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND) if not load: MSN_list = [] # MSN input for each experiment for i_exp in EXPERIMENTS: MSN = MyPoissonInput(n=N_MSN + n_msn_burst, sd=True) MSN_list.append(MSN) GPE_list = [] # GPE input for each experiment for i_exp in EXPERIMENTS: GPE = MyPoissonInput(n=N_GPE, sd=True) GPE_list.append(GPE) STN_list = [] # GPE input for each experiment for i_exp in EXPERIMENTS: STN = MyPoissonInput(n=N_STN, sd=True) STN_list.append(STN) SNR_list = [] # SNR groups for each synapse and number of selected MSN SNR_list_experiments = [] for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED): SNR = [] for i_sel in range(1): I_e = my_nest.GetDefaults( NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT SNR.append( MyGroup(NEURON_MODELS[0], n=n_exp, sd=True, params={'I_e': I_e})) SNR_list.append(SNR) for i_exp in EXPERIMENTS: MSN = MSN_list[i_exp] GPE = GPE_list[i_exp] STN = STN_list[i_exp] # Set spike times # Base rate for id in MSN[1:N_MSN]: MSN.set_spike_times(id=id, rates=[MSN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Selection for id in MSN[N_MSN:N_MSN + n_msn_burst]: rates = [ MSN_BASE_RATE, msn_burst_rate, MSN_BASE_RATE, msn_burst_rate, MSN_BASE_RATE ] t1 = selection_intervals[0] t2 = selection_intervals[1] t3 = selection_intervals[2] t4 = selection_intervals[3] times = [ 1, SEL_ONSET + t1, SEL_ONSET + t2, SEL_ONSET + t3, SEL_ONSET + t4 ] t_stop = sim_time MSN.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop, seed=int(numpy.random.random() * 10000.0)) # Base rate GPE for id in GPE[:]: GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate GPE for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Connect for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED): # i_sel goes over 0,..., n_max_sel for i_sel, n_sel in enumerate( range(n_msn_burst, n_msn_burst + 1)): target = SNR_list[i_syn][i_sel][i_exp] my_nest.ConvergentConnect(MSN[0:N_MSN - n_sel], [target], model=syn) my_nest.ConvergentConnect(MSN[N_MSN:N_MSN + n_sel], [target], model=syn) my_nest.ConvergentConnect( GPE[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect( STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate(sim_time) for SNR_sel in SNR_list: for SNR in SNR_sel: SNR.get_signal('s') t1 = selection_intervals[0] t3 = selection_intervals[2] mean_rates = [] mean_rates_std = [] # Time until arrival 
of spikes in SNr delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay'] for SNR_sel in SNR_list: m_r = [] m_r_std = [] for SNR in SNR_sel: # Mean rate during first 200 ms m_r.append(SNR.signals['spikes'].mean_rate( SEL_ONSET + t1 + delay, SEL_ONSET + t1 + 200 + delay)) m_r.append(SNR.signals['spikes'].mean_rate( SEL_ONSET + t3 + delay, SEL_ONSET + t3 + 200 + delay)) m_r_std.append(SNR.signals['spikes'].mean_rate_std( SEL_ONSET + t1 + delay, SEL_ONSET + t1 + 200 + delay)) m_r_std.append(SNR.signals['spikes'].mean_rate_std( SEL_ONSET + t3 + delay, SEL_ONSET + t3 + 200 + delay)) mean_rates.append(m_r) mean_rates_std.append(m_r_std) mean_rates = numpy.array(mean_rates) mean_rates_std = numpy.array(mean_rates_std) s = '\n' s = s + 'simulate_rate_first_and_second_bursts\n' s = s + '%s %5s %3s \n' % ('Simulation time', str(sim_time), '#') s = s + '%s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#') s = s + '%s %5s %3s \n' % ('N MSN_bursts:', str(n_msn_burst), '#') s = s + '%s %5s %3s \n' % ('N experiments:', str(n_exp), '#') s = s + '%s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE), 'spikes/s') s = s + '%s %5s %3s \n' % ('MSN burst rate:', str(MSN_BURST_RATE), 'spikes/s') s = s + '%s %5s %3s \n' % ('MSN burst time:', str(burst_time), 'ms') s = s + '%s %5s %3s \n' % ('GPe base rate:', str(GPE_BASE_RATE), 'spikes/s') s = s + '%s %5s %3s \n' % ('SNR injected current:', str(SNR_INJECTED_CURRENT), 'pA') for i_interval, interval in enumerate(selection_intervals): s = s + '%s %5s %3s \n' % ('Sel interval ' + str(i_interval) + ':', str(selection_intervals), 'ms') info_string = s header = s misc.text_save(header, save_header_at) misc.pickle_save([mean_rates, mean_rates_std, info_string], save_result_at) elif load: mean_rates, mean_rates_std, info_string = misc.pickle_load( save_result_at) return mean_rates, mean_rates_std, info_string
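
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original code): the mean_rate /
# mean_rate_std calls in simulate_rate_first_and_second_bursts above presumably
# reduce to counting spikes per neuron in a time window and averaging across
# the n_exp SNr neurons. A plain-numpy version of that reduction could look as
# follows; spike_trains is assumed to be a list with one array of spike times
# (in ms) per neuron, and t1/t2 delimit the window.
def _sketch_window_rates(spike_trains, t1, t2):
    counts = numpy.array([numpy.sum((st >= t1) & (st < t2))
                          for st in spike_trains], dtype=float)
    rates = counts / ((t2 - t1) / 1000.)  # spikes/s per neuron
    return rates.mean(), rates.std()
# Example: mean and std of the rate 0-200 ms after the first burst onset.
# m, sd = _sketch_window_rates(trains, SEL_ONSET, SEL_ONSET + 200.)
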
def simulate_example_msn_snr():
    nFun = 0  # Function number
    nSim = 0  # Simulation number within function

    rates = numpy.array([.1, .1])
    times = numpy.array([0., 25000.])
    nMSN = 500
    simTime = 100000.
    I_e = 0.

    my_nest.ResetKernel()
    model_list = models()
    my_nest.MyLoadModels(model_list, neuronModels)
    my_nest.MyLoadModels(model_list, synapseModels)

    MSN = MyGroup('spike_generator', nMSN, mm_dt=1.0, mm=False, sd=False,
                  spath=spath, sname_nb=str(nFun) + str(nSim))
    SNR = MyGroup(neuronModels[0], n=len(synapseModels), params={'I_e': I_e},
                  sd=True, mm_dt=.1, mm=False, spath=spath,
                  sname_nb=str(nFun) + str(nSim))
    nSim += 1

    spikeTimes = []
    for i in range(nMSN):
        spikes = misc.inh_poisson_spikes(rates, times, t_stop=simTime,
                                         n_rep=1, seed=i)
        my_nest.SetStatus([MSN[i]], params={'spike_times': spikes})
        for spk in spikes:
            spikeTimes.append((i, spk))

    # add spike list for MSN to MSN spike list
    MSN.signals['spikes'] = my_signals.MySpikeList(spikeTimes, MSN.ids)
    MSN.save_signal('s')

    noise = my_nest.Create('noise_generator', params={'std': 100.})
    my_nest.Connect(noise, [SNR[0]], params={'receptor_type': 5})
    my_nest.Connect(noise, [SNR[1]], params={'receptor_type': 5})
    my_nest.Connect(noise, [SNR[2]], params={'receptor_type': 5})

    for i, syn in enumerate(synapseModels):
        my_nest.ConvergentConnect(MSN[:], [SNR[i]], model=syn)

    my_nest.MySimulate(simTime)

    SNR.get_signal('s')  # retrieve signal

    SNR_rates = [SNR.signals['spikes'].mean_rates(0, 5000),
                 SNR.signals['spikes'].mean_rates(5000, 10000)]
    for i in range(0, len(SNR_rates)):
        for j in range(0, len(SNR_rates[0])):
            SNR_rates[i][j] = int(SNR_rates[i][j])

    s = '\n'
    s = s + 'Example plot MSN and SNr:\n'
    s = s + 'Synapse models:\n'
    for syn in synapseModels:
        s = s + ' %s\n' % (syn)
    s = s + ' %s %5s %3s \n' % ('N MSN:', str(nMSN), '#')
    s = s + ' %s %5s %3s \n' % ('MSN Rates:',
                                str([str(round(r, 1)) for r in rates]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 0-5000:\n', str(SNR_rates[0]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 5000-10000:\n', str(SNR_rates[1]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nTimes:', str(times), 'ms')
    s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA')
    infoString = s

    return MSN, SNR, infoString
def simulate_MSN_vs_SNR_rate(load=True): global SNR_INJECTED_CURRENT global N_MSN global N_GPE global N_STN global GPE_BASE_RATE # Path were raw data is saved. For example the spike trains. save_result_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_rate.pkl' save_header_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_rate_header' MSNmeanRates = numpy.arange(0.1, 3.1, 0.1) SNRmeanRates = [] sim_time = 100000. if not load: for r in MSNmeanRates: my_nest.ResetKernel(threads=3) model_list, model_dict = models() my_nest.MyLoadModels(model_list, NEURON_MODELS) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND) MSN = MyPoissonInput(n=N_MSN) GPE = MyPoissonInput(n=N_GPE) STN = MyPoissonInput(n=N_STN) I_e = my_nest.GetDefaults( NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT SNR = MyGroup(NEURON_MODELS[0], n=len(SYNAPSE_MODELS_TESTED), params={'I_e': I_e}, sd=True) for id in MSN[:]: MSN.set_spike_times(id=id, rates=numpy.array([r]), times=numpy.array([1]), t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate GPE for id in GPE[:]: GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate STN for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) for i, syn in enumerate(SYNAPSE_MODELS_TESTED): my_nest.ConvergentConnect(MSN[:], [SNR[i]], model=syn) my_nest.ConvergentConnect(GPE[:], [SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect(STN[:], [SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate(sim_time) SNR.get_signal('s') # retrieve signal SNRmeanRates.append(SNR.signals['spikes'].mean_rates( 1000.0, sim_time)) SNRmeanRates = numpy.array(SNRmeanRates).transpose() MSNmeanRates = numpy.array(MSNmeanRates) rateAtThr = '' for SNRr in SNRmeanRates: tmp = str(MSNmeanRates[SNRr >= SELECTION_THR][-1]) rateAtThr += ' ' + tmp[0:4] s = '\n' s = s + 'simulate_MSN_vs_SNR_rate:\n' s = s + ' %s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#') s = s + ' \n%s \n%5s %3s \n' % ('MSN rates:', str( MSNmeanRates[0]) + '-' + str(MSNmeanRates[-1]), 'spikes/s') s = s + ' %s %5s %3s \n' % ('N GPes:', str(N_GPE), '#') s = s + ' %s %5s %3s \n' % ('Threshold SNr:', str(SELECTION_THR), 'spikes/s') s = s + ' \n%s \n%5s %3s \n' % ('MSN rate right before threshold SNr:', str(rateAtThr), 'spikes/s') s = s + ' \n%s %5s %3s \n' % ('Simulation time:', str(sim_time), 'ms') s = s + ' %s %5s %3s \n' % ('Injected current:', str(SNR_INJECTED_CURRENT), 'pA') infoString = s header = HEADER_SIMULATION_SETUP + s misc.text_save(header, save_header_at) misc.pickle_save([MSNmeanRates, SNRmeanRates, infoString], save_result_at) elif load: MSNmeanRates, SNRmeanRates, infoString = misc.pickle_load( save_result_at) return MSNmeanRates, SNRmeanRates, infoString
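
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original code): the rateAtThr readout in
# simulate_MSN_vs_SNR_rate above picks, for each tested synapse model, the
# largest MSN rate at which the SNr rate is still at or above SELECTION_THR.
# The same logic, factored out, assuming SNRmeanRates has shape
# (n_synapse_models, n_MSN_rates):
def _sketch_rate_at_threshold(MSNmeanRates, SNRmeanRates, thr):
    out = []
    for snr_rates in SNRmeanRates:
        above = MSNmeanRates[snr_rates >= thr]
        out.append(above[-1] if len(above) else numpy.nan)
    return out
# Example:
# _sketch_rate_at_threshold(MSNmeanRates, SNRmeanRates, SELECTION_THR)
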
def simulate_GPE_vs_SNR_const_syn_events(load=True): global N_GPE global N_MSN global MSN_BASE_RATE global SNR_INJECTED_CURRENT save_at = (SPATH+'/'+NEURON_MODELS[0]+'-' + '-GPE_vs_SNR_const_syn_events.pkl') nGPE_range=numpy.arange(N_GPE,4,-1) # To maintain CONSTANT_SYN_EVENTS in to SNr while changing number of pausing # GPe we have to increase the mean rate of the non-pausing GPe's GPEmeanRates=CONSTANT_SYN_EVENTS/nGPE_range SNRmeanRates=[] sim_time=10000. I_e=0. if not load: for r, n_gpe in zip(GPEmeanRates,nGPE_range): my_nest.ResetKernel() model_list=models() my_nest.MyLoadModels( model_list, NEURON_MODELS ) my_nest.MyLoadModels( model_list, SYNAPSE_MODELS_TESTED ) my_nest.MyLoadModels( model_list, SYNAPSE_MODELS_BACKGROUND ) GPE = MyPoissonInput( n=n_gpe) MSN = MyPoissonInput( n=N_MSN) STN = MyPoissonInput( n=N_STN) I_e=my_nest.GetDefaults(NEURON_MODELS[0])['I_e']+SNR_INJECTED_CURRENT SNR = MyGroup( NEURON_MODELS[0], n=len(SYNAPSE_MODELS_TESTED), sd=True,params={'I_e':I_e}) for id in GPE[:]: GPE.set_spike_times(id=id, rates=[r], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for id in MSN[:]: MSN.set_spike_times(id=id, rates=[MSN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for i, syn in enumerate(SYNAPSE_MODELS_TESTED): my_nest.ConvergentConnect(GPE[:],[SNR[i]], model=syn) my_nest.ConvergentConnect(MSN[:],[SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect(STN[:],[SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate( sim_time ) SNR.get_signal( 's') # retrieve signal SNRmeanRates.append(SNR.signals['spikes'].mean_rates(5000,sim_time)) SNRmeanRates=numpy.array(SNRmeanRates).transpose() GPEmeanRates=numpy.array(GPEmeanRates) rateAtThr='' for SNRr in SNRmeanRates: tmp=str(GPEmeanRates[SNRr>=SELECTION_THR][-1]) rateAtThr+=' '+tmp[0:4] s='\n' s =s + 'GPE vs SNr rate:\n' s = s + ' %s %5s %3s \n' % ( 'N GPEs:', str ( N_GPE ), '#' ) s = s + ' \n%s %5s %3s \n' % ( 'GPE rates:', str ( GPEmeanRates[0] ) + '-'+ str ( GPEmeanRates[-1] ), 'spikes/s' ) s = s + ' %s %5s %3s \n' % ( 'Threshold SNr:', str ( SELECTION_THR ), 'spikes/s' ) s = s + ' \n%s %5s %3s \n' % ( 'GPE rate at threshold SNr:', str ( rateAtThr ), 'spikes/s' ) s = s + ' \n%s %5s %3s \n' % ( 'Simulation time:', str ( sim_time), 'ms' ) s = s + ' %s %5s %3s \n' % ( 'I_e:', str ( I_e ), 'pA' ) s = s + ' %s %5s %3s \n' % ( 'Steady state rate ref:', str ( round(SNRmeanRates[0][0],1) ), 'pA' ) s = s + ' %s %5s %3s \n' % ( 'Steady state rate dyn:', str ( round(SNRmeanRates[1][0],1) ), 'pA' ) statusSynapse=[] for syn in SYNAPSE_MODELS_TESTED: statusSynapse.append( my_nest.GetDefaults(syn) ) for ss in statusSynapse: s = s + '\n' s = s + ' %s %10s\n' % ( 'Synapse', ss['synapsemodel']) s = s + ' %s %5s %3s\n' % ( 'Weight', str( round( ss['weight'], 1) ), 'nS') infoString=s misc.pickle_save([GPEmeanRates, SNRmeanRates, infoString], save_at) elif load: GPEmeanRates, SNRmeanRates, infoString = misc.pickle_load(save_at) return nGPE_range, GPEmeanRates, SNRmeanRates, infoString
def simulate_example(hz=20): sname_nb = hz + 1000 saveAt = SPATH + '/' + SNAME + '-' + NEURONMODELS[0] + '-example.pkl' nGPE = 100 EXPERIMENTS = range(20) nSelected = 50 selectionRate = hz baseRate = 20. model_list = models() I_e = -5. simTime = 3500. selectionTime = 500. selectionOnset = 2500. my_nest.ResetKernel() my_nest.MyLoadModels(model_list, NEURONMODELS) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS) GPE_list = [] # GPE input for each experiment for iExp in EXPERIMENTS: GPE_list.append( MyPoissonInput(n=nGPE, sd=True, spath=SPATH, sname_nb=sname_nb + iExp)) SNR_list = [] # SNR groups for each synapse for iSyn, syn in enumerate(SYNAPSE_MODELS): SNR_list.append( MyGroup(NEURONMODELS[0], n=len(EXPERIMENTS), params={'I_e': I_e}, mm_dt=.1, record_from=[''], spath=SPATH, sname_nb=sname_nb + iSyn)) if not LOAD: for iExp in EXPERIMENTS: GPE = GPE_list[iExp] # Base rate for id in GPE[:]: GPE.set_spike_times(id=id, rates=[baseRate], times=[1], t_stop=simTime) # Selection for id in GPE[:]: GPE.set_spike_times( id=id, rates=[baseRate, selectionRate, baseRate], times=[1, selectionOnset, selectionTime + selectionOnset], t_stop=simTime) for iSyn, syn in enumerate(SYNAPSE_MODELS): target = SNR_list[iSyn][iExp] my_nest.ConvergentConnect(GPE[:], [target], model=syn) my_nest.MySimulate(simTime) for GPE in GPE_list: GPE.get_signal('s') # retrieve signal for SNR in SNR_list: SNR.get_signal('s') # retrieve signal misc.pickle_save([GPE_list, SNR_list], saveAt) if LOAD: GPE_list, SNR_list = misc.pickle_load(saveAt) s = '\n' s = s + 'Example:\n' s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(EXPERIMENTS)), '#') s = s + ' %s %5s %3s \n' % ('Base rate:', str(baseRate), '#') s = s + ' %s %5s %3s \n' % ('Selection rate:', str(selectionRate), '#') return GPE_list, SNR_list, s
def simulate_selection_vs_neurons(selRateInterval=[0.0, 500.0], hz=20): sname_nb = hz nGPE = 500 nExp = 5 if hz > 7: nMaxSelected = 60 else: nMaxSelected = 100 baseRate = 0.1 selectionRate = hz I_e = -5. simTime = 3500. model_list = models() selectionTime = 3000. selectionOnset = 500. expParams = [] expIntervals = [] iSNR = 0 for syn in SYNAPSE_MODELS: for iSel in range(nMaxSelected): expIntervals.append([iSNR, iSNR + nExp]) for iExp in range(nExp): expParams.append((syn, iSel, iExp, iSNR)) iSNR += 1 synIntervals = [] iSNR = 0 for syn in SYNAPSE_MODELS: synIntervals.append([iSNR, iSNR + nMaxSelected]) iSNR += nMaxSelected my_nest.ResetKernel() my_nest.MyLoadModels(model_list, NEURONMODELS) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS) SNR = MyGroup(NEURONMODELS[0], n=len(expParams), params={'I_e': I_e}, mm_dt=.1, record_from=[''], spath=SPATH, sname_nb=sname_nb) sourceBack = [] sourceSel = [] for iExp in range(nExp): # Background tmpSourceBack = [] for iGPE in range(nGPE - 1): spikeTimes = misc.inh_poisson_spikes([baseRate], [1], t_stop=simTime, n_rep=nExp, seed=iGPE + 10 * iExp) if any(spikeTimes): tmpSourceBack.extend( my_nest.Create('spike_generator', params={'spike_times': spikeTimes})) sourceBack.append(tmpSourceBack) if not LOAD: for syn, iSel, iExp, iSNR in expParams: print 'Connect SNR ' + str(SNR[iSNR]) + ' ' + syn target = SNR[iSNR] my_nest.ConvergentConnect(sourceBack[iExp][0:nGPE - iSel], [target], model=syn) my_nest.ConvergentConnect(sourceSel[iExp][0:iSel + 1], [target], model=syn) my_nest.MySimulate(simTime) SNR.save_signal('s') SNR.get_signal('s') # retrieve signal #SNR.get_signal( 'v','V_m' ) # retrieve signal #SNR.signals['V_m'].plot() #SNR.signals['spikes'].raster_plot() #pylab.show() if LOAD: SNR.load_signal('s') #SNR.get_signal( 'v','V_m', stop=simTime ) # retrieve signal #SNR.signals['V_m'].plot(id_list=[5]) #SNR.['spikes'].raster_plot() #pylab.show() t1 = selRateInterval[0] t2 = selRateInterval[1] tmpMeanRates1 = [] tmpMeanRates2 = [] tmpMeanRates3 = [] tmpMeanRates4 = [] tmpMeanRates1 = SNR.signals['spikes'].mean_rates(selectionOnset + t1, selectionOnset + t2) for interval in expIntervals: tmpMeanRates3.append( numpy.mean(tmpMeanRates1[interval[0]:interval[1]], axis=0)) for interval in synIntervals: tmpMeanRates4.append(tmpMeanRates3[interval[0]:interval[1]]) meanRates = numpy.array(tmpMeanRates4) nbNeurons = numpy.arange(1, nMaxSelected + 1, 1) s = '\n' s = s + ' %s %5s %3s \n' % ('N GPEs:', str(nGPE), '#') s = s + ' %s %5s %3s \n' % ('N experiments:', str(nExp), '#') s = s + ' %s %5s %3s \n' % ('Base rate:', str(baseRate), 'Hz') s = s + ' %s %5s %3s \n' % ('Selection rate:', str(selectionRate), 'Hz') s = s + ' %s %5s %3s \n' % ('Selection time:', str(selectionTime), 'ms') s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA') infoString = s return nbNeurons, meanRates, infoString
def simulate_example(hz=0, load=True): global NEURON_MODELS global SNAME global SPATH global SYNAPSE_MODELS global SEL_ONSET global I_E N_EXP = 200 N_GPE = 50 N_SEL = 30 # Number of selected GPE N_INH = 0 # Number of inhibited GPE RATE_BASE = 15 # Base rate RATE_SELE = hz # Selection rate RATE_INHI = 0 SAVE_AT = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl' SEL_TIME = 20. sim_time = SEL_TIME + SEL_ONSET + 800. SNAME_NB = hz + 1000 EXPERIMENTS = range(N_EXP) MODEL_LIST = models() my_nest.ResetKernel() my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS) my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS) GPE_list = [] # GPE input for each experiment for i_exp in EXPERIMENTS: GPE = MyPoissonInput(n=N_GPE, sd=True, spath=SPATH, sname_nb=SNAME_NB + i_exp) GPE_list.append(GPE) SNR_list = [] # SNR groups for each synapse for i_syn, syn in enumerate(SYNAPSE_MODELS): SNR = MyGroup(NEURON_MODELS[0], n=N_EXP, params={'I_e': I_E}, sd=True, mm=False, mm_dt=.1, record_from=[''], spath=SPATH, sname_nb=SNAME_NB + i_syn) SNR_list.append(SNR) if not load: for i_exp in EXPERIMENTS: GPE = GPE_list[i_exp] # Set spike times # Base rate for id in GPE[:]: GPE.set_spike_times(id=id, rates=[RATE_BASE], times=[1], t_stop=sim_time) # Selection for id in GPE[N_GPE - N_SEL:N_GPE + 1]: rates = [RATE_BASE, RATE_SELE, RATE_BASE] times = [1, SEL_ONSET, SEL_TIME + SEL_ONSET] t_stop = sim_time GPE.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop) # Inhibition for id in GPE[N_GPE - N_SEL - N_INH:N_GPE + 1 - N_SEL]: rates = [RATE_BASE, RATE_INHI, RATE_BASE] times = [1, SEL_ONSET, SEL_TIME + SEL_ONSET] t_stop = sim_time GPE.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop) # Connect for i_syn, syn in enumerate(SYNAPSE_MODELS): target = SNR_list[i_syn][i_exp] my_nest.ConvergentConnect(GPE[:], [target], model=syn) my_nest.MySimulate(sim_time) for GPE in GPE_list: GPE.get_signal('s') for SNR in SNR_list: SNR.get_signal('s') misc.pickle_save([GPE_list, SNR_list], SAVE_AT) if load: GPE_list, SNR_list = misc.pickle_load(SAVE_AT) pre_ref = str(SNR_list[0].signals['spikes'].mean_rate( SEL_ONSET - 500, SEL_ONSET)) pre_dyn = str(SNR_list[2].signals['spikes'].mean_rate( SEL_ONSET - 500, SEL_ONSET)) s = '\n' s = s + 'Example:\n' s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(EXPERIMENTS)), '#') s = s + ' %s %5s %3s \n' % ('Base rate:', str(RATE_BASE), 'Hz') s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE), 'Hz') s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME), 'ms') s = s + ' %s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref[0:4], 'Hz') s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'Hz') return GPE_list, SNR_list, s
def simulate_GPE_vs_SNR_rate(): nFun = 1 # Function number nSim = 0 # Simulation number within function GPEmeanRates = numpy.arange(1, 50, 2) SNRmeanRates = [] nGPE = 10 simTime = 10000. I_e = 0. for r in GPEmeanRates: my_nest.ResetKernel() model_list = models() my_nest.MyLoadModels(model_list, neuronModels) my_nest.MyLoadModels(model_list, synapseModels) GPE = MyGroup('spike_generator', nGPE, mm_dt=1.0, mm=False, sd=False, spath=spath, siter=str(nFun) + str(nSim)) SNR = MyGroup(neuronModels[0], n=len(synapseModels), params={'I_e': I_e}, mm_dt=.1, mm=False, spath=spath, siter=str(nFun) + str(nSim)) nSim += 1 if not LOAD: spikeTimes = [] for i in range(nGPE): spikes = misc.inh_poisson_spikes(numpy.array([r]), numpy.array([0]), t_stop=simTime, n_rep=1, seed=i) my_nest.SetStatus([GPE[i]], params={'spike_times': spikes}) for spk in spikes: spikeTimes.append((i, spk)) # add spike list for GPE to GPE spike list GPE.signals['spikes'] = my_signals.MySpikeList(spikeTimes, GPE.ids) GPE.save_signal('s') for i, syn in enumerate(synapseModels): my_nest.ConvergentConnect(GPE[:], [SNR[i]], model=syn) my_nest.MySimulate(simTime) SNR.save_signal('s') SNR.get_signal('s') # retrieve signal elif LOAD: SNR.load_signal('s') SNRmeanRates.append(SNR.signals['spikes'].mean_rates(0, simTime)) SNRmeanRates = numpy.array(SNRmeanRates).transpose() GPEmeanRates = numpy.array(GPEmeanRates) THR = 2. rateAtThr = '' for SNRr in SNRmeanRates: tmp = str(GPEmeanRates[SNRr >= THR][-1]) rateAtThr += ' ' + tmp[0:4] s = '\n' s = s + 'GPE vs SNr rate:\n' s = s + ' %s %5s %3s \n' % ('N GPEs:', str(nGPE), '#') s = s + ' \n%s \n%5s %3s \n' % ('GPE rates:', str(GPEmeanRates[0]) + '-' + str(GPEmeanRates[-1]), 'Hz') s = s + ' %s %5s %3s \n' % ('Threshold SNr:', str(THR), 'Hz') s = s + ' \n%s \n%5s %3s \n' % ('GPE rate at threshold SNr:', str(rateAtThr), 'Hz') s = s + ' \n%s %5s %3s \n' % ('Simulation time:', str(simTime), 'ms') s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA') infoString = s return GPEmeanRates, SNRmeanRates, infoString
def simulate_example_GPE_snr():
    nFun = 0  # Function number
    nSim = 0  # Simulation number within function

    rates = numpy.array([20, 30])
    times = numpy.array([0., 5000.])
    nGPE = 10
    simTime = 10000.
    I_e = 0.

    my_nest.ResetKernel()
    model_list = models()
    my_nest.MyLoadModels(model_list, neuronModels)
    my_nest.MyLoadModels(model_list, synapseModels)

    GPE = MyGroup('spike_generator', nGPE, mm_dt=1.0, mm=False, sd=False,
                  spath=spath, siter=str(nFun) + str(nSim))
    SNR = MyGroup(neuronModels[0], n=len(synapseModels), params={'I_e': I_e},
                  mm_dt=.1, mm=False, spath=spath,
                  siter=str(nFun) + str(nSim))
    nSim += 1

    if not LOAD:
        spikeTimes = []
        for i in range(nGPE):
            spikes = misc.inh_poisson_spikes(rates, times, t_stop=simTime,
                                             n_rep=1, seed=i)
            my_nest.SetStatus([GPE[i]], params={'spike_times': spikes})
            for spk in spikes:
                spikeTimes.append((i, spk))

        # add spike list for GPE to GPE spike list
        GPE.signals['spikes'] = my_signals.MySpikeList(spikeTimes, GPE.ids)
        GPE.save_signal('s')

        for i, syn in enumerate(synapseModels):
            my_nest.ConvergentConnect(GPE[:], [SNR[i]], model=syn)

        my_nest.MySimulate(simTime)

        SNR.save_signal('s')
        SNR.get_signal('s')  # retrieve signal

    elif LOAD:
        GPE.load_signal('s')
        SNR.load_signal('s')

    SNR_rates = [SNR.signals['spikes'].mean_rates(0, 5000),
                 SNR.signals['spikes'].mean_rates(5000, 10000)]
    for i in range(0, len(SNR_rates)):
        for j in range(0, len(SNR_rates[0])):
            SNR_rates[i][j] = int(SNR_rates[i][j])

    s = '\n'
    s = s + 'Example plot GPE and SNr:\n'
    s = s + 'Synapse models:\n'
    for syn in synapseModels:
        s = s + ' %s\n' % (syn)
    s = s + ' %s %5s %3s \n' % ('N GPE:', str(nGPE), '#')
    s = s + ' %s %5s %3s \n' % ('GPE Rates:',
                                str([str(round(r, 1)) for r in rates]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 0-5000:\n', str(SNR_rates[0]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 5000-10000:\n', str(SNR_rates[1]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nTimes:', str(times), 'ms')
    s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA')
    infoString = s

    return GPE, SNR, infoString
def simulate_selection_vs_neurons(selRateInterval=[0.0, 500.0], hz=0, load=True): global SEL_ONSET global SNR_INJECTED_CURRENT global N_GPE global N_STN global MSN_RATE_BASE global GPE_BASE_RATE global STN_BASE_RATE SNAME_NB = hz #n_exp=200 n_exp = 20 N_MAX_SEL = N_GPE + 1 # Plus one to account for the case when all GPe have stopped RATE_SELE = hz save_at = (SPATH + '/' + NEURON_MODELS[0] + '-' + str(SNAME_NB) + '-hz.pkl') #SEL_TIME = 1000. sim_time = SEL_TIME + SEL_ONSET + 500. experiments = range(n_exp) MODEL_LIST = models() my_nest.ResetKernel() my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS) my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED) my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND) GPE_list = [] # GPE input for each experiment for i_exp in experiments: GPE = MyPoissonInput(n=N_GPE + N_MAX_SEL) GPE_list.append(GPE) MSN_list = [] # MSN input for each experiment for i_exp in experiments: MSN = MyPoissonInput(n=N_MSN, sd=True) MSN_list.append(MSN) STN_list = [] # STN input for each experiment for i_exp in experiments: STN = MyPoissonInput(n=N_STN, sd=True) STN_list.append(STN) SNR_list = [] # SNR groups for each synapse and number of selected GPE I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT for i, i_syn in enumerate(SYNAPSE_MODELS_TESTED): SNR = [] for i_sel in range(N_MAX_SEL): SNR.append( MyGroup(NEURON_MODELS[0], n=n_exp, params={'I_e': I_e}, sd=True, sd_params={ 'start': 0., 'stop': sim_time })) SNR_list.append(SNR) if not load: for i_exp in experiments: GPE = GPE_list[i_exp] MSN = MSN_list[i_exp] STN = STN_list[i_exp] # Set spike times # Base rate for id in MSN[:]: MSN.set_spike_times(id=id, rates=[MSN_RATE_BASE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate STN for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Set spike times # Base rate for id in GPE[0:N_GPE]: GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Selection for id in GPE[N_GPE:N_GPE + N_MAX_SEL]: rates = [GPE_BASE_RATE, RATE_SELE, GPE_BASE_RATE] times = [1, SEL_ONSET, SEL_TIME + SEL_ONSET] t_stop = sim_time GPE.set_spike_times(id=id, rates=rates, times=times, t_stop=t_stop, seed=int(numpy.random.random() * 10000.0)) # Connect for i, syn in enumerate(SYNAPSE_MODELS_TESTED): for i_sel in range(N_MAX_SEL): target = SNR_list[i][i_sel][i_exp] my_nest.ConvergentConnect(GPE[0:N_GPE - i_sel], [target], model=syn) my_nest.ConvergentConnect(GPE[N_GPE:N_GPE + i_sel], [target], model=syn) my_nest.ConvergentConnect( MSN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect( STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate(sim_time) for SNR_sel in SNR_list: for SNR in SNR_sel: SNR.get_signal('s') misc.pickle_save_groups(SNR_list, save_at) elif load: SNR_list = misc.pickle_load_groups(save_at) t1 = selRateInterval[0] t2 = selRateInterval[1] mean_rates = [] delay = my_nest.GetDefaults(SYNAPSE_MODELS_TESTED[0])['delay'] for SNR_sel in SNR_list: m_r = [] for SNR in SNR_sel: m_r_pre = SNR.signals['spikes'].mean_rate(SEL_ONSET - (t2 - t1), SEL_ONSET) m_r_post = SNR.signals['spikes'].mean_rate(SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay) m_r.append(m_r_post) mean_rates.append(m_r) mean_rates = numpy.array(mean_rates) nb_neurons = numpy.arange(0, N_MAX_SEL, 1) s = '\n' s = s + ' %s %5s %3s \n' % ('N GPEs:', str(N_GPE), '#') s = s + ' %s %5s %3s 
\n' % ('N experiments:', str(n_exp), '#') s = s + ' %s %5s %3s \n' % ('Base rate:', str(GPE_BASE_RATE), 'spikes/s') s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE), 'spikes/s') s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME), 'ms') s = s + ' %s %5s %3s \n' % ('SNR_INJECTED_CURRENT:', str(SNR_INJECTED_CURRENT), 'pA') info_string = s return nb_neurons, mean_rates, info_string
def simulate_MSN_vs_SNR_const_syn_events(load=True): global SNR_INJECTED_CURRENT global N_MSN global N_GPE global MSN_BURST_RATE global GPE_BASE_RATE # Path were raw data is saved. For example the spike trains. save_result_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_const_syn_events.pkl' save_header_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_const_syn_events_header' # REMARK can not be more than rate=const_syn_events/burst_rate n_MSN_bursting = numpy.arange(0, N_MAX_BURSTING + 1) n_exp = 200 #n_exp=20 # Solve (500-n)*x + 20*n=600, where 500 is total number of MSNs, 20 is burst # activation, x is MSN mean rate and n is number of bursters. # Then x=(600-20*n)/(500-n) MSNmeanRates = (SYN_EVENTS - MSN_BURST_RATE * n_MSN_bursting) / (N_MSN - n_MSN_bursting) SNRmeanRates = [] sim_time = 3000. if not load: for r, n_MSN_b in zip(MSNmeanRates, n_MSN_bursting): my_nest.ResetKernel(threads=4) model_list, model_dict = models() my_nest.MyLoadModels(model_list, NEURON_MODELS) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED) my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND) MSN = [] SNR = [] GPE = [] STN = [] for i in range(n_exp): MSN.append(MyPoissonInput(n=N_MSN, sd=True)) GPE.append(MyPoissonInput(n=N_GPE, sd=True)) STN.append(MyPoissonInput(n=N_STN, sd=True)) I_e = my_nest.GetDefaults( NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT SNR.append( MyGroup(NEURON_MODELS[0], n=len(SYNAPSE_MODELS_TESTED), params={'I_e': I_e}, sd=True)) for i_exp in range(n_exp): for id in MSN[i_exp][:N_MSN - n_MSN_b]: MSN[i_exp].set_spike_times(id=id, rates=numpy.array([r]), times=numpy.array([1]), t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) for id in MSN[i_exp][N_MSN - n_MSN_b:]: MSN[i_exp].set_spike_times( id=id, rates=numpy.array([r, MSN_BURST_RATE, r]), times=numpy.array([1, SEL_ONSET, SEL_OFFSET]), t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate GPE for id in GPE[i_exp][:]: GPE[i_exp].set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) # Base rate STN for id in STN[i_exp][:]: STN[i_exp].set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random() * 10000.0)) for j, syn in enumerate(SYNAPSE_MODELS_TESTED): my_nest.ConvergentConnect(MSN[i_exp][:], [SNR[i_exp][j]], model=syn) my_nest.ConvergentConnect( GPE[i_exp][:], [SNR[i_exp][j]], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect( STN[i_exp][:], [SNR[i_exp][j]], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate(sim_time) delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay'] SNRmeanRates_tmp = [] for i in range(n_exp): SNR[i].get_signal('s') # retrieve signal SNRmeanRates_tmp.append(SNR[i].signals['spikes'].mean_rates( SEL_ONSET + delay, SEL_OFFSET + delay)) SNRmeanRates.append(numpy.mean(SNRmeanRates_tmp, axis=0)) SNRmeanRates = numpy.array(SNRmeanRates).transpose() s = '\n' s = s + 'simulate_MSN_vs_SNR_const_syn_events:\n' s = s + '%s %5s %3s \n' % ('Syn events:', str(SYN_EVENTS), '#') s = s + '%s %5s %3s \n' % ('n_exp:', str(n_exp), '#') infoString = s header = HEADER_SIMULATION_SETUP + s misc.text_save(header, save_header_at) misc.pickle_save([SNRmeanRates, infoString], save_result_at) elif load: SNRmeanRates, infoString = misc.pickle_load(save_result_at) return n_MSN_bursting, MSNmeanRates, SNRmeanRates, infoString
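
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original code): quick check of the
# constant-synaptic-events bookkeeping used in
# simulate_MSN_vs_SNR_const_syn_events above. With n bursting MSNs firing at
# MSN_BURST_RATE and the remaining N_MSN - n firing at the compensating rate
# x = (SYN_EVENTS - MSN_BURST_RATE * n) / (N_MSN - n), the summed input rate to
# SNr stays at SYN_EVENTS for every n. The helper below is only a verification
# of that algebra, not part of the simulation.
def _sketch_check_constant_events(n_msn, burst_rate, syn_events, n_bursting):
    n_bursting = numpy.asarray(n_bursting, dtype=float)
    base_rates = (syn_events - burst_rate * n_bursting) / (n_msn - n_bursting)
    total = base_rates * (n_msn - n_bursting) + burst_rate * n_bursting
    return numpy.allclose(total, syn_events)
# Example (numbers from the comment in the function above):
# _sketch_check_constant_events(500, 20., 600., numpy.arange(0, 21))
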
def simulate_selection_vs_neurons(selection_intervals=[[0.0, 500.0]], hz=20,
                                  load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global GPE_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp=100
    n_exp = 2

    if hz > 7:
        n_max_sel = 60
        if hz > 20:
            n_max_sel = 30
    else:
        n_max_sel = 100

    RATE_BASE = 0.1
    RATE_SELE = hz

    save_result_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) + '-hz.pkl')
    save_header_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) + '-hz_header')

    burst_time = 500.
    sim_time = burst_time + SEL_ONSET + 500.

    EXPERIMENTS = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    MSN_list = []  # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput(n=N_MSN + n_max_sel, sd=True)
        MSN_list.append(MSN)

    GPE_list = []  # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput(n=N_GPE, sd=True)
        GPE_list.append(GPE)

    SNR_list = []  # SNR groups for each synapse and number of selected MSN
    SNR_list_experiments = []
    for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
        SNR = []
        for i_sel in range(n_max_sel + 1):  # Plus one to get no burst point
            I_e = (my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] +
                   SNR_INJECTED_CURRENT)
            SNR.append(MyGroup(NEURON_MODELS[0], n=n_exp, sd=True,
                               params={'I_e': I_e}))
        SNR_list.append(SNR)

    if not load:
        for i_exp in EXPERIMENTS:
            MSN = MSN_list[i_exp]
            GPE = GPE_list[i_exp]

            # Set spike times
            # Base rate
            for id in MSN[1:N_MSN]:
                MSN.set_spike_times(id=id, rates=[RATE_BASE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in MSN[N_MSN:N_MSN + n_max_sel]:
                rates = [RATE_BASE, RATE_SELE, RATE_BASE]
                times = [1, SEL_ONSET, burst_time + SEL_ONSET]
                t_stop = sim_time
                MSN.set_spike_times(id=id, rates=rates, times=times,
                    t_stop=t_stop, seed=int(numpy.random.random() * 10000.0))

            # Base rate GPE
            for id in GPE[:]:
                GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1],
                    t_stop=sim_time, seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                # i_sel goes over 0,..., n_max_sel
                for i_sel in range(0, n_max_sel + 1):
                    target = SNR_list[i_syn][i_sel][i_exp]
                    my_nest.ConvergentConnect(MSN[0:N_MSN - i_sel], [target],
                                              model=syn)
                    my_nest.ConvergentConnect(MSN[N_MSN:N_MSN + i_sel], [target],
                                              model=syn)
                    my_nest.ConvergentConnect(GPE[:], [target],
                                              model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        for SNR_sel in SNR_list:
            for SNR in SNR_sel:
                SNR.get_signal('s')

        sel_interval_mean_rates = []
        sel_interval_mean_rates_std = []
        for i_interval, interval in enumerate(selection_intervals):
            t1 = selection_intervals[i_interval][0]
            t2 = selection_intervals[i_interval][1]

            mean_rates = []
            mean_rates_std = []

            # Time until arrival of spikes in SNr
            delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay']
            for SNR_sel in SNR_list:
                m_r = []
                m_r_std = []
                for SNR in SNR_sel:
                    m_r.append(SNR.signals['spikes'].mean_rate(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))
                    m_r_std.append(SNR.signals['spikes'].mean_rate_std(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))
                mean_rates.append(m_r)
                mean_rates_std.append(m_r_std)

            mean_rates = numpy.array(mean_rates)
            mean_rates_std = numpy.array(mean_rates_std)

            sel_interval_mean_rates.append(mean_rates)
            sel_interval_mean_rates_std.append(mean_rates_std)

        nb_neurons = numpy.arange(0, n_max_sel + 1, 1)

        s = '\n'
        s = s + ' %s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#')
        s = s + ' %s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + ' %s %5s %3s \n' % ('MSN base rate:', str(RATE_BASE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('MSN burst rate:', str(RATE_SELE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + ' %s %5s %3s \n' % ('SNR_INJECTED_CURRENT:',
                                    str(SNR_INJECTED_CURRENT), 'pA')
        for i_interval, interval in enumerate(selection_intervals):
            s = s + ' %s %5s %3s \n' % ('Sel interval ' + str(i_interval) + ':',
                                        str(interval), 'ms')
        info_string = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([nb_neurons, sel_interval_mean_rates,
                          sel_interval_mean_rates_std, info_string],
                         save_result_at)

    elif load:
        (nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std,
         info_string) = misc.pickle_load(save_result_at)

    return (nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std,
            info_string)
def simulate_GPE_vs_SNR_rate(load=True): global N_GPE global N_MSN global N_STN global MSN_BASE_RATE # Path were data is saved. For example the spike trains. save_at = (SPATH+'/'+NEURON_MODELS[0]+'-' + '-GPE_vs_SNR_rate.pkl') GPEmeanRates=numpy.arange(0,150,1) SNRmeanRates=[] sim_time=50000. I_e=0. if not load: for r in GPEmeanRates: my_nest.ResetKernel() model_list=models() my_nest.MyLoadModels( model_list, NEURON_MODELS ) my_nest.MyLoadModels( model_list, SYNAPSE_MODELS_TESTED ) my_nest.MyLoadModels( model_list, SYNAPSE_MODELS_BACKGROUND ) MSN = MyPoissonInput( n=N_MSN) GPE = MyPoissonInput( n=N_GPE) STN = MyPoissonInput( n=N_STN) I_e=my_nest.GetDefaults(NEURON_MODELS[0])['I_e']+SNR_INJECTED_CURRENT SNR = MyGroup( NEURON_MODELS[0], n=len(SYNAPSE_MODELS_TESTED), sd=True,params={'I_e':I_e}) for id in GPE[:]: GPE.set_spike_times(id=id, rates=[r], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for id in MSN[:]: MSN.set_spike_times(id=id, rates=[MSN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for id in STN[:]: STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], t_stop=sim_time, seed=int(numpy.random.random()*10000.0)) for i, syn in enumerate(SYNAPSE_MODELS_TESTED): my_nest.ConvergentConnect(GPE[:],[SNR[i]], model=syn) my_nest.ConvergentConnect(MSN[:],[SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[0]) my_nest.ConvergentConnect(STN[:],[SNR[i]], model=SYNAPSE_MODELS_BACKGROUND[1]) my_nest.MySimulate( sim_time ) SNR.get_signal( 's') # retrieve signal SNRmeanRates.append(SNR.signals['spikes'].mean_rates( 5000, sim_time)) SNRmeanRates=numpy.array(SNRmeanRates).transpose() GPEmeanRates=numpy.array(GPEmeanRates) rateAtThr='' for SNRr in SNRmeanRates: tmp=str(GPEmeanRates[SNRr>=SELECTION_THR][-1]) rateAtThr+=' '+tmp[0:4] s='\n' s =s + 'GPE vs SNr rate:\n' s = s + ' %s %5s %3s \n' % ( 'N GPEs:', str ( N_GPE ), '#' ) s = s + ' %s %5s %3s \n' % ( 'Max SNr rate:', str ( SNRmeanRates[0] ), '#' ) s = s + ' \n%s \n%5s %3s \n' % ( 'GPE rates:', str ( GPEmeanRates[0] ) + '-'+ str ( GPEmeanRates[-1] ), 'spikes/s' ) s = s + ' %s %5s %3s \n' % ( 'Threshold SNr:', str ( SELECTION_THR ), 'spikes/s' ) s = s + ' \n%s \n%5s %3s \n' % ( 'GPE rate at threshold SNr:', str ( rateAtThr ), 'spikes/s' ) s = s + ' \n%s %5s %3s \n' % ( 'Simulation time:', str ( sim_time), 'ms' ) s = s + ' %s %5s %3s \n' % ( 'I_e:', str ( I_e ), 'pA' ) infoString=s misc.pickle_save([GPEmeanRates, SNRmeanRates, infoString], save_at) elif load: GPEmeanRates, SNRmeanRates, infoString = misc.pickle_load(save_at) return GPEmeanRates, SNRmeanRates, infoString