def simulate_steady_state_freq(frequencies, flag='ss'):
    """Measure relative synaptic facilitation versus stimulation frequency.

    For each synapse model in ``synapseModels`` a fresh NEST kernel is set up
    and one SNr neuron per entry of ``frequencies`` is created; each neuron is
    driven by a regular spike train at that frequency.  The per-spike jump in
    the recorded ``g_GABAA_2`` conductance is normalised by the single-pulse
    efficacy ``weight * U`` of the synapse model.

    Parameters
    ----------
    frequencies : sequence of float
        Stimulation frequencies in Hz; one neuron is simulated per entry.
        ``frequencies[0]`` (assumed to be the slowest) sets the simulation
        length to three of its periods.
    flag : str
        'ss'  -> take the last (steady-state) pulse of each train,
        'max' -> take the largest pulse of each train.

    Returns
    -------
    tuple
        ``(frequencies, relativeFacilitation)`` where the second element is a
        numpy array of shape (n synapse models, n frequencies).
    """
    global sname_nb
    relativeFacilitation = []
    model_list = models()
    n = len(frequencies)
    for syn in synapseModels:
        my_nest.ResetKernel()
        my_nest.MyLoadModels(model_list, neuronModels)
        my_nest.MyLoadModels(model_list, [syn])

        ss = my_nest.GetDefaults(syn)
        # Single-pulse efficacy used below to normalise the responses.
        synapticEficacy = ss['weight'] * ss['U']

        SNR = MyGroup(neuronModels[0], n, mm_dt=.1, params={'I_e': -150.},
                      record_from=['g_GABAA_2'], spath=spath,
                      sname_nb=sname_nb)
        sname_nb += 1

        # Three periods of the first (slowest) frequency.
        tSim = 3 * 1000 / frequencies[0]
        # One regular train per frequency, first spike at t = 1 ms.
        spikeTimes = [numpy.arange(1, tSim, 1000. / f) for f in frequencies]

        if not LOAD:
            # Model-default delay; setting it once (was inside the loop,
            # where it was loop-invariant) is sufficient.
            my_nest.SetDefaults(syn, params={'delay': 1.})
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.Connect(source, [target], model=syn)
            my_nest.MySimulate(tSim)
            SNR.get_signal('g', 'g_GABAA_2', stop=tSim)  # retrieve signal
            SNR.save_signal('g', 'g_GABAA_2', stop=tSim)
        elif LOAD:
            SNR.load_signal('g', 'g_GABAA_2')

        signal = SNR.signals['g_GABAA_2']
        tmpSteadyState = []
        for i, st in enumerate(spikeTimes, start=1):
            # Map spike times (ms) to sample indices of the recorded trace;
            # the +9 offset compensates for the 0.1 ms sampling grid.
            if SNR.mm_dt == 0.1:
                indecies = numpy.int64(numpy.ceil(st * 10)) + 9
            elif SNR.mm_dt == 1.:
                indecies = numpy.int64(numpy.ceil(st))
            # Conductance jump caused by each presynaptic spike.
            values = signal[i].signal[indecies] - signal[i].signal[indecies - 1]
            if flag == 'ss':
                tmpSteadyState.append(values[-1] / synapticEficacy)
            if flag == 'max':
                tmpSteadyState.append(max(values) / synapticEficacy)
        relativeFacilitation.append(tmpSteadyState)

    relativeFacilitation = numpy.array(relativeFacilitation)
    return frequencies, relativeFacilitation
def simulate_recovery(revoceryTimes):
    """Measure recovery of synaptic efficacy after a short spike burst.

    Each neuron receives five spikes at 100 Hz (t = 1, 11, 21, 31, 41 ms)
    followed by one test spike ``rt`` ms after the last burst spike.  The
    ``g_GABAA_2`` conductance jump caused by the test spike, normalised by
    the single-pulse efficacy ``weight * U``, quantifies recovery.

    Parameters
    ----------
    revoceryTimes : sequence of float
        Recovery intervals in ms; one neuron is simulated per entry.
        NOTE(review): assumes max(revoceryTimes) + 41 < 5000 ms (tSim) so
        the test spike falls inside the simulated window — verify callers.

    Returns
    -------
    tuple
        ``(revoceryTimes, relativeRecovery)`` where the second element is a
        numpy array of shape (n synapse models, n recovery times).
    """
    global sname_nb
    relativeRecovery = []
    model_list = models()
    n = len(revoceryTimes)
    for syn in synapseModels:
        my_nest.ResetKernel()
        my_nest.MyLoadModels(model_list, neuronModels)
        my_nest.MyLoadModels(model_list, [syn])

        ss = my_nest.GetDefaults(syn)
        # Single-pulse efficacy used below to normalise the responses.
        synapticEficacy = ss['weight'] * ss['U']

        SNR = MyGroup(neuronModels[0], n, mm_dt=.1, params={'I_e': -150.},
                      record_from=['g_GABAA_2'], spath=spath,
                      sname_nb=sname_nb)
        sname_nb += 1

        tSim = 5000
        # Burst of five spikes at 100 Hz plus one test spike rt ms later.
        spikeTimes = [numpy.array([1., 11., 21., 31., 41., 41 + rt])
                      for rt in revoceryTimes]

        if not LOAD:
            # Model-default delay; setting it once (was inside the loop,
            # where it was loop-invariant) is sufficient.
            my_nest.SetDefaults(syn, params={'delay': 1.})
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.Connect(source, [target], model=syn)
            my_nest.MySimulate(tSim)
            SNR.get_signal('g', 'g_GABAA_2', stop=tSim)  # retrieve signal
            SNR.save_signal('g', 'g_GABAA_2', stop=tSim)
        elif LOAD:
            SNR.load_signal('g', 'g_GABAA_2')

        signal = SNR.signals['g_GABAA_2']
        tmpSteadyState = []
        for i, st in enumerate(spikeTimes, start=1):
            # Map spike times (ms) to sample indices of the recorded trace;
            # the +9 offset compensates for the 0.1 ms sampling grid.
            if SNR.mm_dt == 0.1:
                indecies = numpy.int64(numpy.ceil(st * 10)) + 9
            elif SNR.mm_dt == 1.:
                indecies = numpy.int64(numpy.ceil(st))
            values = signal[i].signal[indecies] - signal[i].signal[indecies - 1]
            # Response to the last (test) spike relative to a naive synapse.
            tmpSteadyState.append(values[-1] / synapticEficacy)
        relativeRecovery.append(tmpSteadyState)

    relativeRecovery = numpy.array(relativeRecovery)
    return revoceryTimes, relativeRecovery
def simulate_example_msn_snr():
    """Run an example network of 500 MSN spike sources driving SNr neurons.

    One SNr neuron is created per entry of ``synapseModels`` and all MSN
    generators converge onto each of them through that synapse model.  MSN
    firing is inhomogeneous-Poisson at ``rates`` over ``times`` segments.
    Returns ``(MSN, SNR, infoString)`` where infoString is a human-readable
    summary of the run.
    """
    nFun = 0  # Function number (used to tag saved data files)
    nSim = 0  # Simulation number within function
    rates = numpy.array([.1, .1])       # MSN rates (Hz) per time segment
    times = numpy.array([0., 25000.])   # segment onsets (ms)
    nMSN = 500
    simTime = 100000.  # ms
    I_e = 0.           # pA, injected current into SNr neurons
    my_nest.ResetKernel()
    model_list = models()
    my_nest.MyLoadModels(model_list, neuronModels)
    my_nest.MyLoadModels(model_list, synapseModels)
    # MSN are plain spike generators: no multimeter / spike detector needed.
    MSN = MyGroup('spike_generator', nMSN, mm_dt=1.0, mm=False, sd=False,
                  spath=spath, sname_nb=str(nFun) + str(nSim))
    SNR = MyGroup(neuronModels[0], n=len(synapseModels), params={'I_e': I_e},
                  sd=True, mm_dt=.1, mm=False, spath=spath,
                  sname_nb=str(nFun) + str(nSim))
    nSim += 1
    spikeTimes = []
    for i in range(nMSN):
        # Per-source seed makes the spike trains reproducible.
        spikes = misc.inh_poisson_spikes(rates, times, t_stop=simTime,
                                         n_rep=1, seed=i)
        my_nest.SetStatus([MSN[i]], params={'spike_times': spikes})
        for spk in spikes:
            spikeTimes.append((i, spk))
    # add spike list for MSN to MSN spike list
    MSN.signals['spikes'] = my_signals.MySpikeList(spikeTimes, MSN.ids)
    MSN.save_signal('s')
    # Shared background noise into the first three SNr neurons.
    # NOTE(review): hard-codes SNR[0..2]; assumes len(synapseModels) >= 3
    # and leaves any further SNr neurons without noise — confirm intent.
    noise = my_nest.Create('noise_generator', params={'std': 100.})
    my_nest.Connect(noise, [SNR[0]], params={'receptor_type': 5})
    my_nest.Connect(noise, [SNR[1]], params={'receptor_type': 5})
    my_nest.Connect(noise, [SNR[2]], params={'receptor_type': 5})
    # All MSN converge onto one SNr neuron per synapse model.
    for i, syn in enumerate(synapseModels):
        my_nest.ConvergentConnect(MSN[:], [SNR[i]], model=syn)
    my_nest.MySimulate(simTime)
    SNR.get_signal('s')  # retrieve signal
    # Mean rates in the first and second 5 s windows, truncated to ints.
    SNR_rates = [SNR.signals['spikes'].mean_rates(0, 5000),
                 SNR.signals['spikes'].mean_rates(5000, 10000)]
    for i in range(0, len(SNR_rates)):
        for j in range(0, len(SNR_rates[0])):
            SNR_rates[i][j] = int(SNR_rates[i][j])
    s = '\n'
    s = s + 'Example plot MSN and SNr:\n'
    s = s + 'Synapse models:\n'
    for syn in synapseModels:
        s = s + ' %s\n' % (syn)
    s = s + ' %s %5s %3s \n' % ('N MSN:', str(nMSN), '#')
    s = s + ' %s %5s %3s \n' % ('MSN Rates:',
                                str([str(round(r, 1)) for r in rates]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 0-5000:\n',
                                str(SNR_rates[0]), 'Hz')
    # NOTE(review): label says '10000-5000' but the window above is
    # 5000-10000 ms — label looks reversed (left unchanged: runtime string).
    s = s + ' %s %5s %3s \n' % ('\nSNR Rates 10000-5000:\n',
                                str(SNR_rates[1]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('\nTimes:', str(times), 'ms')
    s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA')
    infoString = s
    return MSN, SNR, infoString
def simulate_steady_state_freq(frequencies, flag='ss', load=True):
    """Simulate (or load pre-computed) relative facilitation vs frequency.

    With ``load=False`` the simulation runs for every synapse model in
    ``SYNAPSE_MODELS`` and the result is pickled; with ``load=True`` the
    previously pickled result is returned instead.

    Parameters
    ----------
    frequencies : sequence of float
        Stimulation frequencies in Hz; one neuron per entry.
        ``frequencies[0]`` sets the simulation length to three of its
        periods.
    flag : str
        'ss'  -> use the last (steady-state) pulse of each train,
        'max' -> use the largest pulse of each train.
    load : bool
        If True, skip simulation and load the pickled result.

    Returns
    -------
    tuple
        ``(frequencies, relative_fac, info_string)``.
    """
    # Path where raw data is saved. For example the spike trains.
    save_result_at = OUTPUT_PATH + '/simulate_steady_state_freq.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_steady_state_freq_header'

    relative_fac = []
    n = len(frequencies)
    if not load:
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()
            model_list, model_dict = models()
            my_nest.MyLoadModels(model_list, NEURON_MODELS)
            my_nest.MyLoadModels(model_list, [syn])

            ss = my_nest.GetDefaults(syn)
            # Single-pulse efficacy used below to normalise the responses.
            synapticEficacy = ss['weight'] * ss['U']

            SNR = MyGroup(NEURON_MODELS[0], n, mm=True, mm_dt=.1,
                          params={'I_e': -150.},
                          record_from=['g_GABAA_1'])

            # Three periods of the first frequency.
            tSim = 3 * 1000 / frequencies[0]
            # One regular train per frequency, first spike at t = 1 ms.
            spikeTimes = [numpy.arange(1, tSim, 1000. / f)
                          for f in frequencies]

            # Model-default delay; setting it once (was inside the loop,
            # where it was loop-invariant) is sufficient.
            my_nest.SetDefaults(syn, params={'delay': 1.})
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.Connect(source, [target], model=syn)

            my_nest.MySimulate(tSim)
            SNR.get_signal('g', 'g_GABAA_1', stop=tSim)  # retrieve signal
            SNR.save_signal('g', 'g_GABAA_1', stop=tSim)
            SNR.load_signal('g', 'g_GABAA_1')

            signal = SNR.signals['g_GABAA_1']
            tmpss = []
            for i, st in enumerate(spikeTimes, start=1):
                # Map spike times (ms) to sample indices; +9 compensates
                # for the 0.1 ms sampling grid.
                if SNR.mm_dt == 0.1:
                    indecies = numpy.int64(numpy.ceil(st * 10)) + 9
                elif SNR.mm_dt == 1.:
                    indecies = numpy.int64(numpy.ceil(st))
                # Conductance jump caused by each presynaptic spike.
                values = (signal[i].signal[indecies]
                          - signal[i].signal[indecies - 1])
                if flag == 'ss':
                    tmpss.append(values[-1] / synapticEficacy)
                if flag == 'max':
                    tmpss.append(max(values) / synapticEficacy)
            relative_fac.append(tmpss)

        relative_fac = numpy.array(relative_fac)
        # Summary uses the first synapse model only — presumably the
        # reference model; verify against plotting code.
        max_rel_fac = str(max(relative_fac[0]))
        s = '\n'
        s = s + ' %s %5s %3s \n' % (flag + ' Max:', max_rel_fac[0:6], '--')
        info_string = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([frequencies, relative_fac, s], save_result_at)
    elif load:
        # BUGFIX: previously unpacked into a stray 'revoceryTimes' name and
        # discarded it, so the function returned the caller's frequencies
        # instead of the frequencies the pickled results were computed for.
        frequencies, relative_fac, info_string = misc.pickle_load(
            save_result_at)
    return frequencies, relative_fac, info_string
def simulate_selection_vs_neurons(selRateInterval=[0.0, 500.0], hz=20):
    """Measure SNr firing rate as a function of how many GPE inputs are
    'selected' (driven at ``hz``) versus firing at the background rate.

    For each synapse model and each number of selected inputs
    (1..nMaxSelected), nExp independent experiments are run on separate SNr
    neurons; mean rates are averaged over experiments and grouped per
    synapse model.  Returns ``(nbNeurons, meanRates, infoString)``.

    NOTE(review): ``selRateInterval`` is a mutable default argument; it is
    only read here, but callers should not mutate it.
    """
    sname_nb = hz  # local tag for saved data; distinct from the global
    nGPE = 500
    nExp = 5  # independent repetitions per (synapse, nSelected) pair
    # Fewer selected-input counts are explored for strong selection rates.
    if hz > 7:
        nMaxSelected = 60
    else:
        nMaxSelected = 100
    baseRate = 0.1      # Hz, background GPE rate
    selectionRate = hz  # Hz, rate of the selected GPE inputs
    I_e = -5.           # pA
    simTime = 3500.     # ms
    model_list = models()
    selectionTime = 3000.   # ms, duration of the selection epoch
    selectionOnset = 500.   # ms, start of the selection epoch
    # expParams: one (syn, nSelected, experiment, SNr index) tuple per neuron.
    # expIntervals: [start, stop) slices grouping the nExp repetitions.
    expParams = []
    expIntervals = []
    iSNR = 0
    for syn in SYNAPSE_MODELS:
        for iSel in range(nMaxSelected):
            expIntervals.append([iSNR, iSNR + nExp])
            for iExp in range(nExp):
                expParams.append((syn, iSel, iExp, iSNR))
                iSNR += 1
    # synIntervals: slices grouping all neurons of one synapse model.
    synIntervals = []
    iSNR = 0
    for syn in SYNAPSE_MODELS:
        synIntervals.append([iSNR, iSNR + nMaxSelected])
        iSNR += nMaxSelected
    my_nest.ResetKernel()
    my_nest.MyLoadModels(model_list, NEURONMODELS)
    my_nest.MyLoadModels(model_list, SYNAPSE_MODELS)
    SNR = MyGroup(NEURONMODELS[0], n=len(expParams), params={'I_e': I_e},
                  mm_dt=.1, record_from=[''], spath=SPATH,
                  sname_nb=sname_nb)
    sourceBack = []
    sourceSel = []
    for iExp in range(nExp):
        # Background
        tmpSourceBack = []
        for iGPE in range(nGPE - 1):
            spikeTimes = misc.inh_poisson_spikes([baseRate], [1],
                                                 t_stop=simTime, n_rep=nExp,
                                                 seed=iGPE + 10 * iExp)
            # Skip generators whose Poisson draw produced no spikes.
            if any(spikeTimes):
                tmpSourceBack.extend(
                    my_nest.Create('spike_generator',
                                   params={'spike_times': spikeTimes}))
        sourceBack.append(tmpSourceBack)
    # NOTE(review): sourceSel is never populated before it is indexed in the
    # connect loop below — when LOAD is false, sourceSel[iExp] raises
    # IndexError.  The code creating the 'selected' (selectionRate-driven)
    # spike generators appears to be missing; confirm against the original
    # version of this file.
    if not LOAD:
        for syn, iSel, iExp, iSNR in expParams:
            print 'Connect SNR ' + str(SNR[iSNR]) + ' ' + syn
            target = SNR[iSNR]
            # nGPE - iSel background inputs plus iSel + 1 selected inputs.
            my_nest.ConvergentConnect(sourceBack[iExp][0:nGPE - iSel],
                                      [target], model=syn)
            my_nest.ConvergentConnect(sourceSel[iExp][0:iSel + 1],
                                      [target], model=syn)
        my_nest.MySimulate(simTime)
        SNR.save_signal('s')
        SNR.get_signal('s')  # retrieve signal
        #SNR.get_signal( 'v','V_m' ) # retrieve signal
        #SNR.signals['V_m'].plot()
        #SNR.signals['spikes'].raster_plot()
        #pylab.show()
    if LOAD:
        SNR.load_signal('s')
        #SNR.get_signal( 'v','V_m', stop=simTime ) # retrieve signal
        #SNR.signals['V_m'].plot(id_list=[5])
        #SNR.['spikes'].raster_plot()
        #pylab.show()
    # Rate window measured relative to the selection onset.
    t1 = selRateInterval[0]
    t2 = selRateInterval[1]
    tmpMeanRates1 = []
    tmpMeanRates2 = []  # NOTE(review): assigned but never used
    tmpMeanRates3 = []
    tmpMeanRates4 = []
    tmpMeanRates1 = SNR.signals['spikes'].mean_rates(selectionOnset + t1,
                                                     selectionOnset + t2)
    # Average over the nExp repetitions of each (synapse, nSelected) pair.
    for interval in expIntervals:
        tmpMeanRates3.append(
            numpy.mean(tmpMeanRates1[interval[0]:interval[1]], axis=0))
    # Group the averaged rates per synapse model.
    for interval in synIntervals:
        tmpMeanRates4.append(tmpMeanRates3[interval[0]:interval[1]])
    meanRates = numpy.array(tmpMeanRates4)
    nbNeurons = numpy.arange(1, nMaxSelected + 1, 1)
    s = '\n'
    s = s + ' %s %5s %3s \n' % ('N GPEs:', str(nGPE), '#')
    s = s + ' %s %5s %3s \n' % ('N experiments:', str(nExp), '#')
    s = s + ' %s %5s %3s \n' % ('Base rate:', str(baseRate), 'Hz')
    s = s + ' %s %5s %3s \n' % ('Selection rate:', str(selectionRate), 'Hz')
    s = s + ' %s %5s %3s \n' % ('Selection time:', str(selectionTime), 'ms')
    s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA')
    infoString = s
    return nbNeurons, meanRates, infoString
def simulate_GPE_vs_SNR_rate():
    """Sweep the GPE input rate and record the resulting SNr firing rates.

    For each rate in 1..49 Hz (step 2) a fresh kernel is built with nGPE
    Poisson spike generators converging onto one SNr neuron per synapse
    model.  Returns ``(GPEmeanRates, SNRmeanRates, infoString)`` where
    SNRmeanRates has shape (n synapse models, n GPE rates).
    """
    nFun = 1  # Function number (used to tag saved data files)
    nSim = 0  # Simulation number within function
    GPEmeanRates = numpy.arange(1, 50, 2)  # Hz, swept input rates
    SNRmeanRates = []
    nGPE = 10
    simTime = 10000.  # ms
    I_e = 0.          # pA
    for r in GPEmeanRates:
        my_nest.ResetKernel()
        model_list = models()
        my_nest.MyLoadModels(model_list, neuronModels)
        my_nest.MyLoadModels(model_list, synapseModels)
        GPE = MyGroup('spike_generator', nGPE, mm_dt=1.0, mm=False,
                      sd=False, spath=spath, siter=str(nFun) + str(nSim))
        SNR = MyGroup(neuronModels[0], n=len(synapseModels),
                      params={'I_e': I_e}, mm_dt=.1, mm=False, spath=spath,
                      siter=str(nFun) + str(nSim))
        nSim += 1
        if not LOAD:
            spikeTimes = []
            for i in range(nGPE):
                # Constant-rate Poisson train; per-source seed for
                # reproducibility.
                spikes = misc.inh_poisson_spikes(numpy.array([r]),
                                                 numpy.array([0]),
                                                 t_stop=simTime, n_rep=1,
                                                 seed=i)
                my_nest.SetStatus([GPE[i]], params={'spike_times': spikes})
                for spk in spikes:
                    spikeTimes.append((i, spk))
            # add spike list for GPE to GPE spike list
            GPE.signals['spikes'] = my_signals.MySpikeList(spikeTimes,
                                                           GPE.ids)
            GPE.save_signal('s')
            # All GPE converge onto one SNr neuron per synapse model.
            for i, syn in enumerate(synapseModels):
                my_nest.ConvergentConnect(GPE[:], [SNR[i]], model=syn)
            my_nest.MySimulate(simTime)
            SNR.save_signal('s')
            SNR.get_signal('s')  # retrieve signal
        elif LOAD:
            SNR.load_signal('s')
        SNRmeanRates.append(SNR.signals['spikes'].mean_rates(0, simTime))
    # Transpose so that rows index synapse models, columns GPE rates.
    SNRmeanRates = numpy.array(SNRmeanRates).transpose()
    GPEmeanRates = numpy.array(GPEmeanRates)
    THR = 2.  # Hz, SNr rate threshold
    rateAtThr = ''
    for SNRr in SNRmeanRates:
        # Last swept GPE rate at which the SNr rate is still >= THR —
        # presumably the threshold crossing of the (decreasing) curve;
        # truncated to 4 chars for display.
        tmp = str(GPEmeanRates[SNRr >= THR][-1])
        rateAtThr += ' ' + tmp[0:4]
    s = '\n'
    s = s + 'GPE vs SNr rate:\n'
    s = s + ' %s %5s %3s \n' % ('N GPEs:', str(nGPE), '#')
    s = s + ' \n%s \n%5s %3s \n' % ('GPE rates:',
                                    str(GPEmeanRates[0]) + '-'
                                    + str(GPEmeanRates[-1]), 'Hz')
    s = s + ' %s %5s %3s \n' % ('Threshold SNr:', str(THR), 'Hz')
    s = s + ' \n%s \n%5s %3s \n' % ('GPE rate at threshold SNr:',
                                    str(rateAtThr), 'Hz')
    s = s + ' \n%s %5s %3s \n' % ('Simulation time:', str(simTime), 'ms')
    s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA')
    infoString = s
    return GPEmeanRates, SNRmeanRates, infoString
def simulate_example_GPE_snr(): nFun = 0 # Function number nSim = 0 # Simulation number within function rates = numpy.array([20, 30]) times = numpy.array([0., 5000.]) nGPE = 10 simTime = 10000. I_e = 0. my_nest.ResetKernel() model_list = models() my_nest.MyLoadModels(model_list, neuronModels) my_nest.MyLoadModels(model_list, synapseModels) GPE = MyGroup('spike_generator', nGPE, mm_dt=1.0, mm=False, sd=False, spath=spath, siter=str(nFun) + str(nSim)) SNR = MyGroup(neuronModels[0], n=len(synapseModels), params={'I_e': I_e}, mm_dt=.1, mm=False, spath=spath, siter=str(nFun) + str(nSim)) nSim += 1 if not LOAD: spikeTimes = [] for i in range(nGPE): spikes = misc.inh_poisson_spikes(rates, times, t_stop=simTime, n_rep=1, seed=i) my_nest.SetStatus([GPE[i]], params={'spike_times': spikes}) for spk in spikes: spikeTimes.append((i, spk)) # add spike list for GPE to GPE spike list GPE.signals['spikes'] = my_signals.MySpikeList(spikeTimes, GPE.ids) GPE.save_signal('s') for i, syn in enumerate(synapseModels): my_nest.ConvergentConnect(GPE[:], [SNR[i]], model=syn) my_nest.MySimulate(simTime) SNR.save_signal('s') SNR.get_signal('s') # retrieve signal elif LOAD: GPE.load_signal('s') SNR.load_signal('s') SNR_rates = [ SNR.signals['spikes'].mean_rates(0, 5000), SNR.signals['spikes'].mean_rates(5000, 10000) ] for i in range(0, len(SNR_rates)): for j in range(0, len(SNR_rates[0])): SNR_rates[i][j] = int(SNR_rates[i][j]) s = '\n' s = s + 'Example plot GPE and SNr:\n' s = s + 'Synapse models:\n' for syn in synapseModels: s = s + ' %s\n' % (syn) s = s + ' %s %5s %3s \n' % ('N GPE:', str(nGPE), '#') s = s + ' %s %5s %3s \n' % ('GPE Rates:', str([str(round(r, 1)) for r in rates]), 'Hz') s = s + ' %s %5s %3s \n' % ('\nSNR Rates 0-5000:\n', str( SNR_rates[0]), 'Hz') s = s + ' %s %5s %3s \n' % ('\nSNR Rates 10000-5000:\n', str( SNR_rates[1]), 'Hz') s = s + ' %s %5s %3s \n' % ('\nTimes:', str(times), 'ms') s = s + ' %s %5s %3s \n' % ('I_e:', str(I_e), 'pA') infoString = s return GPE, SNR, 
infoString