def simulate_steady_state_freq(frequencies, flag='ss'):
    """Measure relative synaptic facilitation vs. stimulation frequency.

    One neuron per entry in *frequencies* is driven by a regular spike
    train through each model in the module-global synapseModels; the
    evoked g_GABAA_2 conductance steps are normalized by the first-pulse
    efficacy weight*U.

    frequencies - sequence of stimulation rates (Hz); frequencies[0]
                  also sets the simulated time (three of its periods)
    flag        - 'ss'  -> use the last evoked conductance step
                  'max' -> use the largest evoked conductance step

    Returns (frequencies, relativeFacilitation) where the second item is
    an array with one row per synapse model.

    Side effects: reads/updates the module-global sname_nb so each
    recording group gets a unique save name; resets the NEST kernel.
    """
    global sname_nb

    relativeFacilitation = []
    model_list = models()
    n = len(frequencies)

    for syn in synapseModels:
        # Fresh kernel per synapse model so the runs are independent.
        my_nest.ResetKernel()
        my_nest.MyLoadModels(model_list, neuronModels)
        my_nest.MyLoadModels(model_list, [syn])

        # Reference efficacy (first-pulse amplitude weight*U) used to
        # normalize the evoked conductance steps below.
        ss = my_nest.GetDefaults(syn)
        synapticEficacy = ss['weight'] * ss['U']

        # One neuron per frequency; I_e=-150 presumably keeps the cells
        # from firing on their own — TODO confirm against the neuron model.
        SNR = MyGroup(neuronModels[0], n, mm_dt=.1,
                      params={'I_e': -150.},
                      record_from=['g_GABAA_2'],
                      spath=spath, sname_nb=sname_nb)
        sname_nb += 1

        # Simulate three periods of the first frequency.
        tSim = 3 * 1000 / frequencies[0]
        spikeTimes = []
        for f in frequencies:
            isi = 1000. / f
            spikeTimes.append(numpy.arange(1, tSim, isi))

        if not LOAD:
            # Drive each neuron with its own regular spike train.
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(source, [target], model=syn)

            my_nest.MySimulate(tSim)

            SNR.get_signal('g', 'g_GABAA_2', stop=tSim)  # retrieve signal
            SNR.save_signal('g', 'g_GABAA_2', stop=tSim)
        elif LOAD:
            SNR.load_signal('g', 'g_GABAA_2')

        signal = SNR.signals['g_GABAA_2']

        tmpSteadyState = []
        for i, st in enumerate(spikeTimes, start=1):
            # Convert spike times (ms) into sample indices of the recorded
            # trace; the +9 offset samples just before the next dt=0.1 step.
            if SNR.mm_dt == 0.1:
                indecies = numpy.int64(numpy.ceil(st * 10)) + 9
            elif SNR.mm_dt == 1.:
                indecies = numpy.int64(numpy.ceil(st))

            # Conductance jump evoked by each presynaptic spike.
            values = signal[i].signal[indecies] - signal[i].signal[indecies - 1]

            if flag == 'ss':
                tmpSteadyState.append(values[-1] / synapticEficacy)
            if flag == 'max':
                tmpSteadyState.append(max(values) / synapticEficacy)

        relativeFacilitation.append(tmpSteadyState)

    relativeFacilitation = numpy.array(relativeFacilitation)

    return frequencies, relativeFacilitation
def simulate_recovery(revoceryTimes):
    """Measure synaptic recovery after a conditioning pulse train.

    One neuron per entry in *revoceryTimes* receives five pulses at
    100 Hz (1..41 ms) plus a test pulse at 41+rt ms, for each model in
    the module-global synapseModels.  The test-pulse g_GABAA_2 step,
    normalized by the first-pulse efficacy weight*U, is collected.

    revoceryTimes - sequence of recovery intervals (ms)

    Returns (revoceryTimes, relativeRecovery) where the second item is
    an array with one row per synapse model.

    Side effects: reads/updates the module-global sname_nb so each
    recording group gets a unique save name; resets the NEST kernel.
    """
    global sname_nb

    relativeRecovery = []
    model_list = models()
    n = len(revoceryTimes)

    for syn in synapseModels:
        # Fresh kernel per synapse model so the runs are independent.
        my_nest.ResetKernel()
        my_nest.MyLoadModels(model_list, neuronModels)
        my_nest.MyLoadModels(model_list, [syn])

        # Reference efficacy used to normalize the evoked steps.
        ss = my_nest.GetDefaults(syn)
        synapticEficacy = ss['weight'] * ss['U']

        SNR = MyGroup(neuronModels[0], n, mm_dt=.1,
                      params={'I_e': -150.},
                      record_from=['g_GABAA_2'],
                      spath=spath, sname_nb=sname_nb)
        sname_nb += 1

        tSim = 5000
        spikeTimes = []
        for rt in revoceryTimes:
            # Five conditioning pulses at 100 Hz, then one test pulse
            # after the recovery interval rt.
            spikeTimes.append(numpy.array([1., 11., 21., 31., 41., 41 + rt]))

        if not LOAD:
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(source, [target], model=syn)

            my_nest.MySimulate(tSim)

            SNR.get_signal('g', 'g_GABAA_2', stop=tSim)  # retrieve signal
            SNR.save_signal('g', 'g_GABAA_2', stop=tSim)
        elif LOAD:
            SNR.load_signal('g', 'g_GABAA_2')

        signal = SNR.signals['g_GABAA_2']

        tmpSteadyState = []
        for i, st in enumerate(spikeTimes, start=1):
            # Convert spike times (ms) into recorded-sample indices.
            if SNR.mm_dt == 0.1:
                indecies = numpy.int64(numpy.ceil(st * 10)) + 9
            elif SNR.mm_dt == 1.:
                indecies = numpy.int64(numpy.ceil(st))

            # Conductance jump evoked by each spike; keep only the
            # test pulse (last entry), normalized.
            values = signal[i].signal[indecies] - signal[i].signal[indecies - 1]
            tmpSteadyState.append(values[-1] / synapticEficacy)

        relativeRecovery.append(tmpSteadyState)

    relativeRecovery = numpy.array(relativeRecovery)

    return revoceryTimes, relativeRecovery
def multiply_weight_factor(synapses, weightFactor):
    """Scale the default 'weight' of every synapse model in *synapses*
    by *weightFactor*, in place via the NEST model defaults."""
    # Iterate the models directly instead of indexing by position.
    for syn in synapses:
        # Multiply the current default weight with the weight factor.
        defaults = my_nest.GetDefaults(syn)
        my_nest.SetDefaults(syn, {'weight': defaults['weight'] * weightFactor})
def simulate_network(params_msn_d1, params_msn_d2, params_stn, synapse_models,
                     sim_time, seed, I_e_add, threads=1, start_rec=0,
                     model_params={}):
    '''
    Build, connect and simulate the action-channel network, then retrieve
    the recorded spike signal of every population.

    params_msn_d1 - dictionary with timing and burst freq setup for msn
                    {'base_rates':[0.1, 0.1, ..., 0.1], # Size number of actions
                     'mod_rates': [[20,0,...,0], [0,20,...,0],...[0,0,...,20]]
                        # size number of actions times number of events
                     'mod_times':[[500,1000],[1500,2000],[9500,10000]]
                        # size number of events
                     'n_neurons':500}
    params_msn_d2  - dictionary with timing and burst freq setup for gpe
    params_stn     - dictionary {'rate':50} same as params_msn
    synapse_models - dict, {'MSN':'...', 'GPE':,'...', 'STN':'...'}
    sim_time       - simulation time
    seed           - seed for random generator
    I_e_add        - disabled (overwritten with fixed values below)
    threads        - NOTE(review): currently ignored; kernel uses 8 threads
    start_rec      - start recording from
    model_params   - general model paramters

    Returns a dict mapping population name to a list with one group
    object per action channel.
    '''
    # The I_e_add argument is deliberately replaced with these fixed
    # per-population offsets (parameter is documented as disabled).
    I_e_add = {'SNR': 300, 'STN': 0, 'GPE': 30}
    f = 0.01  #0.01#0.5
    # NOTE(review): computed but never used in this function.
    I_e_variation = {'GPE': 25 * f, 'SNR': 100 * f, 'STN': 10 * f}

    # NOTE(review): thread count hard-coded; the 'threads' parameter is unused.
    my_nest.ResetKernel(threads=8)
    numpy.random.seed(seed)

    # Inject the chosen MSN_D1->SNR and GPE->SNR synapse models into the
    # parameter tree; model_params may override everything else.
    params = {
        'conns': {
            'MSN_D1_SNR': {
                'syn': synapse_models[0]
            },
            'GPE_SNR': {
                'syn': synapse_models[1]
            }
        }
    }
    params = misc.dict_merge(model_params, params)

    model_list, model_dict = models()
    group_list, group_dict, connect_list, connect_params = network(
        model_dict, params)
    print connect_params

    groups = {}
    for name, model, setup in group_list:
        # Update input current
        my_nest.MyLoadModels(model_dict, [model])
        if name in I_e_add.keys():
            I_e = my_nest.GetDefaults(model)['I_e'] + I_e_add[name]
            my_nest.SetDefaults(model, {'I_e': I_e})

        # One group per action channel: spike-generator MSN populations
        # become MyPoissonInput, all others MyGroup with a spike recorder.
        groups[name] = []
        for action in range(connect_params['misc']['n_actions']):
            if model in ['MSN_D1_spk_gen', 'MSN_D2_spk_gen']:
                group = MyPoissonInput(params=setup, sd=True,
                                       sd_params={
                                           'start': start_rec,
                                           'stop': sim_time
                                       })
            else:
                group = MyGroup(params=setup, sd=True, mm=False, mm_dt=0.1,
                                sd_params={
                                    'start': start_rec,
                                    'stop': sim_time
                                })
            groups[name].append(group)

    # Assign the per-action modulation (burst) spike trains to the MSN inputs.
    for action in range(connect_params['misc']['n_actions']):
        groups['MSN_D1'][action].set_spike_times(
            list(params_msn_d1['mod_rates'][action]),
            list(params_msn_d1['mod_times']),
            sim_time,
            ids=groups['MSN_D1'][action].ids)
        groups['MSN_D2'][action].set_spike_times(
            params_msn_d2['mod_rates'][action],
            params_msn_d2['mod_times'],
            sim_time,
            ids=groups['MSN_D2'][action].ids)

    # Create neurons and synapses
    for source, target, props in connect_list:
        my_nest.MyLoadModels(model_dict, [props['model']])
        for action in range(connect_params['misc']['n_actions']):
            pre = list(groups[source][action].ids)
            post = list(groups[target][action].ids)
            my_nest.MyRandomConvergentConnect(pre, post, params=props)

    # Shared cortical poisson drive onto every STN action channel.
    STN_CTX_input_base = my_nest.Create('poisson_generator',
                                        params={
                                            'rate': params_stn['rate'],
                                            'start': 0.,
                                            'stop': sim_time
                                        })
    my_nest.MyLoadModels(model_dict, ['CTX_STN_ampa_s'])
    for action in range(connect_params['misc']['n_actions']):
        my_nest.DivergentConnect(STN_CTX_input_base,
                                 groups['STN'][action].ids,
                                 model='CTX_STN_ampa_s')

    my_nest.MySimulate(sim_time)

    # Pull the recorded spike data into each group object.
    for action in range(connect_params['misc']['n_actions']):
        groups['MSN_D1'][action].get_signal('s', start=start_rec, stop=sim_time)
        groups['MSN_D2'][action].get_signal('s', start=start_rec, stop=sim_time)
        groups['GPE'][action].get_signal('s', start=start_rec, stop=sim_time)
        groups['SNR'][action].get_signal('s', start=start_rec, stop=sim_time)
        groups['STN'][action].get_signal('s', start=start_rec, stop=sim_time)

    return groups
def simulate_network_poisson(params_msn_d1, params_msn_d2, params_stn,
                             synapse_models, sim_time, seed, I_e_add,
                             threads=1, start_rec=0, model_params={},
                             params_in={}, p_weights=False, p_conn=False,
                             p_I_e=False):
    '''
    Assume that the background MSN are static weak, then can use poisson
    process for them.

    params_msn_d1 - dictionary with timing and burst freq setup for msn
                    {'base_rates':0.1, 'base_times':[1], 'mod_rates': 20,
                     'mod_times':[1,200], 'mod_units':list(),
                     'n_tot':500, n_mod=20}
    params_msn_d2  - dictionary with timing and burst freq setup for gpe
    params_stn     - dictionary {'rate':50} same as params_msn
    synapse_models - dict, {'MSN':'...', 'GPE':,'...', 'STN':'...'}
    sim_time       - simulation time
    seed           - seed for random generator
    I_e_add        - disabled
    threads        - NOTE(review): currently ignored; kernel uses 8 threads
    start_rec      - start recording from
    model_params   - general model paramters
    params_in      - NOTE(review): accepted but not used in this variant
    p_weights      - weight perturbation, forwarded to models()
    p_conn         - connectivity perturbation, forwarded to network()
    p_I_e          - optional [SNR, GPE, STN] multipliers on in-vivo I_e

    Returns dict mapping population name -> layer object with retrieved
    spike signals.
    '''
    # Inject the chosen MSN_D1->SNR and GPE->SNR synapse models.
    params = {
        'conns': {
            'MSN_D1_SNR': {
                'syn': synapse_models[0]
            },
            'GPE_SNR': {
                'syn': synapse_models[1]
            }
        }
    }

    # NOTE(review): thread count hard-coded; the 'threads' parameter is unused.
    my_nest.ResetKernel(threads=8)
    numpy.random.seed(seed)

    params = misc.dict_merge(model_params, params)
    # Default: no paused GPE neurons unless model_params overrides it.
    params = misc.dict_merge({'neurons': {'GPE': {'paused': 0}}}, params)

    model_list, model_dict = models({}, p_weights)
    layer_list, connect_list = network(model_dict, params, p_conn)

    # Per-population multipliers applied to the in-vivo input current.
    dic_p_I_e = {'SNR': 1., 'GPE': 1., 'STN': 1.}
    if p_I_e is not False:
        dic_p_I_e['SNR'] *= p_I_e[0]
        dic_p_I_e['GPE'] *= p_I_e[1]
        dic_p_I_e['STN'] *= p_I_e[2]

    # Create neurons and synapses
    layer_dic = {}
    for name, model, props in layer_list:
        # Update input current
        my_nest.MyLoadModels(model_dict, [model[1]])
        if name in I_IN_VIVO.keys():
            I_in_vitro = my_nest.GetDefaults(model[1])['I_e']
            I_e = I_in_vitro + I_IN_VIVO[name]
            my_nest.SetDefaults(model[1], {'I_e': I_e * dic_p_I_e[name]})

        #! Create layer, retrieve neurons ids per elements and p
        if model[0] == 'spike_generator':
            layer = MyLayerPoissonInput(layer_props=props, sd=True,
                                        sd_params={
                                            'start': start_rec,
                                            'stop': sim_time
                                        })
        elif model[0] == 'poisson_generator':
            layer = MyPoissonInput(model[0], props['columns'], sd=True,
                                   sd_params={
                                       'start': start_rec,
                                       'stop': sim_time
                                   })
        else:
            layer = MyLayerGroup(layer_props=props, sd=True, mm=False,
                                 mm_dt=0.1,
                                 sd_params={
                                     'start': start_rec,
                                     'stop': sim_time
                                 })
            # NOTE(review): per-neuron setup only for real neuron layers —
            # generator devices have no 'I_e'; confirm intended scope.
            for iter, id in enumerate(layer[:]):
                # Optionally pause the first 'paused' GPE neurons during
                # the D2 modulation window with a negative step current.
                if name == 'GPE' and params_msn_d2[
                        'n_mod'] and iter < params['neurons']['GPE']['paused']:
                    scg = my_nest.Create('step_current_generator', n=1)
                    rec = my_nest.GetStatus([id])[0]['receptor_types']
                    my_nest.SetStatus(
                        scg, {
                            'amplitude_times': params_msn_d2['mod_times'],
                            'amplitude_values': [0., -300., 0.]
                        })
                    my_nest.Connect(scg, [id],
                                    params={'receptor_type': rec['CURR']})

                # Heterogeneous input currents drawn around the model default.
                I_e = my_nest.GetDefaults(model[1])['I_e']
                if I_E_VARIATION[name]:
                    I = numpy.random.normal(I_e,
                                            I_E_VARIATION[name])  #I_E_VARIATION[name])
                else:
                    I = I_e
                my_nest.SetStatus([id], {'I_e': I})

        layer_dic[name] = layer

    # Connect populations
    for conn in connect_list:
        print[conn[2]['synapse_model']]
        if not conn[2]['synapse_model'] in nest.Models():
            my_nest.MyLoadModels(model_dict, [conn[2]['synapse_model']])
        if layer_dic[conn[0]].model == 'poisson_generator':
            # Plain connect for poisson background sources.
            my_nest.Connect(layer_dic[conn[0]].ids,
                            layer_dic[conn[1]].ids,
                            model=conn[2]['synapse_model'])
        else:
            # NOTE(review): 'name' is computed here but never used below.
            name = conn[0] + '_' + conn[1] + '_' + conn[3]
            tp.ConnectLayers(layer_dic[conn[0]].layer_id,
                             layer_dic[conn[1]].layer_id, conn[2])
            layer_dic[conn[1]].add_connection(source=layer_dic[conn[0]],
                                              type=conn[3],
                                              props=conn[2])

    # Sort MSN D2 such that the closest to center is first in ids list.
    # Do this to we can get focused inhibition in GPe
    if params_msn_d2['focus']:
        MSN_D2_idx = layer_dic['MSN_D2'].sort_ids()
    else:
        MSN_D2_idx = range(len(numpy.array(layer_dic['MSN_D2'].ids)))

    n_mod_msn_d1 = params_msn_d1['n_mod']
    n_mod_msn_d2 = params_msn_d2['n_mod']

    MSN_D1_ids = layer_dic['MSN_D1'].ids
    MSN_D2_ids = layer_dic['MSN_D2'].ids

    # Split each MSN population into modulated (bursting) and base units;
    # D2 picks every skip-th id to spread the modulated units out.
    MSN_D1_mod, MSN_D2_mod = [], []
    if params_msn_d1['n_mod']:
        MSN_D1_mod = MSN_D1_ids[0:n_mod_msn_d1]
    if params_msn_d2['n_mod']:
        MSN_D2_mod = MSN_D2_ids[0:n_mod_msn_d2 *
                                params_msn_d2['skip']:params_msn_d2['skip']]

    MSN_D1_base = list(set(MSN_D1_ids).difference(MSN_D1_mod))
    MSN_D2_base = list(set(MSN_D2_ids).difference(MSN_D2_mod))

    layer_dic['MSN_D1'].set_spike_times(params_msn_d1['base_rates'],
                                        params_msn_d1['base_times'], sim_time,
                                        ids=MSN_D1_base)
    layer_dic['MSN_D2'].set_spike_times(params_msn_d2['base_rates'],
                                        params_msn_d2['base_times'], sim_time,
                                        ids=MSN_D2_base)
    if params_msn_d1['n_mod']:
        # NOTE(review): unlike the D2 call below (and the *_test variant),
        # no ids=MSN_D1_mod is passed here — presumably defaults to all
        # ids; confirm this is intended.
        layer_dic['MSN_D1'].set_spike_times(params_msn_d1['mod_rates'],
                                            params_msn_d1['mod_times'],
                                            sim_time)
    if params_msn_d2['n_mod']:
        layer_dic['MSN_D2'].set_spike_times(params_msn_d2['mod_rates'],
                                            params_msn_d2['mod_times'],
                                            sim_time,
                                            ids=MSN_D2_mod)

    # Background poisson inputs, if used.
    if params_msn_d1['bg_rate']:
        layer_dic['MSN_D1_bg'].set_spike_times(params_msn_d1['bg_rate'], [1.],
                                               sim_time)
    if params_msn_d2['bg_rate']:
        layer_dic['MSN_D2_bg'].set_spike_times(params_msn_d2['bg_rate'], [1.],
                                               sim_time)

    # Base cortical poisson drive onto STN.
    STN_CTX_input_base = my_nest.Create('poisson_generator',
                                        params={
                                            'rate': BASE_RATE_CTX_STN,
                                            'start': 0.,
                                            'stop': sim_time
                                        })
    my_nest.MyLoadModels(model_dict, ['CTX_STN_ampa_s'])
    if 'STN' in layer_dic.keys():
        my_nest.DivergentConnect(STN_CTX_input_base,
                                 layer_dic['STN'].ids,
                                 model='CTX_STN_ampa_s')
    if params_stn['mod'] and 'STN' in layer_dic.keys():
        # Extra cortical drive during the STN modulation window.
        STN_CTX_input_mod = my_nest.Create('poisson_generator',
                                           params={
                                               'rate': params_stn['mod_rate'],
                                               'start':
                                               params_stn['mod_times'][0],
                                               'stop':
                                               params_stn['mod_times'][1]
                                           })
        my_nest.DivergentConnect(STN_CTX_input_mod,
                                 layer_dic['STN'].ids,
                                 model='CTX_STN_ampa_s')

    my_nest.MySimulate(sim_time)

    # Remember which units carried the modulated (burst) input.
    if params_msn_d1['n_mod']:
        layer_dic['MSN_D1'].id_mod = MSN_D1_mod
    if params_msn_d2['n_mod']:
        layer_dic['MSN_D2'].id_mod = MSN_D2_mod

    # Pull the recorded spike data into each layer object.
    if 'MSN_D1' in layer_dic.keys():
        layer_dic['MSN_D1'].get_signal('s', start=start_rec, stop=sim_time)
    if 'MSN_D2' in layer_dic.keys():
        layer_dic['MSN_D2'].get_signal('s', start=start_rec, stop=sim_time)
    if 'GPE' in layer_dic.keys():
        layer_dic['GPE'].get_signal('s', start=start_rec, stop=sim_time)
    if 'SNR' in layer_dic.keys():
        layer_dic['SNR'].get_signal('s', start=start_rec, stop=sim_time)
    if 'STN' in layer_dic.keys():
        layer_dic['STN'].get_signal('s', start=start_rec, stop=sim_time)

    return layer_dic
def simulate_network_test(params_msn_d1, params_msn_d2, params_stn,
                          synapse_models, sim_time, seed, I_e_add, threads=1,
                          start_rec=0, model_params={}, params_in={},
                          dis_conn_GPE_STN=False):
    '''
    Debug/inspection variant of the network simulation: builds the
    network, optionally replaces the GPE->SNR or STN->SNR pathway with an
    equivalent poisson drive, records conductances from the first STN
    neuron with a multimeter and plots them with pylab.

    params_msn_d1 - dictionary with timing and burst freq setup for msn
                    {'base_rates':0.1, 'base_times':[1], 'mod_rates': 20,
                     'mod_times':[1,200], 'mod_units':list(),
                     'n_tot':500, n_mod=20}
    params_msn_d2    - dictionary with timing and burst freq setup for gpe
    params_stn       - dictionary {'rate':50} same as params_msn
    synapse_models   - dict, {'MSN':'...', 'GPE':,'...', 'STN':'...'}
    sim_time         - simulation time
    seed             - seed for random generator
    I_e_add          - disabled
    threads          - NOTE(review): ignored; kernel uses 8 threads
    start_rec        - start recording from
    model_params     - general model paramters
    params_in        - forwarded to models()
    dis_conn_GPE_STN - 'GPE' or 'STN': disconnect that pathway to SNR and
                       substitute a poisson drive at its mean rate
    '''
    # NOTE(review): thread count hard-coded; the 'threads' parameter is unused.
    my_nest.ResetKernel(threads=8)
    numpy.random.seed(seed)

    # Inject the chosen MSN_D1->SNR and GPE->SNR synapse models.
    params = {
        'conns': {
            'MSN_D1_SNR': {
                'syn': synapse_models[0]
            },
            'GPE_SNR': {
                'syn': synapse_models[1]
            }
        }
    }
    params = misc.dict_merge(model_params, params)
    # Default: no paused GPE neurons unless model_params overrides it.
    params = misc.dict_merge({'neurons': {'GPE': {'paused': 0}}}, params)

    model_list, model_dict = models(params_in)
    layer_list, connect_list = network(model_dict, params)

    # Create neurons and synapses
    layer_dic = {}
    for name, model, props in layer_list:
        # Update input current
        my_nest.MyLoadModels(model_dict, [model[1]])
        if name in I_IN_VIVO.keys():
            I_e = my_nest.GetDefaults(model[1])['I_e'] + I_IN_VIVO[name]
            my_nest.SetDefaults(model[1], {'I_e': I_e})

        #! Create layer, retrieve neurons ids per elements and p
        if model[0] == 'spike_generator':
            layer = MyLayerPoissonInput(layer_props=props, sd=True,
                                        sd_params={
                                            'start': start_rec,
                                            'stop': sim_time
                                        })
        else:
            layer = MyLayerGroup(layer_props=props, sd=True, mm=False,
                                 mm_dt=0.1,
                                 sd_params={
                                     'start': start_rec,
                                     'stop': sim_time
                                 })
            # NOTE(review): per-neuron setup only for real neuron layers —
            # spike generators have no 'I_e'; confirm intended scope.
            for iter, id in enumerate(layer[:]):
                # Optionally pause the first 'paused' GPE neurons during
                # the D2 modulation window with a negative step current.
                if name == 'GPE' and params_msn_d2[
                        'n_mod'] and iter < params['neurons']['GPE']['paused']:
                    scg = my_nest.Create('step_current_generator', n=1)
                    rec = my_nest.GetStatus([id])[0]['receptor_types']
                    my_nest.SetStatus(
                        scg, {
                            'amplitude_times': params_msn_d2['mod_times'],
                            'amplitude_values': [0., -300., 0.]
                        })
                    my_nest.Connect(scg, [id],
                                    params={'receptor_type': rec['CURR']})

                # Heterogeneous input currents drawn around the model default.
                I_e = my_nest.GetDefaults(model[1])['I_e']
                if I_E_VARIATION[name]:
                    I = numpy.random.normal(I_e, I_E_VARIATION[name])
                else:
                    I = I_e
                my_nest.SetStatus([id], {'I_e': I})

        layer_dic[name] = layer

    # Multimeter on the first STN neuron for conductance inspection.
    mm = nest.Create('multimeter', 1)
    recodables = ['V_m', 'I', 'g_AMPA', 'g_NMDA', 'g_GABAA_1', 'g_GABAA_2']
    my_nest.SetStatus(mm, {'interval': 0.1, 'record_from': recodables})
    my_nest.Connect(mm, [layer_dic['STN'].ids[0]])

    # Connect populations
    for conn in connect_list:
        name = conn[0] + '_' + conn[1]
        my_nest.MyLoadModels(model_dict, [conn[2]['synapse_model']])
        if dis_conn_GPE_STN == 'GPE' and (name in ['GPE_SNR']):
            # Replace GPE->SNR with poisson drive (32 inputs at 30 Hz).
            r, syn = 32 * 30.0, 'GPE_SNR_gaba_s_ref'
            if not syn in my_nest.Models():
                my_nest.MyLoadModels(model_dict, [syn])
            pg = my_nest.Create('poisson_generator', 1, {
                'rate': r,
                'start': 1.
            })
            my_nest.DivergentConnect(pg, layer_dic[conn[1]].ids, model=syn)
        elif dis_conn_GPE_STN == 'STN' and (name in ['STN_SNR']):
            # Replace STN->SNR with poisson drive (30 inputs at 10 Hz).
            r, syn = 30 * 10.0, 'STN_SNR_ampa_s'
            if not syn in my_nest.Models():
                my_nest.MyLoadModels(model_dict, [syn])
            pg = my_nest.Create('poisson_generator', 1, {
                'rate': r,
                'start': 1.
            })
            my_nest.DivergentConnect(pg, layer_dic[conn[1]].ids, model=syn)
        else:
            name = name + '_' + conn[3]
            tp.ConnectLayers(layer_dic[conn[0]].layer_id,
                             layer_dic[conn[1]].layer_id, conn[2])
            layer_dic[conn[1]].add_connection(source=layer_dic[conn[0]],
                                              type=conn[3],
                                              props=conn[2])

    # Sort MSN D2 such that the closest to center is first in ids list.
    # Do this to we can get focused inhibition in GPe
    if params_msn_d2['focus']:
        MSN_D2_idx = layer_dic['MSN_D2'].sort_ids()
    else:
        MSN_D2_idx = range(len(numpy.array(layer_dic['MSN_D2'].ids)))

    n_mod_msn_d1 = params_msn_d1['n_mod']
    n_mod_msn_d2 = params_msn_d2['n_mod']

    MSN_D1_ids = layer_dic['MSN_D1'].ids
    MSN_D2_ids = layer_dic['MSN_D2'].ids

    # Split each MSN population into modulated (bursting) and base units;
    # D2 picks every skip-th id to spread the modulated units out.
    MSN_D1_mod, MSN_D2_mod = [], []
    if params_msn_d1['n_mod']:
        MSN_D1_mod = MSN_D1_ids[0:n_mod_msn_d1]
    if params_msn_d2['n_mod']:
        MSN_D2_mod = MSN_D2_ids[0:n_mod_msn_d2 *
                                params_msn_d2['skip']:params_msn_d2['skip']]

    MSN_D1_base = list(set(MSN_D1_ids).difference(MSN_D1_mod))
    MSN_D2_base = list(set(MSN_D2_ids).difference(MSN_D2_mod))

    #layer_dic['MSN_D1'].ids[0:n_base_msn_d1]
    #MSN_D2_ids=numpy.array(layer_dic['MSN_D2'].ids)
    #MSN_D2_base=MSN_D2_ids#[MSN_D2_idx[0:n_base_msn_d1]]
    #set().difference(t)

    layer_dic['MSN_D1'].set_spike_times(params_msn_d1['base_rates'],
                                        params_msn_d1['base_times'], sim_time,
                                        ids=MSN_D1_base)
    layer_dic['MSN_D2'].set_spike_times(params_msn_d2['base_rates'],
                                        params_msn_d2['base_times'], sim_time,
                                        ids=MSN_D2_base)
    if params_msn_d1['n_mod']:
        layer_dic['MSN_D1'].set_spike_times(params_msn_d1['mod_rates'],
                                            params_msn_d1['mod_times'],
                                            sim_time,
                                            ids=MSN_D1_mod)
    if params_msn_d2['n_mod']:
        layer_dic['MSN_D2'].set_spike_times(params_msn_d2['mod_rates'],
                                            params_msn_d2['mod_times'],
                                            sim_time,
                                            ids=MSN_D2_mod)

    # Base cortical poisson drive onto STN.
    STN_CTX_input_base = my_nest.Create('poisson_generator',
                                        params={
                                            'rate': BASE_RATE_CTX_STN,
                                            'start': 0.,
                                            'stop': sim_time
                                        })
    my_nest.MyLoadModels(model_dict, ['CTX_STN_ampa_s'])
    my_nest.DivergentConnect(STN_CTX_input_base,
                             layer_dic['STN'].ids,
                             model='CTX_STN_ampa_s')
    if params_stn['mod']:
        # Extra cortical drive during the STN modulation window.
        STN_CTX_input_mod = my_nest.Create('poisson_generator',
                                           params={
                                               'rate': params_stn['mod_rate'],
                                               'start':
                                               params_stn['mod_times'][0],
                                               'stop':
                                               params_stn['mod_times'][1]
                                           })
        my_nest.DivergentConnect(STN_CTX_input_mod,
                                 layer_dic['STN'].ids,
                                 model='CTX_STN_ampa_s')

    # Debug helpers kept for reference (connection-target histograms):
    #tar=[]
    #for id in layer_dic['MSN_D1'].ids:
    #    tar.extend(sorted(nest.GetStatus(my_nest.FindConnections([id]),'target'))[:-1])
    #pylab.subplot(211).hist(tar, 1500)
    #tar=[]
    #for id in layer_dic['MSN_D2'].ids:
    #    tar.extend(sorted(nest.GetStatus(my_nest.FindConnections([id]),'target'))[1:])
    #pylab.subplot(212).hist(tar, 1500)
    #pylab.show()

    my_nest.MySimulate(sim_time)

    # Remember which units carried the modulated (burst) input.
    if params_msn_d1['n_mod']:
        layer_dic['MSN_D1'].id_mod = MSN_D1_mod
    if params_msn_d2['n_mod']:
        layer_dic['MSN_D2'].id_mod = MSN_D2_mod

    #layer_dic['MSN_D1'].get_signal( 's', start=start_rec, stop=sim_time )
    #layer_dic['MSN_D2'].get_signal( 's', start=start_rec, stop=sim_time )
    #layer_dic['GPE'].get_signal( 's', start=start_rec, stop=sim_time )
    #layer_dic['SNR'].get_signal( 's', start=start_rec, stop=sim_time )
    #layer_dic['STN'].get_signal( 's', start=start_rec, stop=sim_time )

    # Plot the recorded conductances of the monitored STN neuron.
    st_mm = my_nest.GetStatus(mm)[0]
    pylab.plot(st_mm['events']['g_AMPA'])
    pylab.plot(st_mm['events']['g_GABAA_1'])
    pylab.plot(st_mm['events']['g_NMDA'])
    pylab.plot(st_mm['events']['g_GABAA_2'])
    m_ampa = numpy.mean(st_mm['events']['g_AMPA'])
    m_gaba = numpy.mean(st_mm['events']['g_GABAA_1'])
    pylab.title("{0} m_ampa:{1:2.1f} m_gaba:{2:2.1f}".format(
        my_nest.version(), m_ampa, m_gaba))
    pylab.show()

    return layer_dic
def simulate_recovery(revoceryTimes, load=True):
    """Paired-pulse recovery experiment for the models in SYNAPSE_MODELS.

    Each neuron receives twenty conditioning pulses at 100 Hz followed by
    one test pulse after its recovery interval; the test-pulse g_AMPA
    conductance step is normalized by the first-pulse efficacy weight*U.

    revoceryTimes - sequence of recovery intervals (ms)
    load          - True: read cached results from disk instead of simulating

    Returns (revoceryTimes, relativeRecovery) with one row per synapse model.
    """
    # Path were raw data is saved. For example the spike trains.
    result_path = OUTPUT_PATH + '/simulate_recovery.pkl'
    header_path = OUTPUT_PATH + '/simulate_recovery_header'

    relativeRecovery = []
    n_trains = len(revoceryTimes)

    if load:
        revoceryTimes, relativeRecovery = misc.pickle_load(result_path)
    else:
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()
            model_list, model_dict = models()
            my_nest.MyLoadModels(model_list, NEURON_MODELS)
            my_nest.MyLoadModels(model_list, [syn])

            # First-pulse efficacy weight*U used for normalization.
            syn_defaults = my_nest.GetDefaults(syn)
            synapticEficacy = syn_defaults['weight'] * syn_defaults['U']

            SNR = MyGroup(NEURON_MODELS[0], n_trains, mm=True, mm_dt=.1,
                          params={'I_e': -150.}, record_from=['g_AMPA'])

            tSim = 10000
            # Twenty conditioning pulses at 100 Hz (1..191 ms) — chosen so
            # the train ends at a pair-pulse ratio of 0.2 — plus one test
            # pulse after the recovery interval.
            conditioning = [1. + 10. * k for k in range(20)]
            spikeTimes = [numpy.array(conditioning + [191. + rt])
                          for rt in revoceryTimes]

            for target, train in zip(SNR, spikeTimes):
                generator = my_nest.Create('spike_generator',
                                           params={'spike_times': train})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(generator, [target], model=syn)

            my_nest.MySimulate(tSim)

            SNR.get_signal('g', 'g_AMPA', stop=tSim)  # retrieve signal
            signal = SNR.signals['g_AMPA']

            per_train = []
            for idx, train in enumerate(spikeTimes, start=1):
                # Map spike times (ms) onto recorded-sample indices.
                if SNR.mm_dt == 0.1:
                    sample = numpy.int64(numpy.ceil(train * 10)) + 9
                elif SNR.mm_dt == 1.:
                    sample = numpy.int64(numpy.ceil(train))

                steps = (signal[idx].signal[sample] -
                         signal[idx].signal[sample - 1])
                per_train.append(steps[-1] / synapticEficacy)

            relativeRecovery.append(per_train)

        relativeRecovery = numpy.array(relativeRecovery)

        # Cache results plus a text header describing the setup.
        misc.text_save(HEADER_SIMULATION_SETUP, header_path)
        misc.pickle_save([revoceryTimes, relativeRecovery], result_path)

    return revoceryTimes, relativeRecovery
def simulate_steady_state_freq(frequencies, flag='ss', load=True):
    """Relative facilitation of the g_AMPA conductance vs. stimulation
    frequency, for every model in SYNAPSE_MODELS.

    frequencies - stimulation rates (Hz); frequencies[0] sets the
                  simulated time (five of its periods)
    flag        - 'ss' uses the last conductance step of each train,
                  'max' the largest one
    load        - True: read cached results from disk instead of simulating

    Returns (frequencies, relativeFacilitation) with one row per model.
    """
    # Path were raw data is saved. For example the spike trains.
    result_path = OUTPUT_PATH + '/simulate_steady_state_freq.pkl'
    header_path = OUTPUT_PATH + '/simulate_steady_state_freq_header'

    relativeFacilitation = []
    n_cells = len(frequencies)

    if load:
        frequencies, relativeFacilitation = misc.pickle_load(result_path)
    else:
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()
            model_list, model_dict = models()
            my_nest.MyLoadModels(model_list, NEURON_MODELS)
            my_nest.MyLoadModels(model_list, [syn])

            SNR = MyGroup(NEURON_MODELS[0], n_cells, mm=True, mm_dt=.1,
                          params={'I_e': -150.}, record_from=['g_AMPA'])

            # Simulate five periods of the first frequency.
            tSim = 5 * 1000 / frequencies[0]

            per_model = []
            # One regular spike train per frequency.
            spikeTimes = [numpy.arange(1, tSim, 1000. / freq)
                          for freq in frequencies]

            for target, train in zip(SNR, spikeTimes):
                generator = my_nest.Create('spike_generator',
                                           params={'spike_times': train})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(generator, [target], model=syn)

            my_nest.MySimulate(tSim)

            SNR.get_signal('g', 'g_AMPA', stop=tSim)  # retrieve signal
            signal = SNR.signals['g_AMPA']

            for idx, train in enumerate(spikeTimes, start=1):
                # Map spike times (ms) onto recorded-sample indices.
                if SNR.mm_dt == 0.1:
                    sample = numpy.int64(numpy.ceil(train * 10)) + 9
                elif SNR.mm_dt == 1.:
                    sample = numpy.int64(numpy.ceil(train))

                steps = (signal[idx].signal[sample] -
                         signal[idx].signal[sample - 1])

                # Normalize by the first-pulse efficacy weight*U.
                syn_defaults = my_nest.GetDefaults(syn)
                synapticEficacy = syn_defaults['weight'] * syn_defaults['U']

                if flag == 'ss':
                    per_model.append(steps[-1] / synapticEficacy)
                if flag == 'max':
                    per_model.append(max(steps) / synapticEficacy)

            relativeFacilitation.append(per_model)

        relativeFacilitation = numpy.array(relativeFacilitation)

        # Cache results plus a text header describing the setup.
        misc.text_save(HEADER_SIMULATION_SETUP, header_path)
        misc.pickle_save([frequencies, relativeFacilitation], result_path)

    return frequencies, relativeFacilitation