Example No. 1
def fmin(load, save_at):

    # Candidate starting points [current, w_GPE_STN] for different target rates:
    # x0=[188, 0.08]      # 20 Hz
    # 29.720625
    # 0.011041875
    # x0=[290, 0.119]     # 25 Hz
    # x0=[430, 0.18]      # 30 Hz
    # x0=[540, 0.215]     # 35 Hz
    # x0=[702, 0.28]      # 40 Hz
    # x0=[830., 0.336]    # 45 Hz
    # x0=[876.7, 0.349]   # 46 Hz
    # x0=[1000.8, 0.3957] # 50 Hz
    # x0=[1159., 0.458]   # 55 Hz
    # x0=[1159.+2.5*5*29.7, 0.458+2.5*5*0.01104]  # 80 Hz
    # z=[1161, 454]
    x0 = [2102, 0.794]  # [current, w_GPE_STN] 80 Hz
    if not load:
        [xopt,fopt, iter, funcalls , warnflag, allvecs] = opt.fmin(error_fun, 
                                                                   x0, 
                                                                   args=([sim_time]), 
                                                                   maxiter=20, 
                                                                   maxfun=20, 
                                                                   full_output=1, retall=1)

        misc.pickle_save([xopt,fopt, iter, funcalls , warnflag, allvecs], save_at)
    else:
        [xopt,fopt, iter, funcalls , warnflag, allvecs]=misc.pickle_load(save_at)        
    return xopt  
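
# The fmin() above uses a load-or-compute caching pattern: run the downhill
# simplex search once (opt.fmin here appears to be scipy.optimize.fmin, given
# the full_output/retall return values), pickle the result, and reload it on
# later calls. A minimal self-contained sketch of the same pattern; the toy
# objective, file handling and names below are illustrative and not part of
# the original module (which wraps pickling in misc.pickle_save/pickle_load).
def _cached_fmin_sketch(load, save_at, x0=(0.0,)):
    import os
    import pickle
    from scipy import optimize

    def toy_error(x, target=3.0):
        # Toy objective: squared distance of x[0] from a target value.
        return (x[0] - target) ** 2

    if not load or not os.path.exists(save_at):
        out = optimize.fmin(toy_error, list(x0), maxiter=20, maxfun=20,
                            full_output=1, retall=1)
        xopt, fopt, n_iter, funcalls, warnflag, allvecs = out
        with open(save_at, 'wb') as f:
            pickle.dump([xopt, fopt, n_iter, funcalls, warnflag, allvecs], f)
    else:
        with open(save_at, 'rb') as f:
            xopt, fopt, n_iter, funcalls, warnflag, allvecs = pickle.load(f)
    return xopt

# Example use: _cached_fmin_sketch(load=False, save_at='/tmp/toy_fmin.pkl')
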
def simulate_diff_3x_freq(hzs=[10,20, 30],n_msns=[40/2, 20/2, 13/2], res=20, load=True):

    save_result_at=OUTPUT_PATH+'/simulate_diff_3x_freq.pkl'
    n_exp=100
    if not load:  
        spk_mean=[]
        for hz, n_msn in zip(hzs,n_msns):
            sim_time= SEL_ONSET+1000
            params_msn={'base_rates':[MSN_BASE_RATE], 'base_times':[1], 'mod_rates': [MSN_BASE_RATE, hz, MSN_BASE_RATE],
                        'mod_times':[1,SEL_ONSET, SEL_ONSET+500], 'n_tot':N_MSN, 'n_mod':n_msn}
            params_gpe={'base_rates':[GPE_BASE_RATE], 'base_times':[1], 'n_tot':N_GPE, 'n_mod':0}
            params_stn={'base_rates':[STN_BASE_RATE], 'base_times':[1], 'n_tot':N_STN, 'n_mod':0}
            synapse_models={'MSN':'MSN_SNR_gaba_p1', 'GPE':'GPE_SNR_gaba_p',
                            'STN':'STN_SNR_ampa_s'}
            
            #times, spk_binned =_simulate_model([params_msn, params_gpe,params_gpe, 'SNR_izh', 
            #                                   synapse_models, sim_time, 0])
            
            t=ttime.time()
        
            times, spk_binned =simulate_model(params_msn, params_gpe, params_stn, 'SNR_izh', 
                                              synapse_models, sim_time, res, n_exp=n_exp,threads=4)
            
            print 'Time:',ttime.time()-t
            spk_mean.append(numpy.mean(spk_binned,axis=0))
        spk_mean=numpy.array(spk_mean)*1000/res       
        misc.pickle_save([times, spk_mean], save_result_at)        
    else:        
        times, spk_mean = misc.pickle_load(save_result_at)   
    return times, spk_mean
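
# simulate_diff_3x_freq() turns trial-averaged, binned spike counts into rates
# in spikes/s by scaling with 1000/res, where res is the bin width in ms. A
# minimal numpy-only sketch of that conversion (array shapes are illustrative):
def _binned_counts_to_hz(spk_binned, res):
    """spk_binned: (n_trials, n_bins) spike counts; res: bin width in ms."""
    import numpy
    mean_counts = numpy.mean(spk_binned, axis=0)  # average over trials
    return mean_counts * 1000.0 / res             # counts per bin -> spikes/s

# Example: 3 trials, 4 bins of 20 ms each
# _binned_counts_to_hz(numpy.array([[1, 0, 2, 1], [0, 1, 1, 2], [1, 1, 0, 1]]), res=20)
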
def simulate_filterting_burst(load, save_at, interval, N_MSN, params_msn_d1,
                              params_msn_d2, models_msn, sim_time, start_rec):

    N_MSN_syn_on_SNR = 500.
    max_base_rate = 0.9
    max_syn_events_burst = 1000  # Max synaptic events from the burst alone; the
    # background contribution is added on top (see the return statement).
    msn_burst_rate = 20.
    msn_base_rate = 0.1

    n_max_bursting = (max_syn_events_burst) / msn_burst_rate - 1

    n_burst_per_SNR = numpy.arange(1, n_max_bursting, 2)
    prop_mod = n_burst_per_SNR / N_MSN_syn_on_SNR
    mod_const_syn = prop_mod * N_MSN

    mod = prop_mod * float(N_MSN)
    mod = numpy.array([int(m) for m in mod])

    freq = numpy.ones(len(mod)) * msn_burst_rate
    syn_events_burst = numpy.array(mod * freq +
                                   (N_MSN - mod) * 0.1) * 500. / N_MSN

    model_params = {
        'misc': {
            'N_MSN': N_MSN
        },
        'conns': {
            'MSN_D2_GPE': {
                'lines': False
            }
        },
        'neurons': {
            'MSN_D1': {
                'n': N_MSN
            },
            'MSN_D2': {
                'n': N_MSN
            }
        }
    }
    seed = range(len(mod))
    base_rates = numpy.ones(len(mod)) * 0.1

    if not load:
        mr = simulate_filterting_burst_fun(mod, freq, base_rates,
                                           params_msn_d1, params_msn_d2,
                                           params_stn, synapse_models,
                                           sim_time, seed, {}, threads,
                                           start_rec, model_params)
        misc.pickle_save(mr, save_at)
    else:
        mr = misc.pickle_load(save_at)

    mrb = numpy.mean(mr[:, interval[0]:interval[1]], axis=1)
    syn_ev_b = numpy.array(mod * freq + (N_MSN - mod) * 0.1) * 500. / N_MSN
    data = numpy.array([syn_ev_b, mrb])

    return data
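
# The syn_events computations above estimate the synaptic events per second
# arriving at a single SNr neuron: n_burst MSNs fire at the burst rate, the
# remaining MSNs at the 0.1 Hz base rate, and the population rate is scaled by
# 500/N_MSN because about 500 MSN synapses are assumed to converge on each SNr
# neuron (N_MSN_syn_on_SNR above). A minimal sketch of that arithmetic:
def _syn_events_per_snr(n_burst, burst_hz, n_msn, base_hz=0.1, n_syn_on_snr=500.):
    import numpy
    n_burst = numpy.asarray(n_burst, dtype=float)
    total_rate = n_burst * burst_hz + (n_msn - n_burst) * base_hz
    return total_rate * n_syn_on_snr / n_msn

# Example (numbers are illustrative): 20 bursting MSNs at 20 Hz out of 15000
# _syn_events_per_snr(20, 20., 15000.)  # ~63 synaptic events/s per SNr neuron
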
def simulate(load,
             save_at,
             n_exp,
             res,
             params_msn_d1,
             params_msn_d2,
             params_stn,
             synapse_models,
             sim_time,
             seed,
             start_rec,
             I_e_add,
             threads,
             model_params,
             dis_conn_GPE_STN=False):

    if not load:
        r = []

        for i in range(n_exp):
            seed = i
            # Note: numpy.random.randint(1) always returns 0, so no jitter is
            # actually added to start_rec or sim_time here.
            p = numpy.random.randint(1)
            start_rec += p
            sim_time += p
            layer_dic = simulate_network(params_msn_d1,
                                         params_msn_d2,
                                         params_stn,
                                         synapse_models,
                                         sim_time,
                                         seed,
                                         I_e_add,
                                         threads,
                                         start_rec,
                                         model_params,
                                         dis_conn_GPE_STN=dis_conn_GPE_STN)

            layer_dic['SNR'].get_signal('s', start=start_rec, stop=sim_time)
            signal = layer_dic['SNR'].signals['spikes']
            r.append(
                numpy.mean(signal.spike_histogram(time_bin=1, normalized=True),
                           axis=0))

        r = numpy.array(r)
        misc.pickle_save(r, save_at)
    else:
        r = misc.pickle_load(save_at)
    r = numpy.array(r)
    r = misc.convolve(r, res, 'triangle', single=False)
    mr = numpy.mean(r, axis=0)
    mstd = numpy.std(r, axis=0)
    d = [mr, mstd]
    return d
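
# simulate() above smooths each trial's spike histogram with a triangular
# kernel (misc.convolve(..., 'triangle')) before averaging across trials. A
# rough numpy-only stand-in for that step; misc.convolve's exact kernel width
# convention and normalisation may differ, so this is a sketch rather than the
# original implementation:
def _smooth_trials_triangle(r, res):
    """r: (n_trials, n_bins) rate traces; res: triangular kernel width in bins (>= 3)."""
    import numpy
    kernel = numpy.bartlett(res)
    kernel = kernel / kernel.sum()  # normalise so the mean rate is preserved
    smoothed = numpy.array([numpy.convolve(row, kernel, mode='same') for row in r])
    return numpy.mean(smoothed, axis=0), numpy.std(smoothed, axis=0)
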
def my_fmin(load, save_at, x0, interval, syn, N_MSN, burst_rate):

    x = x0
    if not load:

        x, e, i, allvecs = my_opt(x,
                                  sim_time,
                                  interval,
                                  syn,
                                  burst_rate,
                                  N_MSN,
                                  maxiter=10)
        misc.pickle_save([x, e, i, allvecs], save_at)
    else:
        [x, e, i, allvecs] = misc.pickle_load(save_at)
    return x, e
Example No. 6
def fmin(load, save_at, x0, n_exp, n_gpe, res1_stn_rate_ch):
    sim_time = 10000

    if not load:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = opt.fmin(error_fun,
                             x0,
                             args=([sim_time, n_exp, n_gpe, res1_stn_rate_ch]),
                             maxiter=10,
                             maxfun=10,
                             full_output=1,
                             retall=1)

        misc.pickle_save([xopt, fopt, iter, funcalls, warnflag, allvecs],
                         save_at)
    else:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = misc.pickle_load(save_at)
    return xopt, fopt
def simulate_filtering(load, save_at, N_MSN, params_msn_d1, params_msn_d2,
                       models_msn, sim_time, start_rec):

    freq_filter = numpy.linspace(0.1, 2.6, 10)
    seed = range(len(freq_filter))

    model_params = {
        'misc': {
            'N_MSN': N_MSN
        },
        'neurons': {
            'MSN_D1': {
                'n': N_MSN
            },
            'MSN_D2': {
                'n': N_MSN
            }
        }
    }

    if not load:
        mr = []

        for syn in models_msn:
            synapse_models = [syn, 'GPE_SNR_gaba_p']
            mr.append(
                simulate_filtering_fun(freq_filter, params_msn_d1,
                                       params_msn_d2, params_stn,
                                       synapse_models, sim_time, seed, {},
                                       threads, start_rec, model_params))

        mr = numpy.array(mr)
        misc.pickle_save(mr, save_at)

    else:
        mr = misc.pickle_load(save_at)

    syn_ev = freq_filter * N_MSN * 500. / N_MSN

    # Row one in mr is s min, then s max and finally the plastic synapse
    data = numpy.array([syn_ev, freq_filter, mr[0, :], mr[1, :], mr[2, :]])
    return data
def fmin(load, save_at, x0, interval, syn, N_MSN, burst_rate):

    #[current, w_GPE_STN]
    args = (sim_time, interval, syn, burst_rate, N_MSN)
    if not load:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = opt.fmin(error_fun,
                             x0,
                             args=args,
                             maxiter=20,
                             maxfun=10,
                             full_output=1,
                             retall=1)

        misc.pickle_save([xopt, fopt, iter, funcalls, warnflag, allvecs],
                         save_at)
    else:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = misc.pickle_load(save_at)
    return xopt, fopt
Example No. 9
def fmin(load, save_at):
    sim_time = 10000

    x0 = [42, 1.3, 0.35]  #[50, 0.9, 0.2]  #[current, w_GPE_GPE, w_STN_GPE]
    if not load:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = opt.fmin(error_fun,
                             x0,
                             args=([sim_time]),
                             maxiter=50,
                             maxfun=50,
                             full_output=1,
                             retall=1)

        misc.pickle_save([xopt, fopt, iter, funcalls, warnflag, allvecs],
                         save_at)
    else:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = misc.pickle_load(save_at)
    return xopt
def simulate_signal_rates(load=True, hzs=[1, 2]):

    # Path where raw data is saved. For example the spike trains.
    save_result_at = DP['OUTPUT_PATH'] + '/simulate_signal_rates.pkl'
    save_header_at = DP['OUTPUT_PATH'] + '/simulate_signal_rates_header'

    rates = []
    if not load:
        for hz in hzs:
            rates.append(simulate_get_rates(msn_burst_rate=hz, load=load))

        rates = numpy.array(rates)

        header = HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)
        misc.pickle_save(rates, save_result_at)

    else:
        rates = misc.pickle_load(save_result_at)

    return rates
def simulate_1500_eval(load, save_at, n_exp, params_msn_d1, params_msn_d2,
                       params_stn, synapse_models, sim_time, seed, I_e_add,
                       threads, start_rec):

    rates = []
    if not load:
        for i in range(n_exp):
            seed = i
            layer_dic, r = simulate_1500(params_msn_d1, params_msn_d2,
                                         params_stn, synapse_models, sim_time,
                                         seed, I_e_add, threads, start_rec)
            rates.append(r)

        rates = numpy.array(rates)

        misc.pickle_save(rates, save_at)
    else:
        rates = misc.pickle_load(save_at)

    mr = numpy.mean(rates, axis=0)
    stdr = numpy.std(rates, axis=0)

    return mr, stdr
def fmin(load, save_at, x0, n_exp, r_target, params_msn_d1, params_msn_d2,
         params_stn, sim_time, I_e_add, threads, start_rec, model_params,
         p_weights):

    #[current, w_GPE_STN]
    args = (n_exp, r_target, params_msn_d1, params_msn_d2, params_stn,
            sim_time, I_e_add, threads, start_rec, model_params, p_weights)
    if not load:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = opt.fmin(error_fun,
                             x0,
                             args=args,
                             maxiter=20,
                             maxfun=20,
                             full_output=1,
                             retall=1)

        misc.pickle_save([xopt, fopt, iter, funcalls, warnflag, allvecs],
                         save_at)
    else:
        [xopt, fopt, iter, funcalls, warnflag,
         allvecs] = misc.pickle_load(save_at)
    return xopt, fopt
Example No. 13
def simulate_rate_first_and_second_bursts_full(load=True):
    global OUTPUT_PATH

    save_result_at = OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_full.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_full_header'

    # Range
    transient_stops = numpy.arange(100, 3200, 500)

    #hzs=[8,20]
    if not load:
        data = {}

        data['rates'] = []

        for stop in transient_stops:

            mean_rates, mean_rates_std, info_string = simulate_rate_first_and_second_bursts(
                selection_intervals=[0.0, 500.0, 500. + stop, 1000. + stop],
                load=False)
            data['rates'].append(mean_rates[0])

        s = '\n'
        s = s + 'simulate_rate_first_and_second_bursts_full\n'
        s = s + ' %s %5s %s \n' % ('Transient stops', str(transient_stops[0]) +
                                   '-' + str(transient_stops[-1]), 'ms')

        header = s
        misc.text_save(header, save_header_at)
        misc.pickle_save([data, s], save_result_at)
        info_string = s
    elif load:
        data, info_string = misc.pickle_load(save_result_at)

    data['rates'] = numpy.array(data['rates'])

    return transient_stops, data, info_string
Example No. 14
def simulate_example(load=True):

    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global GPE_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp =200 # number of experiments
    n_exp = 20  # number of experiments

    # Path where raw data is saved. For example the spike trains.
    save_result_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_example.pkl'
    save_header_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_example_header'

    burst_time = 500.
    sim_time = burst_time + SEL_ONSET + 1000.

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    SNR_list = []  # List with SNR groups, one per tested synapse model.
    if not load:
        MSN_base = MyPoissonInput(n=N_MSN_BASE * n_exp, sd=True)
        MSN_burst = MyPoissonInput(n=N_MSN_BURST * n_exp, sd=True)
        GPE = MyPoissonInput(n=N_GPE * n_exp, sd=True)

        # Set spike times MSN and GPe
        # Non bursting MSNs

        for id in MSN_base[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            MSN_base.set_spike_times(id=id,
                                     rates=[MSN_BASE_RATE],
                                     times=[1],
                                     t_stop=sim_time,
                                     seed=seed)

        # Background GPe
        for id in GPE[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            GPE.set_spike_times(id=id,
                                rates=[GPE_BASE_RATE],
                                times=[1],
                                t_stop=sim_time,
                                seed=seed)

        # Bursting MSNs
        for id in MSN_burst[:]:
            rates = [MSN_BASE_RATE, MSN_BURST_RATE, MSN_BASE_RATE]
            times = [1, SEL_ONSET, burst_time + SEL_ONSET]
            t_stop = sim_time
            seed = numpy.random.random_integers(0, 1000000.0)

            MSN_burst.set_spike_times(id=id,
                                      rates=rates,
                                      times=times,
                                      t_stop=t_stop,
                                      seed=seed)

        for i_syn in range(len(SYNAPSE_MODELS_TESTED)):

            params = []
            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            for i in range(n_exp):
                #params.append({'I_e':numpy.random.normal(I_e,
                #                                         0.1*I_e)})
                params.append({'I_e': I_e})

            #{'I_e':SNR_INJECTED_CURRENT}
            SNR = MyGroup(NEURON_MODELS[0],
                          n=n_exp,
                          sd=True,
                          params=params,
                          mm_dt=.1,
                          record_from=[''])

            SNR_list.append(SNR)

        # Connect, experiment specific
        sources_MSN_SNR_base = numpy.arange(0, n_exp * N_MSN_BASE)
        sources_MSN_SNR_burst = numpy.arange(0, n_exp * N_MSN_BURST)

        targets_MSN_SNR_base = numpy.mgrid[0:n_exp, 0:N_MSN_BASE][0].reshape(
            1, N_MSN_BASE * n_exp)[0]
        targets_MSN_SNR_burst = numpy.mgrid[0:n_exp, 0:N_MSN_BURST][0].reshape(
            1, N_MSN_BURST * n_exp)[0]

        sources_GPE_SNR = numpy.arange(0, n_exp * N_GPE)
        targets_GPE_SNR = numpy.mgrid[0:n_exp,
                                      0:N_GPE][0].reshape(1, N_GPE * n_exp)[0]

        for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
            syn = SYNAPSE_MODELS_TESTED[i_syn]
            SNR = SNR_list[i_syn]
            my_nest.Connect(MSN_base[sources_MSN_SNR_base],
                            SNR[targets_MSN_SNR_base],
                            model=syn)
            my_nest.Connect(MSN_burst[sources_MSN_SNR_burst],
                            SNR[targets_MSN_SNR_burst],
                            model=syn)
            my_nest.Connect(GPE[sources_GPE_SNR],
                            SNR[targets_GPE_SNR],
                            model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        MSN_base.get_signal('s', start=0, stop=sim_time)
        MSN_burst.get_signal('s', start=0, stop=sim_time)

        for SNR in SNR_list:
            SNR.get_signal('s', start=0, stop=sim_time)

        # Get firing rates of MSNs
        MSN_firing_rates = []

        MSN_all = copy.deepcopy(MSN_base)
        MSN_all.merge(MSN_burst)

        time_bin = 20.
        groups = [MSN_base, MSN_burst, MSN_all]
        for group in groups:
            timeAxis, firingRates = group.signals['spikes'].my_firing_rate(
                bin=time_bin, display=False)
            MSN_firing_rates.append([timeAxis, firingRates])

        # Pick out spikes for burst, base and all to use in scatter plot
        MSN_spikes_and_ids = []

        g1 = MSN_burst.slice(MSN_burst[0:N_MSN_BURST])
        g2 = MSN_base.slice(MSN_base[0:N_MSN_BASE])

        ids_MSN_burst = range(450, 450 + N_MSN_BURST)
        ids_MSN_base = [id for id in range(N_MSN) if id not in ids_MSN_burst]

        # Rename ids for plotting purpose

        g1_dict = dict([[id1, id2] for id1, id2 in zip(g1.ids, ids_MSN_burst)])
        g2_dict = dict([[id1, id2] for id1, id2 in zip(g2.ids, ids_MSN_base)])

        groups = [g1, g2]
        dics = [g1_dict, g2_dict]
        for group, dic in zip(groups, dics):
            raw_data = group.signals['spikes'].raw_data()
            for i in range(raw_data.shape[0]):
                raw_data[i, 1] = dic[raw_data[i, 1]]
            MSN_spikes_and_ids.append(raw_data)

        #times, binned_data=MSN_base.signals['spikes'].binned_raw_data(0, sim_time, res=1, clip=0)
        #filtered_binned_data=misc.time_resolved_rate(binned_data, 100, kernel_type='triangle', res=1)

        pre_ref_1 = str(SNR_list[0].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))
        pre_ref_2 = str(SNR_list[1].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))
        pre_dyn = str(SNR_list[2].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))

        s = '\n'
        s = s + 'Simulate example:\n'
        s = s + '%s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + '%s %5s %3s \n' % ('Bin size MSN hz:', str(time_bin), 'ms')
        s = s + '%s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE), 'Hz')
        s = s + '%s %5s %3s \n' % ('MSN burst rate:', str(MSN_BURST_RATE),
                                   'Hz')
        s = s + '%s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'Hz')
        s = s + '%s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref_1[0:4], 'Hz')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref_2[0:4], 'Hz')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'Hz')

        header = HEADER_SIMULATION_SETUP + s

        misc.pickle_save([MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s],
                         save_result_at)
        misc.text_save(header, save_header_at)

    else:
        MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s = misc.pickle_load(
            save_result_at)

    return MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s
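
# For the scatter/raster data above, the original NEST ids are remapped to
# consecutive plotting ids via a dict and applied to column 1 of the raw spike
# data (column 0 = spike time, column 1 = neuron id). A small self-contained
# sketch of that remapping on fabricated ids:
def _remap_raster_ids(raw_data, old_ids, new_ids):
    """raw_data: (n_spikes, 2) array of [time, id]; returns a remapped copy."""
    import numpy
    id_map = dict(zip(old_ids, new_ids))
    out = numpy.array(raw_data, dtype=float)
    out[:, 1] = [id_map[i] for i in out[:, 1]]
    return out

# Example: spikes from neurons 7 and 9 drawn as raster rows 0 and 1
# _remap_raster_ids([[10.0, 7], [12.5, 9]], [7, 9], [0, 1])
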
Example No. 15
            simulate_network(params_msn_d1,
                             params_msn_d2,
                             params_stn,
                             synapse_models,
                             sim_time=sim_time,
                             seed=seed,
                             I_e_add={
                                 'SNR': 300,
                                 'STN': 0,
                                 'GPE': 30
                             },
                             threads=4,
                             start_rec=500.))
    misc.pickle_save(layer_dics, save_result_at)
else:
    layer_dics = misc.pickle_load(save_result_at)

params_msn_d1 = {
    'base_rates': [0.1],
    'base_times': [1],
    'mod_rates': [0.1, 20, 0.1, 20, 0.1],
    'mod_times': [1, 1000, 1000 + 500],
    'n_mod': 60
}
params_msn_d2 = {
    'base_rates': [0.1],
    'base_times': [1],
    'mod_rates': [0.1, 20, 0.1],
    'mod_times': [1, 1000, 1000 + 500],
    'n_mod': 0,
    'focus': False,
Example No. 16
def simulate_sensitivity(load, save_result_at, n_exp, params_msn_d1, params_msn_d2, 
                         params_stn, synapse_models,model_params, sim_time, 
                         start_rec):
    
    p_weights=numpy.ones(17)
    p_weights_ch_names=[r'$g^{CTX-STN}_{0}$','$g^{GPE-STN}_{0}$','$g^{MSN_{D2}-GPe}_0$',
                         '$g^{STN-GPe}_{0}$','$g^{GPe-GPe}_{0}$','$g^{MSN_{D1}-SNr}_0$',
                         '$g^{STN-SNr}_{0}$','$g^{GPe-SNr}_{0}$']
    
    #p_weights_ch_names=['$g^{STN-SNr}_{0}$','$g^{GPe-SNr}_{0}$']
    p_weights_ch=[6, 8, 9, 10, 12, 13, 14, 16]
    #p_weights_ch=[14,16]
    p_conn=numpy.ones(7)
    p_conn_names=[r'$N_{MSN_{D1}-SNr}$', r'$N_{MSN_{D2}-GPe}$', r'$N_{STN-GPe}$',
                   r'$N_{GPe-GPe}$', r'$N_{GPe-STN}$', r'$N_{STN-SNr}$', 
                   r'$N_{GPe-SNr}$']
    #p_conn_names=[r'$N_{STN-SNr}$',  r'$N_{GPe-SNr}$']
    p_conn_ch=[0, 1, 2, 3, 4, 5, 6]
    
    
    p_I_e=numpy.ones(3)
    #p_I_e_names=['$I_{In vivo}^{SNr}$', '$I_{In vivo}^{GPe}$', '$I_{In vivo}^{STN}$']
    #p_I_e_ch=[0,1,2]
    
    d=[]
    prop_ch=0.2 # Change each parameter up and down by 20%
    seed=0
    save_result_at_tmp=save_result_at+'weights'
    if not load[0]:
        dd=[]
        seed=2
        mr, mstd=simulate_sensitivity_fun(n_exp, params_msn_d1, params_msn_d2, params_stn,
                              synapse_models, sim_time, seed,
                              {}, threads, 
                              start_rec, model_params, p_weights, p_conn, p_I_e)
        dd=[[-1,0,mr[0],mr[1],mr[2], mstd[0],mstd[1],mstd[2], 0]]
        
        for p in p_weights_ch:
            for prop in [1 + prop_ch, 1 - prop_ch]:
                seed += 1

                p_weights_tmp = copy.deepcopy(p_weights)
                p_weights_tmp[p] *= prop
                mr, mstd = simulate_sensitivity_fun(n_exp, params_msn_d1, params_msn_d2, params_stn,
                                                    synapse_models, sim_time, seed,
                                                    {}, threads,
                                                    start_rec, model_params, p_weights_tmp, p_conn, p_I_e)

                dd.append([0, 0, mr[0], mr[1], mr[2], mstd[0], mstd[1], mstd[2], 0])
        misc.pickle_save(dd, save_result_at_tmp)
    else:
        dd=misc.pickle_load(save_result_at_tmp)   
    d.extend(dd)
    save_result_at_tmp=save_result_at+'conn'
    if not load[1]:    
        dd = []
        for p in p_conn_ch:
            for prop in [1 + prop_ch, 1 - prop_ch]:
                seed += 1
                p_conn_tmp = copy.deepcopy(p_conn)
                p_conn_tmp[p] *= prop
                mr, mstd = simulate_sensitivity_fun(n_exp, params_msn_d1, params_msn_d2, params_stn,
                                                    synapse_models, sim_time, seed,
                                                    {}, threads,
                                                    start_rec, model_params, p_weights, p_conn_tmp, p_I_e)

                dd.append([1, 0, mr[0], mr[1], mr[2], mstd[0], mstd[1], mstd[2], 0])

        misc.pickle_save(dd, save_result_at_tmp)
    else:
        dd=misc.pickle_load(save_result_at_tmp)
    d.extend(dd)
#    save_result_at_tmp=save_result_at+'I_e'
#    if not load[2]:    
#        d=[]
#        for p in p_I_e_ch:
#            for prop in [1+prop_ch,1-prop_ch]:
#               seed+=1
#               p_I_e_tmp=copy.deepcopy(p_I_e)
#               p_I_e_tmp[p]*=prop
#               
#               mr, mstd=simulate_sensitivity_fun(n_exp,params_msn_d1, params_msn_d2, params_stn,
#                                                      synapse_models, sim_time, seed,
#                                                      {}, threads, start_rec, model_params, 
#                                                      p_weights, p_conn, p_I_e_tmp)
#               
#               d.append([2,0,mr[0],mr[1],mr[2], mstd[0],mstd[1],mstd[2], 0])
#                
#        misc.pickle_save(d,save_result_at_tmp)     
#    else:
#        d.extend(misc.pickle_load(save_result_at_tmp) ) 
        
    
       
    d=numpy.array(d)
    br=d[0,2:5] 
    bstd=d[0,5:8] 
    
    up=d[1::2,2:5]
    down=d[2::2,2:5]
    upstd=d[1::2,5:8]
    downstd=d[2::2,5:8]    
    dp=numpy.abs(up-down)/2.
    
    
    # Express up/down perturbed rates as percent change relative to baseline
    for i in range(up.shape[0]):
        up[i,:]-=br
        up[i,:]/=br
        up[i,:]*=100.
        down[i,:]-=br
        down[i,:]/=br    
        down[i,:]*=100.
        
        upstd[i,:]-=bstd
        upstd[i,:]/=bstd
        upstd[i,:]*=100.
        downstd[i,:]-=bstd
        downstd[i,:]/=bstd    
        downstd[i,:]*=100.
          
    data_rate_change=[[up, upstd],[down, downstd]]      
    return numpy.array(d), data_rate_change, dp, p_weights_ch_names + p_conn_names #+ p_I_e_names
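
# simulate_sensitivity() perturbs each weight/connectivity parameter by +/-20%
# and expresses the resulting SNr/GPe/STN rate change as a percentage of the
# unperturbed baseline. The core arithmetic, shown on fabricated numbers:
def _percent_change(perturbed, baseline):
    """Element-wise (perturbed - baseline) / baseline in percent."""
    import numpy
    perturbed = numpy.asarray(perturbed, dtype=float)
    baseline = numpy.asarray(baseline, dtype=float)
    return (perturbed - baseline) / baseline * 100.

# e.g. baseline rates [30., 60., 10.] and one perturbed run [33., 57., 10.]:
# _percent_change([33., 57., 10.], [30., 60., 10.])  # -> [10., -5., 0.]
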
Example No. 17
def simulate_recovery(revoceryTimes, load=True):
    
    # Path where raw data is saved. For example the spike trains.
    save_result_at=OUTPUT_PATH+'/simulate_recovery.pkl'
    save_header_at=OUTPUT_PATH+'/simulate_recovery_header'   
    
    relativeRecovery=[]
    n=len(revoceryTimes)
    if not load:
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()  
            model_list, model_dict=models()     
            my_nest.MyLoadModels( model_list, NEURON_MODELS )
            my_nest.MyLoadModels( model_list, [syn])
            
            ss=my_nest.GetDefaults(syn)       
            synapticEficacy = ss['weight']*ss['U'] 
    
            SNR = MyGroup( NEURON_MODELS[0], n, mm=True, mm_dt = .1, 
                           params={'I_e':-150.}, record_from=['g_AMPA'])
            
            tSim=10000
            spikeTimes=[]
            for rt in revoceryTimes:
                #spikeTimes.append(numpy.array([1.,11.,21.,31.,41.,41+rt]))
                
                # Chosen so that the train starts at a paired-pulse ratio of 0.2
                spikeTimes.append(numpy.array([1.,11.,21.,31.,41.,
                                               51.,61.,71.,81.,91.,
                                               101.,111.,121.,131.,141.,
                                               151.,161.,171.,181.,191.,
                                               191+rt]))
     
            for target, st in zip(SNR, spikeTimes ) :
       
                source = my_nest.Create('spike_generator', 
                                    params={'spike_times':st} )
                my_nest.SetDefaults(syn, params={'delay':1.})
                my_nest.Connect(source, [target], model=syn)
        
            my_nest.MySimulate(tSim)
            SNR.get_signal( 'g','g_AMPA', stop=tSim ) # retrieve signal
            
            signal=SNR.signals['g_AMPA']
            
            tmpSteadyState=[]
            for i, st in enumerate(spikeTimes, start=1):
                
                if SNR.mm_dt==0.1:  indecies=numpy.int64(numpy.ceil(st*10))+9
                elif SNR.mm_dt==1.: indecies=numpy.int64(numpy.ceil(st))
                
                values=signal[i].signal[indecies]-signal[i].signal[indecies-1]
                
                tmpSteadyState.append(values[-1]/synapticEficacy)
                #tmpSteadyState.append(max(values)/synapticEficacy)
                
            relativeRecovery.append(tmpSteadyState)
            
        relativeRecovery=numpy.array(relativeRecovery)
        
        
        header=HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)    
        misc.pickle_save([revoceryTimes, relativeRecovery], save_result_at)
        

        
    elif load: 
            revoceryTimes, relativeRecovery=misc.pickle_load(save_result_at)
        
    return revoceryTimes, relativeRecovery
Example No. 18
def simulate_steady_state_freq(frequencies, flag='ss', load=True):
    
    # Path where raw data is saved. For example the spike trains.
    save_result_at=OUTPUT_PATH+'/simulate_steady_state_freq.pkl'
    save_header_at=OUTPUT_PATH+'/simulate_steady_state_freq_header'   
    
    relativeFacilitation=[]
    n=len(frequencies)
    if not load:    
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()   
            model_list, model_dict=models()    
            my_nest.MyLoadModels( model_list, NEURON_MODELS )
            my_nest.MyLoadModels( model_list, [syn])
            
                    
            SNR = MyGroup( NEURON_MODELS[0], n, mm=True, mm_dt = .1, 
                               params={'I_e':-150.},
                           record_from=['g_AMPA'] )

            tSim=5*1000/frequencies[0]  
            spikeTimes=[]    
            tmpSteadyState=[]    
            for f in frequencies :

                isi  = 1000./f
                spikeTimes.append(numpy.arange(1,tSim,isi))
            
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(source, [target], model=syn)

            my_nest.MySimulate(tSim)

            SNR.get_signal('g', 'g_AMPA', stop=tSim)  # retrieve signal

            signal = SNR.signals['g_AMPA']

            for i, st in enumerate(spikeTimes, start=1):

                if SNR.mm_dt == 0.1:
                    indecies = numpy.int64(numpy.ceil(st*10)) + 9
                elif SNR.mm_dt == 1.:
                    indecies = numpy.int64(numpy.ceil(st))

                values = signal[i].signal[indecies] - signal[i].signal[indecies-1]

                ss = my_nest.GetDefaults(syn)
                synapticEficacy = ss['weight']*ss['U']

                if flag == 'ss':
                    tmpSteadyState.append(values[-1]/synapticEficacy)
                if flag == 'max':
                    tmpSteadyState.append(max(values)/synapticEficacy)
                    
            relativeFacilitation.append(tmpSteadyState)
            
        relativeFacilitation=numpy.array(relativeFacilitation)
        
    
        header=HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)
        misc.pickle_save([frequencies, relativeFacilitation], save_result_at)

        
    elif load: 
            frequencies, relativeFacilitation=misc.pickle_load(save_result_at)
        
    return frequencies, relativeFacilitation
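
# Both simulate_recovery() and simulate_steady_state_freq() read out the
# conductance jump caused by each presynaptic spike by differencing the
# recorded g_AMPA trace at the sample index of the spike (for mm_dt = 0.1 ms
# the code uses index = ceil(10*t_spike) + 9), then normalise by the static
# efficacy weight*U. A self-contained sketch on a synthetic trace; the +9
# offset is copied from the code above and depends on how the recorder aligns
# its samples:
def _jumps_at_spikes(trace, spike_times_ms, dt_ms=0.1, offset=9):
    import numpy
    trace = numpy.asarray(trace, dtype=float)
    idx = numpy.int64(numpy.ceil(numpy.asarray(spike_times_ms) / dt_ms)) + offset
    return trace[idx] - trace[idx - 1]

# Synthetic trace sampled at 0.1 ms: steps of 1.0 at 10 ms and 0.5 at 20 ms
# trace = numpy.zeros(400); trace[109:] += 1.0; trace[209:] += 0.5
# _jumps_at_spikes(trace, [10., 20.])  # -> [1.0, 0.5]
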
Example No. 19
def simulate_example(hz=0, load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_SEL
    global N_MSN
    global N_STN
    global MSN_RATE_BASE
    global STN_BASE_RATE
    global SNAME
    global SPATH
    global SYNAPSE_MODELS
    global SEL_ONSET
    global GPE_BASE_RATE

    #n_exp = 20
    n_exp = 200

    RATE_SELE = hz  # Selection rate
    save_at = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl'

    sim_time = SEL_TIME + SEL_ONSET + 500.
    SNAME_NB = hz + 1000

    experiments = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    GPE_list = []  # GPE input for each experiment
    for i_exp in experiments:
        GPE = MyPoissonInput(n=N_GPE,
                             sd=True,
                             spath=SPATH,
                             sname_nb=SNAME_NB + i_exp)
        GPE_list.append(GPE)

    MSN_list = []  # MSN input for each experiment
    for i_exp in experiments:
        MSN = MyPoissonInput(n=N_MSN, sd=False)
        MSN_list.append(MSN)

    STN_list = []  # STN input for each experiment
    for i_exp in experiments:
        STN = MyPoissonInput(n=N_STN, sd=False)
        STN_list.append(STN)

    SNR_list = []  # SNR groups for each synapse
    I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
    for i_syn in range(len(SYNAPSE_MODELS_TESTED)):
        SNR = MyGroup(NEURON_MODELS[0], n=n_exp, params={'I_e': I_e}, sd=True)
        SNR_list.append(SNR)

    if not load:
        for i_exp in experiments:
            GPE = GPE_list[i_exp]
            MSN = MSN_list[i_exp]
            STN = STN_list[i_exp]

            # Set spike times
            # Base rate MSN
            for id in MSN[:]:
                MSN.set_spike_times(id=id,
                                    rates=[MSN_RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))
            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id,
                                    rates=[STN_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Set spike times
            # Base rate
            for id in GPE[0:N_GPE - N_SEL]:
                GPE.set_spike_times(id=id,
                                    rates=[GPE_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in GPE[N_GPE - N_SEL:N_GPE]:
                rates = [GPE_BASE_RATE, RATE_SELE, GPE_BASE_RATE]
                times = [1, SEL_ONSET, SEL_TIME + SEL_ONSET]
                t_stop = sim_time
                GPE.set_spike_times(id=id,
                                    rates=rates,
                                    times=times,
                                    t_stop=t_stop,
                                    seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                target = SNR_list[i_syn][i_exp]
                my_nest.ConvergentConnect(GPE[:], [target], model=syn)
                my_nest.ConvergentConnect(MSN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[0])
                my_nest.ConvergentConnect(STN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[1])

        my_nest.MySimulate(sim_time)

        for GPE in GPE_list:
            GPE.get_signal('s')
        for SNR in SNR_list:
            SNR.get_signal('s')

        misc.pickle_save([GPE_list, SNR_list], save_at)

    elif load:
        GPE_list, SNR_list = misc.pickle_load(save_at)

    pre_ref = str(SNR_list[0].signals['spikes'].mean_rate(
        SEL_ONSET - 5000, SEL_ONSET))
    pre_dyn = str(SNR_list[1].signals['spikes'].mean_rate(
        SEL_ONSET - 500, SEL_ONSET))

    statusSynapes = []
    for syn in SYNAPSE_MODELS_TESTED:
        statusSynapes.append(my_nest.GetDefaults(syn))

    s = '\n'
    s = s + 'Example:\n'
    s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(experiments)), '#')
    s = s + ' %s %5s %3s \n' % ('N GPEs:', str(N_GPE), '#')

    s = s + ' %s %5s %3s \n' % ('Base rate:', str(GPE_BASE_RATE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME), 'ms')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref[0:4], 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'spikes/s')
    for ss in statusSynapes:
        s = s + '\n'
        s = s + ' %s %10s\n' % ('Synapse', ss['synapsemodel'])
        s = s + ' %s %5s %3s\n' % ('Weight', str(round(ss['weight'], 1)), 'nS')

    return GPE_list, SNR_list, s
Example No. 20
def simulate_example(hz_1=0., hz_2=100., load=True):
    global I_E
    global NEURON_MODELS
    global N_GPE
    global N_SEL
    global N_MSN
    global N_STN
    global MSN_RATE_BASE
    global STN_RATE_BASE
    global SNAME
    global SPATH
    global SYNAPSE_MODELS
    global SEL_ONSET

    N_EXP = 200

    RATE_BASE = 25  # Base rate
    RATE_SELE_1 = hz_1
    RATE_SELE_2 = hz_2  # Selection rate
    SAVE_AT = SPATH + '/' + NEURON_MODELS[0] + '-example.pkl'
    SEL_TIME_1 = 500.
    SEL_TIME_2 = 200.
    sim_time = SEL_TIME_1 + SEL_TIME_2 + SEL_ONSET + 500.
    SNAME_NB = hz_1 + hz_2 + 1000

    EXPERIMENTS = range(N_EXP)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    GPE_list = []  # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput(n=N_GPE, sd=True)
        GPE_list.append(GPE)

    MSN_list = []  # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput(n=N_MSN, sd=True)
        MSN_list.append(MSN)

    STN_list = []  # STN input for each experiment
    for i_exp in EXPERIMENTS:
        STN = MyPoissonInput(n=N_STN, sd=True)
        STN_list.append(STN)

    SNR_list = []  # SNR groups for each synapse
    for i_syn in SYNAPSE_MODELS_TESTED:
        I_e = my_nest.GetDefaults(NEURON_MODELS[0])['I_e'] + I_E
        SNR = MyGroup(NEURON_MODELS[0],
                      n=N_EXP,
                      params={'I_e': I_e},
                      sd=True,
                      mm=False,
                      mm_dt=.1,
                      record_from=[''])
        SNR_list.append(SNR)

    if not load:
        for i_exp in EXPERIMENTS:
            GPE = GPE_list[i_exp]
            MSN = MSN_list[i_exp]
            STN = STN_list[i_exp]

            # Set spike times
            # Base rate MSN
            for id in MSN[:]:
                MSN.set_spike_times(id=id,
                                    rates=[MSN_RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate
            for id in GPE[0:N_GPE - N_SEL]:
                GPE.set_spike_times(id=id,
                                    rates=[RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))
            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id,
                                    rates=[STN_RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in GPE[N_GPE - N_SEL:N_GPE]:
                rates = [RATE_BASE, RATE_SELE_1, RATE_SELE_2, RATE_BASE]
                times = [
                    1, SEL_ONSET, SEL_ONSET + SEL_TIME_1,
                    SEL_ONSET + SEL_TIME_1 + SEL_TIME_2
                ]
                t_stop = sim_time
                GPE.set_spike_times(id=id,
                                    rates=rates,
                                    times=times,
                                    t_stop=t_stop,
                                    seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i, syn in enumerate(SYNAPSE_MODELS_TESTED):
                target = SNR_list[i][i_exp]
                my_nest.ConvergentConnect(GPE[:], [target], model=syn)
                my_nest.ConvergentConnect(MSN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[0])
                my_nest.ConvergentConnect(STN[:], [target],
                                          model=SYNAPSE_MODELS_BACKGROUND[1])

        my_nest.MySimulate(sim_time)

        for GPE in GPE_list:
            GPE.get_signal('s')
        for SNR in SNR_list:
            SNR.get_signal('s')

        misc.pickle_save([GPE_list, SNR_list], SAVE_AT)

    elif load:
        GPE_list, SNR_list = misc.pickle_load(SAVE_AT)

    pre_ref = str(SNR_list[0].signals['spikes'].mean_rate(
        SEL_ONSET - 500, SEL_ONSET))
    pre_dyn = str(SNR_list[1].signals['spikes'].mean_rate(
        SEL_ONSET - 500, SEL_ONSET))

    statusSynapes = []
    for syn in SYNAPSE_MODELS_TESTED:
        statusSynapes.append(my_nest.GetDefaults(syn))

    s = '\n'
    s = s + 'Example:\n'
    s = s + ' %s %5s %3s \n' % ('N experiments:', str(len(EXPERIMENTS)), '#')
    s = s + ' %s %5s %3s \n' % ('N GPEs:', str(N_GPE), '#')

    s = s + ' %s %5s %3s \n' % ('Base rate:', str(RATE_BASE), 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE_1),
                                'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME_1), 'ms')
    s = s + ' %s %5s %3s \n' % ('Selection rate:', str(RATE_SELE_2),
                                'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Selection time:', str(SEL_TIME_2), 'ms')

    s = s + ' %s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref[0:4], 'spikes/s')
    s = s + ' %s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'spikes/s')
    for ss in statusSynapes:
        s = s + '\n'
        s = s + ' %s %10s\n' % ('Synapse', ss['synapsemodel'])
        s = s + ' %s %5s %3s\n' % ('Weight', str(round(ss['weight'], 1)), 'nS')

    return GPE_list, SNR_list, s
Example No. 21
def simulate_selection_vs_neurons_full(selRateInterval,
                                       load_pickle=True,
                                       load_raw=True):
    global OUTPUT_PATH

    save_result_at = OUTPUT_PATH + '/simulate_selection_vs_neurons_full.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_selection_vs_neurons_full_header'

    # Range
    hzs = numpy.arange(7, 49, 1)

    #hzs=[8,20]
    if not load_pickle:
        data = {}

        for syn in range(3):
            data[syn] = {}
            data[syn]['rates_thr'] = [[] for k in range(len(SEL_INTERVALS))]
            data[syn]['rates_std_thr'] = [[]
                                          for k in range(len(SEL_INTERVALS))]

            data[syn]['msn_at_thr'] = [[] for k in range(len(SEL_INTERVALS))]
            data[syn]['n_max_sel'] = [[] for k in range(len(SEL_INTERVALS))]

        n_max_sel = 218
        progress = ''
        i_hz = 0
        for hz in hzs:

            n, rate_data, r_std_data, n_max_sel, s = simulate_selection_vs_neurons(
                SEL_INTERVALS, hz, load_raw, n_max_sel=n_max_sel)
            n_sel_vec = numpy.arange(n_max_sel + 1)

            # Reset n_max_sel
            n_max_sel = 0
            for i_interval in range(len(rate_data)):
                for i_syn in range(len(rate_data[i_interval])):

                    r_syn = rate_data[i_interval][i_syn]
                    r_std_syn = r_std_data[i_interval][i_syn]

                    # Retrieve selection threshold passing

                    r_std_syn_tmp = r_std_syn[r_syn < SELECTION_THR]
                    n_sel_vec_tmp = n_sel_vec[r_syn < SELECTION_THR]
                    r_syn_tmp = r_syn[r_syn < SELECTION_THR]

                    data[i_syn]['rates_thr'][i_interval].append(r_syn_tmp[0])
                    data[i_syn]['rates_std_thr'][i_interval].append(
                        r_std_syn_tmp[0])
                    data[i_syn]['msn_at_thr'][i_interval].append(
                        n_sel_vec_tmp[0])

                    # Find new n_max_sel
                    msn_at_thr = data[i_syn]['msn_at_thr'][i_interval][i_hz]
                    n_max_sel = int(
                        numpy.ceil(max(msn_at_thr * 2.0, n_max_sel)))
                    data[i_syn]['n_max_sel'][i_interval].append(n_max_sel)

            i_hz += 1
            progress += str(hz) + ' hz finished, n_max_sel=' + str(
                n_max_sel) + '\n'
            print progress

        s = '\n'
        s = s + 'simulate_selection_vs_neurons_full\n'
        s = s + ' %s %5s %s \n' % ('Range hz',
                                   str(hzs[0]) + '-' + str(hzs[-1]), '#')

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([data, s], save_result_at)
        info_string = s
    elif load_pickle:
        data, info_string = misc.pickle_load(save_result_at)

    return hzs, data, info_string
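
# simulate_selection_vs_neurons_full() finds, for each synapse model and
# selection interval, the first point at which the SNr rate drops below
# SELECTION_THR by boolean masking and taking element 0 of the masked arrays.
# A minimal sketch of that threshold search (the threshold in the usage line
# is illustrative):
def _first_below_threshold(n_sel_vec, rates, thr):
    """Return (n_msn, rate) at the first rate below thr, or None if never reached."""
    import numpy
    rates = numpy.asarray(rates, dtype=float)
    n_sel_vec = numpy.asarray(n_sel_vec)
    mask = rates < thr
    if not numpy.any(mask):
        return None
    return n_sel_vec[mask][0], rates[mask][0]

# e.g. _first_below_threshold(numpy.arange(5), [30., 22., 14., 9., 5.], 10.)
# -> (3, 9.0)
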
Example No. 22
ax = ax_list[2]
plot_example_firing_frequency_MSN(ax, MSN_firing_rates)
#plot_thr_rate_vs_std(ax, data, hzs)

ax = ax_list[3]
plot_example_SNR(ax, SNR_list)

ax = ax_list[4]
plot_selection_vs_neurons_full(ax, hzs, data)

# plot_selection_vs_neurons

ax = ax_list[5]
# article_filtering.py has to be run before
data = misc.pickle_load(os.getcwd() + '/output/mean_rates_filtering' +
                        NEURON_MODELS[0])
MSNmeanRates = data['MSN_mean_rates']
SNRmeanRates = data['SNR_mean_rates']
print MSNmeanRates
print SNRmeanRates
syn_events = MSNmeanRates * N_MSN
# Note: the indexing below is evaluated but not assigned, so as written it has
# no effect on SNRmeanRates.
SNRmeanRates[2, syn_events < LIM_SYN_EVENTS]
plot_SNr_rate_vs_syn_event2(ax, syn_events, SNRmeanRates)

hz = 20
nb, rate_data, r_std_data, n_max_sel, s = simulate_selection_vs_neurons(
    SEL_INTERVALS, msn_burst_rate=hz, load=True)
i = 2
syn_events = nb * hz + (N_MSN - nb) * 0.1
plot_SNr_rate_vs_syn_event1(ax, syn_events, rate_data[i], r_std_data[i])
Example No. 23
def simulate_hyperdirect(load, N_MSN, save_at, threads, flag_bg=False):

    ra = random.random() * 200.
    start_rec = 900 + ra
    delay = 10.0
    params_msn_d1 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [0.1, 5000.0, 0.1],
        'mod_times': [1, 1000. + ra + delay, 1000. + ra + 2. + delay],
        'n_mod': 0.,
        'bg_rate': 0
    }
    params_msn_d2 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [0.1, 0.1, 0.1],
        'mod_times': [1, 1000, 1000 + 500],
        'n_mod': 0,
        'focus': False,
        'skip': 1,
        'bg_rate': 0
    }
    params_stn = {
        'rate': 300.,
        'mod': True,
        'mod_rate': 0.,
        'mod_times': [1000. + ra, 1000. + ra + 2.]
    }

    model_params = {
        'conns': {
            'MSN_D2_GPE': {
                'lines': False
            },
            'STN_GPE': {
                'lesion': True
            },
            'GPE_STN': {
                'lesion': True
            }
        },
        'neurons': {
            'MSN_D1': {
                'n': N_MSN
            },
            'MSN_D2': {
                'n': N_MSN
            }
        }
    }

    synapse_models = ['MSN_SNR_gaba_p1', 'GPE_SNR_gaba_p']

    if flag_bg: save_at = save_at + '_bg'

    sim_time = 1300.

    resolution = 5
    n_exp = 5

    proportions_d1 = numpy.linspace(0.01, 0.1, resolution)  #arange(1,7,1)*150.
    proportions_d2 = numpy.linspace(0.01, 0.1, resolution)  #arange(1,7,1)*150.
    mods_d1 = proportions_d1 * N_MSN
    mods_d2 = proportions_d2 * N_MSN

    raw_r = []
    conv_r = []
    mean_conv_r = []
    std_conv_r = []
    m_d1 = 1000
    m_d2 = 0
    inputs = []
    i = 0
    if not load:
        tmp_rates = []
        for e in range(n_exp):
            seed = i

            rates_SNR, rates_GPE = simulate_network_direct_indirect_onoff_vs_rate(
                m_d1,
                m_d1,
                params_msn_d1,
                params_msn_d2,
                params_stn,
                synapse_models, {
                    'SNR': 280,
                    'STN': 0,
                    'GPE': 20
                },
                sim_time=sim_time,
                seed=seed,
                threads=threads,
                start_rec=start_rec,
                model_params=model_params,
                flag_bg=flag_bg)

            tmp_rates.append(list(rates_SNR))

            i += 1

        raw_r.append(numpy.array(tmp_rates))

        inputs.append((m_d1, m_d2))

        misc.pickle_save([raw_r, conv_r, mean_conv_r, std_conv_r, inputs],
                         save_at)
    else:
        raw_r, conv_r, mean_conv_r, std_conv_r, inputs = misc.pickle_load(
            save_at)
    conv_r = []
    mean_conv_r = []
    std_conv_r = []

    conv_r, mean_conv_r, std_conv_r = conv_data(raw_r,
                                                conv_r,
                                                mean_conv_r,
                                                std_conv_r,
                                                bin=1,
                                                kernel='triangle')

    #    pylab.plot(mean_conv_r[-1])
    #    pylab.plot(conv_r[-1][0])
    #    pylab.plot(conv_r[-1][1])
    #    pylab.plot(mean_conv_r[-1]-std_conv_r[-1],'--k')
    #    pylab.plot(mean_conv_r[-1]+std_conv_r[-1],'--k')
    #    pylab.show()

    return raw_r, conv_r
Example No. 24
def simulate_direct_vs_indirect(load,
                                N_MSN,
                                burst_rate,
                                save_at,
                                threads,
                                resolution=10,
                                flag_bg=False):

    params_msn_d1 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [0.1, burst_rate, 0.1],
        'mod_times': [1, 1000, 1000 + 500],
        'n_mod': 0,
        'bg_rate': 0
    }
    params_msn_d2 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [0.1, burst_rate, 0.1],
        'mod_times': [1, 1000, 1000 + 500],
        'n_mod': 0,
        'focus': False,
        'skip': 1,
        'bg_rate': 0
    }
    params_stn = {
        'rate': 250.,
        'mod': False,
        'mod_rate': 0.,
        'mod_times': [1000., 1000. + 500.]
    }

    model_params = {
        'conns': {
            'MSN_D2_GPE': {
                'lines': False
            }
        },
        'neurons': {
            'MSN_D1': {
                'n': N_MSN
            },
            'MSN_D2': {
                'n': N_MSN
            }
        }
    }

    synapse_models = ['MSN_SNR_gaba_p1', 'GPE_SNR_gaba_p']

    if flag_bg: save_at = save_at + '_bg'

    sim_time = 2000.

    n_exp = 2

    proportions_d1 = numpy.linspace(0.01, 0.15,
                                    resolution)  #arange(1,7,1)*150.
    proportions_d2 = numpy.linspace(0.01, 0.15,
                                    resolution)  #arange(1,7,1)*150.
    mods_d1 = proportions_d1 * N_MSN
    mods_d2 = proportions_d2 * N_MSN

    raw_r = []
    conv_r = []
    mean_conv_r = []
    std_conv_r = []

    inputs = []
    i = 0
    if not load:
        for m_d1 in mods_d1:
            for m_d2 in mods_d2:
                tmp_rates = []
                for e in range(n_exp):
                    seed = i

                    rates_SNR, rates_GPE = simulate_network_direct_indirect_onoff_vs_rate(
                        m_d1,
                        m_d2,
                        params_msn_d1,
                        params_msn_d2,
                        params_stn,
                        synapse_models, {
                            'SNR': 280,
                            'STN': 0,
                            'GPE': 20
                        },
                        sim_time=sim_time,
                        seed=seed,
                        threads=threads,
                        start_rec=500.,
                        model_params=model_params,
                        flag_bg=flag_bg)

                    tmp_rates.append(list(rates_SNR))

                    i += 1

                raw_r.append(numpy.array(tmp_rates))
                conv_r.append(
                    misc.convolve(raw_r[-1], 100, 'rectangle', single=False))
                mean_conv_r.append(numpy.mean(conv_r[-1], axis=0))
                std_conv_r.append(numpy.std(conv_r[-1], axis=0))
                inputs.append((m_d1, m_d2))

                misc.pickle_save(
                    [raw_r, conv_r, mean_conv_r, std_conv_r, inputs], save_at)
    else:
        raw_r, conv_r, mean_conv_r, std_conv_r, inputs = misc.pickle_load(
            save_at)

    data, data_split, slopes = get_data_and_slopes(inputs, mean_conv_r,
                                                   std_conv_r, resolution,
                                                   N_MSN)

    return data, data_split, slopes, burst_rate, proportions_d1, proportions_d2
Example No. 25
def simulate_indirect(load,
                      N_MSN,
                      save_at,
                      threads,
                      resolution=10,
                      max_prop=1.1,
                      flag_bg=False,
                      skip=1,
                      lines=False):

    params_msn_d1 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [],
        'mod_times': [],
        'n_mod': 0,
        'bg_rate': 0
    }
    params_msn_d2 = {
        'base_rates': [0.1],
        'base_times': [1],
        'mod_rates': [],
        'mod_times': [],
        'n_mod': 0,
        'focus': False,
        'skip': skip,
        'bg_rate': 0
    }
    params_stn = {
        'rate': 250.,
        'mod': False,
        'mod_rate': 0.,
        'mod_times': [1000., 1000. + 500.]
    }

    synapse_models = ['MSN_SNR_gaba_p1', 'GPE_SNR_gaba_p']

    model_params = {
        'misc': {
            'N_MSN': N_MSN
        },
        'conns': {
            'MSN_D2_GPE': {
                'lines': lines
            }
        },
        'neurons': {
            'MSN_D1': {
                'n': N_MSN
            },
            'MSN_D2': {
                'n': N_MSN
            }
        }
    }

    if flag_bg: save_at = save_at + '_bg'

    sim_time = 2000.
    n_exp = 5

    burst_rate = numpy.linspace(15, 50, resolution)
    proportions = numpy.linspace(0.01, max_prop,
                                 resolution)  #arange(1,7,1)*150.
    mods = proportions * N_MSN

    raw_r, conv_r, mean_conv_r, std_conv_r, inputs = [], [], [], [], []
    i = 0
    if not load:
        for m in mods:

            for r in burst_rate:
                tmp_rates = []
                for e in range(n_exp):
                    seed = i
                    rates_SNR, rates_GPE = simulate_indirect_fun(
                        m,
                        r,
                        params_msn_d1,
                        params_msn_d2,
                        params_stn,
                        synapse_models, {
                            'SNR': 280,
                            'STN': 0,
                            'GPE': 20
                        },
                        sim_time=sim_time,
                        seed=seed,
                        threads=threads,
                        start_rec=500.,
                        model_params=model_params,
                        flag_bg=flag_bg,
                        max_mod=max(mods))

                    tmp_rates.append(list(rates_SNR))

                    i += 1

                raw_r.append(numpy.array(tmp_rates))
                conv_r.append(
                    misc.convolve(raw_r[-1], 100, 'rectangle', single=False))
                mean_conv_r.append(numpy.mean(conv_r[-1], axis=0))
                std_conv_r.append(numpy.std(conv_r[-1], axis=0))
                inputs.append((m, r))

                misc.pickle_save(
                    [raw_r, conv_r, mean_conv_r, std_conv_r, inputs], save_at)
    else:
        raw_r, conv_r, mean_conv_r, std_conv_r, inputs = misc.pickle_load(
            save_at)

    data, data_split, slopes = get_data_and_slopes(inputs, mean_conv_r,
                                                   std_conv_r, resolution,
                                                   N_MSN)

    return data, data_split, slopes, burst_rate, proportions
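# simulate_indirect sweeps the (proportion of bursting MSN D2, burst rate)
# plane in row-major order, so inputs and mean_conv_r line up point by point.
# A minimal sketch of that bookkeeping with toy numbers; the final reshape is
# an assumption about how a resolution x resolution grid can be recovered
# (get_data_and_slopes itself is project code not shown here).
import numpy

resolution = 10
N_MSN_toy = 15000  # toy value, for illustration only
proportions = numpy.linspace(0.01, 1.1, resolution)
mods = proportions * N_MSN_toy
burst_rate = numpy.linspace(15, 50, resolution)

inputs = [(m, r) for m in mods for r in burst_rate]    # same loop order as above
summary = numpy.array([m * r for m, r in inputs])      # stand-in for a rate summary
grid = summary.reshape(resolution, resolution)         # rows: mods, cols: burst rates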
Exemplo n.º 26
0
save_result_at = OUTPUT_PATH + '/simulate_target.plk'
p_weights = numpy.ones(17)
n_exp = 5

if 0:
    r_target = []
    for i in range(n_exp):
        seed = i
        r_SNR, r_GPE, r_STN = simulate(params_msn_d1, params_msn_d2,
                                       params_stn, synapse_models, sim_time,
                                       seed, {}, threads, start_rec,
                                       model_params, p_weights)

        r_target.append(r_SNR)
    misc.pickle_save([r_target, r_GPE, r_STN], save_result_at)
else:
    r_target, r_GPE, r_STN = misc.pickle_load(save_result_at)

r_target = numpy.array(r_target)
mr_target = numpy.mean(r_target, axis=0)
# Find the value that the GPE_SNR_ref synapse should be multiplied by so that
# it has the same effect in the network as the plastic synapse at baseline
# network activity
x0 = 1.0
save_at = OUTPUT_PATH + '/simulate_network_fmin' + str(n_exp) + '.plk'
x, e = fmin(0, save_at, x0, n_exp, mr_target, params_msn_d1, params_msn_d2,
            params_stn, sim_time, {}, threads, start_rec, model_params,
            p_weights)

print x, e, r_target, r_GPE, r_STN
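# The comment above describes fitting a single multiplier so that the static
# GPE_SNR_ref synapse reproduces, at baseline activity, the effect of the
# plastic synapse. A minimal sketch of such a one-dimensional fit with
# scipy.optimize.fmin; simulate_rate is a hypothetical stand-in for a network
# run that returns the mean SNr rate for a given weight scaling.
import scipy.optimize as opt


def simulate_rate(scale):
    # Hypothetical placeholder for the real simulation.
    return 30.0 - 5.0 * (scale - 1.3)


def error_fun(x, target):
    # Squared error between simulated and target mean rate.
    return (simulate_rate(x[0]) - target) ** 2


target_rate = 30.0
xopt = opt.fmin(error_fun, [1.0], args=(target_rate,), maxiter=20, disp=False)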
Exemplo n.º 27
0
def simulate_selection_vs_neurons(selection_intervals=[[0.0, 500.0]],
                                  hz=20,
                                  load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global GPE_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp=100
    n_exp = 2

    if hz > 20:
        n_max_sel = 30
    elif hz > 7:
        n_max_sel = 60
    else:
        n_max_sel = 100

    RATE_BASE = 0.1
    RATE_SELE = hz
    save_result_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) + '-hz.pkl')
    save_header_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) +
                      '-hz_header')

    burst_time = 500.
    sim_time = burst_time + SEL_ONSET + 500.

    EXPERIMENTS = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    MSN_list = []  # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput(n=N_MSN + n_max_sel, sd=True)
        MSN_list.append(MSN)

    GPE_list = []  # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput(n=N_GPE, sd=True)
        GPE_list.append(GPE)

    SNR_list = []  # SNR groups for each synapse and number of selected MSN
    SNR_list_experiments = []
    for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
        SNR = []
        for i_sel in range(n_max_sel + 1):  # Plus one to get no burst point

            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            SNR.append(
                MyGroup(NEURON_MODELS[0],
                        n=n_exp,
                        sd=True,
                        params={'I_e': I_e}))

        SNR_list.append(SNR)

    if not load:
        for i_exp in EXPERIMENTS:
            MSN = MSN_list[i_exp]
            GPE = GPE_list[i_exp]

            # Set spike times
            # Base rate
            for id in MSN[0:N_MSN]:
                MSN.set_spike_times(id=id,
                                    rates=[RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in MSN[N_MSN:N_MSN + n_max_sel]:
                rates = [RATE_BASE, RATE_SELE, RATE_BASE]
                times = [1, SEL_ONSET, burst_time + SEL_ONSET]
                t_stop = sim_time
                MSN.set_spike_times(id=id,
                                    rates=rates,
                                    times=times,
                                    t_stop=t_stop,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate GPE
            for id in GPE[:]:
                GPE.set_spike_times(id=id,
                                    rates=[GPE_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                # i_sel goes over 0,..., n_max_sel
                for i_sel in range(0, n_max_sel + 1):
                    target = SNR_list[i_syn][i_sel][i_exp]

                    my_nest.ConvergentConnect(MSN[0:N_MSN - i_sel], [target],
                                              model=syn)
                    my_nest.ConvergentConnect(MSN[N_MSN:N_MSN + i_sel],
                                              [target],
                                              model=syn)
                    my_nest.ConvergentConnect(
                        GPE[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        for SNR_sel in SNR_list:
            for SNR in SNR_sel:
                SNR.get_signal('s')

        sel_interval_mean_rates = []
        sel_interval_mean_rates_std = []
        for i_interval, interval in enumerate(selection_intervals):
            t1 = selection_intervals[i_interval][0]
            t2 = selection_intervals[i_interval][1]

            mean_rates = []
            mean_rates_std = []

            # Time until arrival of spikes in SNr
            delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay']
            for SNR_sel in SNR_list:
                m_r = []
                m_r_std = []
                for SNR in SNR_sel:

                    m_r.append(SNR.signals['spikes'].mean_rate(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))
                    m_r_std.append(SNR.signals['spikes'].mean_rate_std(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))

                mean_rates.append(m_r)
                mean_rates_std.append(m_r_std)

            mean_rates = numpy.array(mean_rates)
            mean_rates_std = numpy.array(mean_rates_std)

            sel_interval_mean_rates.append(mean_rates)
            sel_interval_mean_rates_std.append(mean_rates_std)

        nb_neurons = numpy.arange(0, n_max_sel + 1, 1)

        s = '\n'
        s = s + ' %s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#')
        s = s + ' %s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + ' %s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('MSN burst rate:', str(MSN_BURST_RATE),
                                    'Hz')
        s = s + ' %s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + ' %s %5s %3s \n' % ('SNR_INJECTED_CURRENT:',
                                    str(SNR_INJECTED_CURRENT), 'pA')
        for i_interval, interval in enumerate(selection_intervals):
            s = s + ' %s %5s %3s \n' % ('Sel interval ' + str(i_interval) +
                                        ':', str(interval), 'ms')

        info_string = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([
            nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std,
            info_string
        ], save_result_at)

    elif load:
        nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std, info_string = misc.pickle_load(
            save_result_at)

    return nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std, info_string
Exemplo n.º 28
0
    ax = ax_list[1]
    plot_example_raster_GPE(ax, GPE_list)

    ax = ax_list[2]
    plot_example_firing_frequency_GPE(ax, GPE_list)

    ax = ax_list[3]
    plot_example_SNR(ax, SNR_list)

    ax = ax_list[4]
    plot_selection_vs_neurons(ax, nb, mr)

    ax = ax_list[5]
    # The article filtering simulation has to be run beforehand
    data = misc.pickle_load(os.getcwd() +
                            '/output/mean_rates_GPE_constant_supression' +
                            NEURON_MODELS[0])
    GPEmeanRates = data['GPE_mean_rates']
    SNRmeanRates = data['SNR_mean_rates']
    print GPEmeanRates
    print SNRmeanRates
    syn_events = GPEmeanRates * N_GPE
    #SNRmeanRates[1,syn_events<600]
    plot_SNr_rate_vs_syn_event2(ax, syn_events, SNRmeanRates)

    #syn_events=nbNeurons1*20.0+(N_MSN-nbNeurons1)*0.1
    plot_SNr_rate_vs_syn_event1(ax, (max(nb) - nb) * GPE_BASE_RATE, mr)

    pylab.show()

    # dpi does not matter since svg and pdf are both vector based
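# The synaptic-event bookkeeping used above: the total number of input events
# per second onto one SNr neuron is rate times number of afferents. Toy
# numbers, for illustration only (not the values used in the article):
N_GPE_toy = 30
GPE_rate = 25.0                          # spikes/s per GPe afferent
syn_events_GPE = GPE_rate * N_GPE_toy    # 750 events/s onto one SNr neuron

N_MSN_toy, n_burst = 500, 40             # as in the commented-out MSN line above
syn_events_MSN = n_burst * 20.0 + (N_MSN_toy - n_burst) * 0.1  # burst + background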
Exemplo n.º 29
0
def simulate_selection_vs_neurons_full(selRateInterval,
                                       load_pickle=True,
                                       load_raw=True):
    global N_MSN

    save_result_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_selection_vs_neurons_full.pkl'
    save_header_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_selection_vs_neurons_full_header'

    selection_intervals = [[0, 200], [300, 500]]
    #Range 1
    #hzs=range(5,8)

    # Range 2
    #hzs=range(8,61,1) # MPI can cope with jump 7->8 when n max selected decreases

    # Range
    hzs = range(5, 49, 1)

    #hzs=[8,20]
    if not load_pickle:
        data = {}
        for syn in SYNAPSE_MODELS_TESTED:
            data[syn] = {}
            data[syn]['rates'] = [[] for k in range(len(selection_intervals))]
            data[syn]['selMat'] = [[] for k in range(len(selection_intervals))]
            data[syn]['thrVec'] = [[] for k in range(len(selection_intervals))]

        for hz in hzs:
            n, r, r_std, s = simulate_selection_vs_neurons(
                selection_intervals, hz, load_raw)

            print hz, 'hz finished'
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                for k in range(len(selection_intervals)):
                    data[syn]['rates'][k].append(r[k][i_syn, :])

                # Create matrices.
                # Adjust rates if they have different lengths by appending
                # zeros at the end of the short vectors. Note: this is only
                # valid if the last rate is zero in the vector being extended.

                for k in range(len(selection_intervals)):
                    rates = data[syn]['rates'][k]

                    maxLen = 0
                    for r_vec in rates:
                        if len(r_vec) > maxLen:
                            maxLen = len(r_vec)
                    for i_r, r_vec in enumerate(rates):
                        rates[i_r] = numpy.append(
                            r_vec, numpy.zeros((1, maxLen - len(r_vec))))

                    rates = numpy.array(rates)
                    selMat = rates.copy()
                    thrVec = []

                    for i in range(rates.shape[0]):
                        p = True
                        for j in range(rates.shape[1]):
                            if SELECTION_THR < rates[i, j]:
                                selMat[i, j] = 3
                            elif (SELECTION_THR >= rates[i, j]) and (
                                    SELECTION_THR < rates[i, j - 1]) and p:
                                selMat[i, j] = 2
                                thrVec.append(j + 1)  # Neurons for threshold
                                p = False
                            else:
                                selMat[i, j] = 1
                        if p:
                            thrVec.append(100)

                    data[syn]['selMat'][k] = selMat
                    data[syn]['thrVec'][k] = numpy.array(thrVec)

        if not mpiRun:
            header = HEADER_SIMULATION_SETUP + s
            misc.text_save(header, save_header_at)
            misc.pickle_save(data, save_result_at)

    elif load_pickle:
        data = misc.pickle_load(save_result_at)

    s = '\n'

    info_string = s

    return hzs, data, info_string
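# The selMat/thrVec construction in simulate_selection_vs_neurons_full codes
# each entry of the rate matrix against SELECTION_THR: 3 = still above the
# threshold, 2 = first entry at or below the threshold after one above it
# (its column index + 1 is stored in thrVec as the number of bursting MSNs
# needed for selection), 1 = below threshold otherwise; 100 marks rows where
# the rate never drops below the threshold. A self-contained toy sketch:
import numpy

SELECTION_THR = 5.0
rates = numpy.array([[9.0, 7.0, 4.0, 2.0],
                     [9.0, 8.0, 7.0, 6.0]])

selMat = rates.copy()
thrVec = []
for i in range(rates.shape[0]):
    p = True
    for j in range(rates.shape[1]):
        if SELECTION_THR < rates[i, j]:
            selMat[i, j] = 3
        elif SELECTION_THR >= rates[i, j] and SELECTION_THR < rates[i, j - 1] and p:
            selMat[i, j] = 2
            thrVec.append(j + 1)
            p = False
        else:
            selMat[i, j] = 1
    if p:
        thrVec.append(100)  # threshold never crossed within the tested range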
def simulate_example(MSN_hz=20, GPE_hz=0, load=True, n_gpe_sel=3, sel_time_GPE=500):
    global GPE_BASE_RATE
    global STN_BASE_RATE
    global MSN_BASE_RATE
    global MSN_BURST_TIME

    global NEURON_MODELS
    global N_GPE
    global N_STN
    global N_MSN
    global N_MSN_BURST

    global SNAME
    global SPATH
    global SYNAPSE_MODELS
    global SEL_ONSET
    global SNR_INJECTED_CURRENT
    
    n_exp = 200

    # Result file; the name is assumed here, following the SPATH/SNAME
    # convention used by the surrounding examples.
    save_at = SPATH + '/' + SNAME + '-simulate_example.pkl'
   
    msn_rate_sel = MSN_hz # Selection rate     
    gpe_sel_rate = GPE_hz # Selection rate     

    sel_time_MSN = MSN_BURST_TIME
    sim_time = sel_time_MSN+SEL_ONSET+500.
    
    EXPERIMENTS=range(n_exp)
    
    MODEL_LIST=models()
    my_nest.ResetKernel()       
    my_nest.MyLoadModels( MODEL_LIST, NEURON_MODELS )
    my_nest.MyLoadModels( MODEL_LIST, SYNAPSE_MODELS)      
    my_nest.MyLoadModels( MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)       
 
    
    MSN_list=[] # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput( n=N_MSN+N_MSN_BURST, sd=True)
        MSN_list.append(MSN)
 
    GPE_list=[] # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput( n=N_GPE+n_gpe_sel, sd=True)
        GPE_list.append(GPE)

    STN_list=[] # STN input for each experiment
    for i_exp in EXPERIMENTS:
        STN = MyPoissonInput( n=N_STN, sd=True)
        STN_list.append(STN)

    
    SNR_list=[] # SNR groups for each synapse
    
    
    for i, SNR_i_c in enumerate(SNR_INJECTED_CURRENT):
        I_e=my_nest.GetDefaults(NEURON_MODELS[0])['I_e']+SNR_i_c    
        SNR = MyGroup( NEURON_MODELS[0], n=n_exp, params={'I_e':I_e}, 
                       sd=True, mm=False,
                       mm_dt=.1, record_from=[''])
        SNR_list.append(SNR)

   
    if not load:
        for i_exp in EXPERIMENTS:    
            
            # MSN
            MSN = MSN_list[i_exp]
            
            # Set spike times
            # Base rate
            for id in MSN[0:N_MSN]:                 
                MSN.set_spike_times(id=id, rates=[MSN_BASE_RATE], times=[1], 
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random()*10000.0))               
      
            # Selection MSN        
            for id in MSN[N_MSN:N_MSN+N_MSN_BURST]: 
                rates = [MSN_BASE_RATE, msn_rate_sel, MSN_BASE_RATE]
                times = [1, SEL_ONSET, sel_time_MSN + SEL_ONSET]
                t_stop = sim_time
                MSN.set_spike_times(id=id, rates=rates, times=times, 
                                    t_stop=t_stop, 
                                    seed=int(numpy.random.random()*10000.0))     
        
     
            # GPE
            GPE = GPE_list[i_exp]
            
            # Set spike times
            # Base rate
            for id in GPE[:]:                 
                GPE.set_spike_times(id=id, rates=[GPE_BASE_RATE], times=[1], 
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random()*10000.0))               
      
            # Selection GPE        
            for id in GPE[N_GPE:N_GPE+n_gpe_sel]: 
                rates = [GPE_BASE_RATE, gpe_sel_rate, GPE_BASE_RATE]
                
                # If GPe excited smaller selection time
                times = [1, SEL_ONSET, sel_time_GPE + SEL_ONSET]
                t_stop = sim_time
                GPE.set_spike_times(id=id, rates=rates, times=times, 
                                    t_stop=t_stop, seed=int(numpy.random.random()*100000.0))     

            # STN
            STN = STN_list[i_exp]

            # Base rate STN
            for id in STN[:]:                 
                STN.set_spike_times(id=id, rates=[STN_BASE_RATE], times=[1], 
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random()*10000.0))     
                
            idx_MSN_s=range(0,N_MSN-N_MSN_BURST)
            idx_MSN_s.extend(range(N_MSN,N_MSN+N_MSN_BURST))
            idx_GPE_s=range(0,N_GPE-n_gpe_sel)
            idx_GPE_s.extend(range(N_GPE,N_GPE+n_gpe_sel))
            
            # Connect with MSN burst         
            target=SNR_list[0][i_exp]
            my_nest.ConvergentConnect(MSN[idx_MSN_s], [target], model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[0:N_GPE], [target], model=SYNAPSE_MODELS[1])               
            my_nest.ConvergentConnect(STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) 
      
            # With GPe pause
            target=SNR_list[1][i_exp]
            my_nest.ConvergentConnect(MSN[0:N_MSN], [target], model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[idx_GPE_s], [target], model=SYNAPSE_MODELS[1])                
            my_nest.ConvergentConnect(STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) 
            
            # With MSN burst and GPe pause
            target=SNR_list[2][i_exp]
            my_nest.ConvergentConnect(MSN[idx_MSN_s], [target], model=SYNAPSE_MODELS[0])
            my_nest.ConvergentConnect(GPE[idx_GPE_s], [target], model=SYNAPSE_MODELS[1])         
            my_nest.ConvergentConnect(STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0]) 
                      
        my_nest.MySimulate( sim_time )

        for MSN in MSN_list: 
            MSN.get_signal( 's' )      
        for GPE in GPE_list: 
            GPE.get_signal( 's' )   
        for SNR in SNR_list: 
            SNR.get_signal( 's' ) 

        misc.pickle_save([MSN_list, GPE_list,SNR_list] , save_at)

    if load:
        MSN_list, GPE_list, SNR_list=misc.pickle_load(save_at)
        
    pre_dyn_MSN=str(SNR_list[0].signals['spikes'].mean_rate(SEL_ONSET-500,
                                                            SEL_ONSET)) 
    pre_dyn_GPE=str(SNR_list[1].signals['spikes'].mean_rate(SEL_ONSET-500,
                                                            SEL_ONSET))   
      
    s='\n'
    s=s+'Example:\n'
    s = s + ' %s %5s %3s \n' % ( 'N experiments:', str ( len(EXPERIMENTS) ),  '#' )  
    s = s + ' %s %5s %3s \n' % ( 'N MSN:', str ( N_MSN ),  '#' )  
    s = s + ' %s %5s %3s \n' % ( 'N GPE:', str ( N_GPE ),  '#' )  
    s=s+'\n'
    s = s + ' %s %5s %3s \n' % ( 'Base rate MSN:',   str ( MSN_BASE_RATE),'spikes/s' )     
    s = s + ' %s %5s %3s \n' % ( 'Sel rate MSN:', str ( msn_rate_sel ), 'spikes/s' )
    s = s + ' %s %5s %3s \n' % ( 'Sel time MSN:', str ( sel_time_MSN ), 'ms' )
    s=s+'\n'
    s = s + ' %s %5s %3s \n' % ( 'Base rate GPe:',   str ( GPE_BASE_RATE),'spikes/s' )   
    s = s + ' %s %5s %3s \n' % ( 'Sel rate GPe:', str ( gpe_sel_rate ), 'spikes/s' )  
    s = s + ' %s %5s %3s \n' % ( 'Sel time GPe:', str ( sel_time_GPE ), 'ms' )
    s = s + ' %s %5s %3s \n' % ( 'Pre sel rate Dyn MSN:', pre_dyn_MSN[0:4], 'spikes/s' )
    s = s + ' %s %5s %3s \n' % ( 'Pre sel rate Dyn GPe:', pre_dyn_GPE[0:4], 'spikes/s' )
      
    return MSN_list, GPE_list, SNR_list, s
