def simulate_signal_rates(load=True, hzs=[1, 2]):

    # Path where raw data is saved, e.g. the spike trains.
    save_result_at = DP['OUTPUT_PATH'] + '/simulate_signal_rates.pkl'
    save_header_at = DP['OUTPUT_PATH'] + '/simulate_signal_rates_header'

    rates = []
    if not load:
        for hz in hzs:
            rates.append(simulate_get_rates(msn_burst_rate=hz, load=load))

        rates = numpy.array(rates)

        header = HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)
        misc.pickle_save(rates, save_result_at)

    else:
        rates = misc.pickle_load(save_result_at)

    return rates
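All of the functions in this collection follow the same compute-or-load pattern: run the simulation and pickle the result, or unpickle a previous run when load=True. Below is a minimal, self-contained sketch of that pattern using plain pickle and a stand-in computation; the project's DP, misc and simulate_get_rates objects are module-level helpers not shown here, and the path is just a throwaway demo path.

import os
import pickle

import numpy


def cached_rates(path='/tmp/demo_rates.pkl', hzs=(1, 2), load=True):
    # Reuse a previous result if asked to and one exists on disk
    if load and os.path.exists(path):
        with open(path, 'rb') as f:
            return pickle.load(f)

    # Otherwise "simulate" (here just a placeholder computation) and cache it
    rates = numpy.array([[hz] * 5 for hz in hzs])
    with open(path, 'wb') as f:
        pickle.dump(rates, f)
    return rates


print(cached_rates(load=False))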
Example #2
def simulate_rate_first_and_second_bursts_full(load=True):
    global OUTPUT_PATH

    save_result_at = OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_full.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_full_header'

    # Range
    transient_stops = numpy.arange(100, 3200, 500)

    #hzs=[8,20]
    if not load:
        data = {}

        data['rates'] = []

        for stop in transient_stops:

            mean_rates, mean_rates_std, info_string = simulate_rate_first_and_second_bursts(
                selection_intervals=[0.0, 500.0, 500. + stop, 1000. + stop],
                load=False)
            data['rates'].append(mean_rates[0])

        s = '\n'
        s = s + 'simulate_rate_first_and_second_bursts_full\n'
        s = s + ' %s %5s %s \n' % ('Transient stops', str(transient_stops[0]) +
                                   '-' + str(transient_stops[-1]), 'ms')

        header = s
        misc.text_save(header, save_header_at)
        misc.pickle_save([data, s], save_result_at)
        info_string = s
    elif load:
        data, info_string = misc.pickle_load(save_result_at)

    data['rates'] = numpy.array(data['rates'])

    return transient_stops, data, info_string
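For reference, the selection_intervals built above are offsets from SEL_ONSET: the first 500 ms burst spans intervals[0]-intervals[1] and the second spans intervals[2]-intervals[3], i.e. it starts `stop` ms after the first one ends. A small stand-alone check of that layout (numpy only):

import numpy

transient_stops = numpy.arange(100, 3200, 500)
for stop in transient_stops[:3]:
    intervals = [0.0, 500.0, 500. + stop, 1000. + stop]
    # first burst: intervals[0]-intervals[1], second burst: intervals[2]-intervals[3]
    print(intervals)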
Example #3
def simulate_selection_vs_neurons_full(selRateInterval,
                                       load_pickle=True,
                                       load_raw=True):
    global N_MSN

    save_result_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_selection_vs_neurons_full.pkl'
    save_header_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_selection_vs_neurons_full_header'

    selection_intervals = [[0, 200], [300, 500]]
    #Range 1
    #hzs=range(5,8)

    # Range 2
    #hzs=range(8,61,1) # MPI can cope with jump 7->8 when n max selected decreases

    # Range
    hzs = range(5, 49, 1)

    #hzs=[8,20]
    if not load_pickle:
        data = {}
        for syn in SYNAPSE_MODELS_TESTED:
            data[syn] = {}
            data[syn]['rates'] = [[] for k in range(len(selection_intervals))]
            data[syn]['selMat'] = [[] for k in range(len(selection_intervals))]
            data[syn]['thrVec'] = [[] for k in range(len(selection_intervals))]

        for hz in hzs:
            n, r, r_std, s = simulate_selection_vs_neurons(
                selection_intervals, hz, load_raw)

            print hz, 'hz finished'
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                for k in range(len(selection_intervals)):
                    data[syn]['rates'][k].append(r[k][i_syn, :])

                # Create matrices
                # Adjust rates if they are of different lengths by appending
                # zeros at the end of the short vectors. OBS: this is only
                # valid if the last rate is zero in the vector that is
                # elongated.

                for k in range(len(selection_intervals)):
                    rates = data[syn]['rates'][k]

                    maxLen = 0
                    for r_vec in rates:
                        if len(r_vec) > maxLen:
                            maxLen = len(r_vec)
                    for i_r, r_vec in enumerate(rates):
                        rates[i_r] = numpy.append(
                            r_vec, numpy.zeros((1, maxLen - len(r_vec))))

                    rates = numpy.array(rates)
                    selMat = numpy.copy(rates)
                    thrVec = []

                    for i in range(rates.shape[0]):
                        p = True
                        for j in range(rates.shape[1]):
                            if SELECTION_THR < rates[i, j]:
                                selMat[i, j] = 3
                            elif (SELECTION_THR >= rates[i, j]) and (
                                    SELECTION_THR < rates[i, j - 1]) and p:
                                selMat[i, j] = 2
                                thrVec.append(j + 1)  # Neurons for threshold
                                p = False
                            else:
                                selMat[i, j] = 1
                        if p:
                            thrVec.append(100)

                    data[syn]['selMat'][k] = selMat
                    data[syn]['thrVec'][k] = numpy.array(thrVec)

        if not mpiRun:
            header = HEADER_SIMULATION_SETUP + s
            misc.text_save(header, save_header_at)
            misc.pickle_save(data, save_result_at)

    elif load_pickle:
        data = misc.pickle_load(save_result_at)

    s = '\n'

    info_string = s

    return hzs, data, info_string
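The pad-and-threshold step above (zero-padding the rate vectors and classifying each entry against SELECTION_THR) can be illustrated on toy numbers. This stand-alone sketch uses made-up rates and a made-up threshold, not the project's data:

import numpy

SELECTION_THR_DEMO = 2.0  # hypothetical threshold, stands in for SELECTION_THR
rates = [numpy.array([5., 3., 1., 0.]), numpy.array([4., 0.])]

# Pad the short vectors with zeros so they can be stacked into a matrix
maxLen = max(len(r) for r in rates)
rates = numpy.array([numpy.append(r, numpy.zeros(maxLen - len(r))) for r in rates])

selMat = numpy.copy(rates)
thrVec = []
for i in range(rates.shape[0]):
    passed = False
    for j in range(rates.shape[1]):
        if rates[i, j] > SELECTION_THR_DEMO:
            selMat[i, j] = 3                   # above threshold
        elif rates[i, j - 1] > SELECTION_THR_DEMO and not passed:
            selMat[i, j] = 2                   # first point below threshold
            thrVec.append(j + 1)               # number of bursting MSNs at threshold
            passed = True
        else:
            selMat[i, j] = 1                   # below threshold
    if not passed:
        thrVec.append(100)                     # sentinel: threshold never crossed

print(selMat)
print(thrVec)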
Example #4
def simulate_selection_vs_neurons(selection_intervals=[0.0, 500.0],
                                  hz=20,
                                  load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global GPE_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp=100
    n_exp = 2

    if hz > 20:
        n_max_sel = 30
    elif hz > 7:
        n_max_sel = 60
    else:
        n_max_sel = 100

    RATE_BASE = 0.1
    RATE_SELE = hz
    save_result_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) + '-hz.pkl')
    save_header_at = (OUTPUT_PATH + '/' + FILE_NAME +
                      '-simulate_selection_vs_neurons' + str(hz) +
                      '-hz_header')

    burst_time = 500.
    sim_time = burst_time + SEL_ONSET + 500.

    EXPERIMENTS = range(n_exp)

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    MSN_list = []  # MSN input for each experiment
    for i_exp in EXPERIMENTS:
        MSN = MyPoissonInput(n=N_MSN + n_max_sel, sd=True)
        MSN_list.append(MSN)

    GPE_list = []  # GPE input for each experiment
    for i_exp in EXPERIMENTS:
        GPE = MyPoissonInput(n=N_GPE, sd=True)
        GPE_list.append(GPE)

    SNR_list = []  # SNR groups for each synapse and number of selected MSN
    SNR_list_experiments = []
    for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
        SNR = []
        for i_sel in range(n_max_sel + 1):  # Plus one to get no burst point

            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            SNR.append(
                MyGroup(NEURON_MODELS[0],
                        n=n_exp,
                        sd=True,
                        params={'I_e': I_e}))

        SNR_list.append(SNR)

    if not load:
        for i_exp in EXPERIMENTS:
            MSN = MSN_list[i_exp]
            GPE = GPE_list[i_exp]

            # Set spike times
            # Base rate
            for id in MSN[1:N_MSN]:
                MSN.set_spike_times(id=id,
                                    rates=[RATE_BASE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in MSN[N_MSN:N_MSN + n_max_sel]:
                rates = [RATE_BASE, RATE_SELE, RATE_BASE]
                times = [1, SEL_ONSET, burst_time + SEL_ONSET]
                t_stop = sim_time
                MSN.set_spike_times(id=id,
                                    rates=rates,
                                    times=times,
                                    t_stop=t_stop,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate GPE
            for id in GPE[:]:
                GPE.set_spike_times(id=id,
                                    rates=[GPE_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                # i_sel goes over 0,..., n_max_sel
                for i_sel in range(0, n_max_sel + 1):
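                    # Note: i_sel bursting MSNs replace i_sel background MSNs,
                    # so the total MSN input per SNr neuron stays constant.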
                    target = SNR_list[i_syn][i_sel][i_exp]

                    my_nest.ConvergentConnect(MSN[0:N_MSN - i_sel], [target],
                                              model=syn)
                    my_nest.ConvergentConnect(MSN[N_MSN:N_MSN + i_sel],
                                              [target],
                                              model=syn)
                    my_nest.ConvergentConnect(
                        GPE[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        for SNR_sel in SNR_list:
            for SNR in SNR_sel:
                SNR.get_signal('s')

        sel_interval_mean_rates = []
        sel_interval_mean_rates_std = []
        for i_interval, interval in enumerate(selection_intervals):
            t1 = selection_intervals[i_interval][0]
            t2 = selection_intervals[i_interval][1]

            mean_rates = []
            mean_rates_std = []

            # Time until arrival of spikes in SNr
            delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay']
            for SNR_sel in SNR_list:
                m_r = []
                m_r_std = []
                for SNR in SNR_sel:

                    m_r.append(SNR.signals['spikes'].mean_rate(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))
                    m_r_std.append(SNR.signals['spikes'].mean_rate_std(
                        SEL_ONSET + t1 + delay, SEL_ONSET + t2 + delay))

                mean_rates.append(m_r)
                mean_rates_std.append(m_r_std)

            mean_rates = numpy.array(mean_rates)
            mean_rates_std = numpy.array(mean_rates_std)

            sel_interval_mean_rates.append(mean_rates)
            sel_interval_mean_rates_std.append(mean_rates_std)

        nb_neurons = numpy.arange(0, n_max_sel + 1, 1)

        s = '\n'
        s = s + ' %s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#')
        s = s + ' %s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + ' %s %5s %3s \n' % ('MSN base rate:', str(RATE_BASE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('MSN burst rate:', str(RATE_SELE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'Hz')
        s = s + ' %s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + ' %s %5s %3s \n' % ('SNR_INJECTED_CURRENT:',
                                    str(SNR_INJECTED_CURRENT), 'pA')
        for i_interval, interval in enumerate(selection_intervals):
            s = s + ' %s %5s %3s \n' % ('Sel interval ' + str(i_interval) +
                                        ':', str(interval), 'ms')

        info_string = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([
            nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std,
            info_string
        ], save_result_at)

    elif load:
        nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std, info_string = misc.pickle_load(
            save_result_at)

    return nb_neurons, sel_interval_mean_rates, sel_interval_mean_rates_std, info_string
Example #5
def simulate_example(load=True):

    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global GPE_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp =200 # number of experiments
    n_exp = 20  # number of experiments

    # Path where raw data is saved, e.g. the spike trains.
    save_result_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_example.pkl'
    save_header_at = OUTPUT_PATH + '/' + FILE_NAME + '-simulate_example_header'

    burst_time = 500.
    sim_time = burst_time + SEL_ONSET + 1000.

    MODEL_LIST = models()
    my_nest.ResetKernel()
    my_nest.MyLoadModels(MODEL_LIST, NEURON_MODELS)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(MODEL_LIST, SYNAPSE_MODELS_BACKGROUND)

    SNR_list = []  # List with SNR groups for synapse.
    if not load:
        MSN_base = MyPoissonInput(n=N_MSN_BASE * n_exp, sd=True)
        MSN_burst = MyPoissonInput(n=N_MSN_BURST * n_exp, sd=True)
        GPE = MyPoissonInput(n=N_GPE * n_exp, sd=True)

        # Set spike times MSN and GPe
        # Non bursting MSNs

        for id in MSN_base[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            MSN_base.set_spike_times(id=id,
                                     rates=[MSN_BASE_RATE],
                                     times=[1],
                                     t_stop=sim_time,
                                     seed=seed)

        # Background GPe
        for id in GPE[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            GPE.set_spike_times(id=id,
                                rates=[GPE_BASE_RATE],
                                times=[1],
                                t_stop=sim_time,
                                seed=seed)

        # Bursting MSNs
        for id in MSN_burst[:]:
            rates = [MSN_BASE_RATE, MSN_BURST_RATE, MSN_BASE_RATE]
            times = [1, SEL_ONSET, burst_time + SEL_ONSET]
            t_stop = sim_time
            seed = numpy.random.random_integers(0, 1000000.0)

            MSN_burst.set_spike_times(id=id,
                                      rates=rates,
                                      times=times,
                                      t_stop=t_stop,
                                      seed=seed)

        for i_syn in range(len(SYNAPSE_MODELS_TESTED)):

            params = []
            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            for i in range(n_exp):
                #params.append({'I_e':numpy.random.normal(I_e,
                #                                         0.1*I_e)})
                params.append({'I_e': I_e})

            #{'I_e':SNR_INJECTED_CURRENT}
            SNR = MyGroup(NEURON_MODELS[0],
                          n=n_exp,
                          sd=True,
                          params=params,
                          mm_dt=.1,
                          record_from=[''])

            SNR_list.append(SNR)

        # Connect, experiment specific
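        # sources enumerate every input unit; targets repeat each experiment
        # index once per unit, so all inputs belonging to experiment i end up
        # on SNR neuron i of each group.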
        sources_MSN_SNR_base = numpy.arange(0, n_exp * N_MSN_BASE)
        sources_MSN_SNR_burst = numpy.arange(0, n_exp * N_MSN_BURST)

        targets_MSN_SNR_base = numpy.mgrid[0:n_exp, 0:N_MSN_BASE][0].reshape(
            1, N_MSN_BASE * n_exp)[0]
        targets_MSN_SNR_burst = numpy.mgrid[0:n_exp, 0:N_MSN_BURST][0].reshape(
            1, N_MSN_BURST * n_exp)[0]

        sources_GPE_SNR = numpy.arange(0, n_exp * N_GPE)
        targets_GPE_SNR = numpy.mgrid[0:n_exp,
                                      0:N_GPE][0].reshape(1, N_GPE * n_exp)[0]

        for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
            syn = SYNAPSE_MODELS_TESTED[i_syn]
            SNR = SNR_list[i_syn]
            my_nest.Connect(MSN_base[sources_MSN_SNR_base],
                            SNR[targets_MSN_SNR_base],
                            model=syn)
            my_nest.Connect(MSN_burst[sources_MSN_SNR_burst],
                            SNR[targets_MSN_SNR_burst],
                            model=syn)
            my_nest.Connect(GPE[sources_GPE_SNR],
                            SNR[targets_GPE_SNR],
                            model=SYNAPSE_MODELS_BACKGROUND[0])

        my_nest.MySimulate(sim_time)

        MSN_base.get_signal('s', start=0, stop=sim_time)
        MSN_burst.get_signal('s', start=0, stop=sim_time)

        for SNR in SNR_list:
            SNR.get_signal('s', start=0, stop=sim_time)

        # Get firing rates of MSNs
        MSN_firing_rates = []

        MSN_all = copy.deepcopy(MSN_base)
        MSN_all.merge(MSN_burst)

        time_bin = 20.
        groups = [MSN_base, MSN_burst, MSN_all]
        for group in groups:
            timeAxis, firingRates = group.signals['spikes'].my_firing_rate(
                bin=time_bin, display=False)
            MSN_firing_rates.append([timeAxis, firingRates])

        # Pick out spikes for burst, base and all to use in scatter plot
        MSN_spikes_and_ids = []

        g1 = MSN_burst.slice(MSN_burst[0:N_MSN_BURST])
        g2 = MSN_base.slice(MSN_base[0:N_MSN_BASE])

        ids_MSN_burst = range(450, 450 + N_MSN_BURST)
        ids_MSN_base = [id for id in range(N_MSN) if id not in ids_MSN_burst]

        # Rename ids for plotting purpose

        g1_dict = dict([[id1, id2] for id1, id2 in zip(g1.ids, ids_MSN_burst)])
        g2_dict = dict([[id1, id2] for id1, id2 in zip(g2.ids, ids_MSN_base)])

        groups = [g1, g2]
        dics = [g1_dict, g2_dict]
        for group, dic in zip(groups, dics):
            raw_data = group.signals['spikes'].raw_data()
            for i in range(raw_data.shape[0]):
                raw_data[i, 1] = dic[raw_data[i, 1]]
            MSN_spikes_and_ids.append(raw_data)

        #times, binned_data=MSN_base.signals['spikes'].binned_raw_data(0, sim_time, res=1, clip=0)
        #filtered_binned_data=misc.time_resolved_rate(binned_data, 100, kernel_type='triangle', res=1)

        pre_ref_1 = str(SNR_list[0].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))
        pre_ref_2 = str(SNR_list[1].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))
        pre_dyn = str(SNR_list[2].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))

        s = '\n'
        s = s + 'Simulate example:\n'
        s = s + '%s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + '%s %5s %3s \n' % ('Bin size MSN hz:', str(time_bin), 'ms')
        s = s + '%s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE), 'Hz')
        s = s + '%s %5s %3s \n' % ('MSN burst rate:', str(MSN_BURST_RATE),
                                   'Hz')
        s = s + '%s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'Hz')
        s = s + '%s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Ref 1:', pre_ref_1[0:4], 'Hz')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Ref 2:', pre_ref_2[0:4], 'Hz')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Dyn:', pre_dyn[0:4], 'Hz')

        header = HEADER_SIMULATION_SETUP + s

        misc.pickle_save([MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s],
                         save_result_at)
        misc.text_save(header, save_header_at)

    else:
        MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s = misc.pickle_load(
            save_result_at)

    return MSN_firing_rates, MSN_spikes_and_ids, SNR_list, s
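The numpy.mgrid/reshape construction used above to build the target indices simply repeats each experiment index once per input unit; a tiny stand-alone check with made-up sizes (numpy.repeat gives the same vector):

import numpy

n_exp, n_unit = 3, 4
targets = numpy.mgrid[0:n_exp, 0:n_unit][0].reshape(1, n_unit * n_exp)[0]
print(targets)                                    # [0 0 0 0 1 1 1 1 2 2 2 2]
print(numpy.repeat(numpy.arange(n_exp), n_unit))  # same index vector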
Example #6
def simulate_recovery(recoveryTimes, load=True):
    
    # Path where raw data is saved, e.g. the spike trains.
    save_result_at=OUTPUT_PATH+'/simulate_recovery.pkl'
    save_header_at=OUTPUT_PATH+'/simulate_recovery_header'   
    
    relativeRecovery=[]
    n=len(recoveryTimes)
    if not load:
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()  
            model_list, model_dict=models()     
            my_nest.MyLoadModels( model_list, NEURON_MODELS )
            my_nest.MyLoadModels( model_list, [syn])
            
            ss=my_nest.GetDefaults(syn)       
            synapticEfficacy = ss['weight']*ss['U']
    
            SNR = MyGroup( NEURON_MODELS[0], n, mm=True, mm_dt = .1, 
                           params={'I_e':-150.}, record_from=['g_AMPA'])
            
            tSim=10000
            spikeTimes=[]
            for rt in recoveryTimes:
                #spikeTimes.append(numpy.array([1.,11.,21.,31.,41.,41+rt]))

                # Chosen so that it starts at a paired-pulse ratio of 0.2
                spikeTimes.append(numpy.array([1.,11.,21.,31.,41.,
                                               51.,61.,71.,81.,91.,
                                               101.,111.,121.,131.,141.,
                                               151.,161.,171.,181.,191.,
                                               191+rt]))
     
            for target, st in zip(SNR, spikeTimes ) :
       
                source = my_nest.Create('spike_generator', 
                                    params={'spike_times':st} )
                my_nest.SetDefaults(syn, params={'delay':1.})
                my_nest.Connect(source, [target], model=syn)
        
            my_nest.MySimulate(tSim)
            SNR.get_signal( 'g','g_AMPA', stop=tSim ) # retrieve signal
            
            signal=SNR.signals['g_AMPA']
            
            tmpSteadyState=[]
            for i, st in enumerate(spikeTimes, start=1):
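                # Convert the spike times (ms) into sample indices of the
                # recorded g_AMPA trace (mm_dt is the multimeter resolution in ms)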
                
                if SNR.mm_dt==0.1:  indices=numpy.int64(numpy.ceil(st*10))+9
                elif SNR.mm_dt==1.: indices=numpy.int64(numpy.ceil(st))

                values=signal[i].signal[indices]-signal[i].signal[indices-1]

                tmpSteadyState.append(values[-1]/synapticEfficacy)
                #tmpSteadyState.append(max(values)/synapticEfficacy)
                
            relativeRecovery.append(tmpSteadyState)
            
        relativeRecovery=numpy.array(relativeRecovery)
        
        
        header=HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)    
        misc.pickle_save([recoveryTimes, relativeRecovery], save_result_at)
        

        
    elif load:
        recoveryTimes, relativeRecovery = misc.pickle_load(save_result_at)
        
    return recoveryTimes, relativeRecovery
Example #7
def simulate_steady_state_freq(frequencies, flag='ss', load=True):
    
    # Path where raw data is saved, e.g. the spike trains.
    save_result_at=OUTPUT_PATH+'/simulate_steady_state_freq.pkl'
    save_header_at=OUTPUT_PATH+'/simulate_steady_state_freq_header'   
    
    relativeFacilitation=[]
    n=len(frequencies)
    if not load:    
        for syn in SYNAPSE_MODELS:
            my_nest.ResetKernel()   
            model_list, model_dict=models()    
            my_nest.MyLoadModels( model_list, NEURON_MODELS )
            my_nest.MyLoadModels( model_list, [syn])
            
                    
            SNR = MyGroup( NEURON_MODELS[0], n, mm=True, mm_dt = .1, 
                               params={'I_e':-150.},
                           record_from=['g_AMPA'] )

            tSim=5*1000/frequencies[0]  
            spikeTimes=[]    
            tmpSteadyState=[]    
            for f in frequencies :

                isi  = 1000./f
                spikeTimes.append(numpy.arange(1,tSim,isi))
            
            for target, st in zip(SNR, spikeTimes):
                source = my_nest.Create('spike_generator',
                                        params={'spike_times': st})
                my_nest.SetDefaults(syn, params={'delay': 1.})
                my_nest.Connect(source, [target], model=syn)
            
            my_nest.MySimulate(tSim)
                
            SNR.get_signal( 'g','g_AMPA', stop=tSim ) # retrieve signal
                  
            signal=SNR.signals['g_AMPA']
                
                
            for i, st in enumerate(spikeTimes, start=1):

                if SNR.mm_dt==0.1:  indices=numpy.int64(numpy.ceil(st*10))+9
                elif SNR.mm_dt==1.: indices=numpy.int64(numpy.ceil(st))

                values=signal[i].signal[indices]-signal[i].signal[indices-1]

                ss=my_nest.GetDefaults(syn)
                synapticEfficacy = ss['weight']*ss['U']

                if flag=='ss':  tmpSteadyState.append(values[-1]/synapticEfficacy)
                if flag=='max': tmpSteadyState.append(max(values)/synapticEfficacy)
                    
            relativeFacilitation.append(tmpSteadyState)
            
        relativeFacilitation=numpy.array(relativeFacilitation)
        
    
        header=HEADER_SIMULATION_SETUP
        misc.text_save(header, save_header_at)
        misc.pickle_save([frequencies, relativeFacilitation], save_result_at)

        
    elif load:
        frequencies, relativeFacilitation=misc.pickle_load(save_result_at)
        
    return frequencies, relativeFacilitation
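The relativeFacilitation measure above is the per-spike conductance increment at steady state divided by the first-pulse efficacy weight*U. If the synapse models behave like Tsodyks-Markram-style facilitating/depressing synapses (an assumption; the actual models come from models()), the same ratio can be sketched with one common discrete form of that recurrence on made-up parameters:

import numpy


def tm_relative_amplitudes(freq, n_spikes=50, U=0.2, tau_rec=200., tau_fac=500.):
    # u facilitates at each spike and decays back; x depletes and recovers.
    isi = 1000. / freq
    u, x, amps = 0.0, 1.0, []
    for _ in range(n_spikes):
        u = u + U * (1. - u)          # facilitation jump at the spike
        amps.append(u * x)            # released fraction ~ conductance step
        x = x * (1. - u)              # depletion by this spike
        x = 1. - (1. - x) * numpy.exp(-isi / tau_rec)   # recovery until next spike
        u = u * numpy.exp(-isi / tau_fac)               # facilitation decay
    return numpy.array(amps) / (U * 1.0)  # normalize by the first-pulse efficacy


for f in [5., 10., 20., 40.]:
    print(tm_relative_amplitudes(f)[-1])  # steady-state value, analogous to flag='ss'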
Example #8
def simulate_selection_vs_neurons_full(selRateInterval,
                                       load_pickle=True,
                                       load_raw=True):
    global OUTPUT_PATH

    save_result_at = OUTPUT_PATH + '/simulate_selection_vs_neurons_full.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_selection_vs_neurons_full_header'

    # Range
    hzs = numpy.arange(7, 49, 1)

    #hzs=[8,20]
    if not load_pickle:
        data = {}

        for syn in range(3):
            data[syn] = {}
            data[syn]['rates_thr'] = [[] for k in range(len(SEL_INTERVALS))]
            data[syn]['rates_std_thr'] = [[]
                                          for k in range(len(SEL_INTERVALS))]

            data[syn]['msn_at_thr'] = [[] for k in range(len(SEL_INTERVALS))]
            data[syn]['n_max_sel'] = [[] for k in range(len(SEL_INTERVALS))]

        n_max_sel = 218
        progress = ''
        i_hz = 0
        for hz in hzs:

            n, rate_data, r_std_data, n_max_sel, s = simulate_selection_vs_neurons(
                SEL_INTERVALS, hz, load_raw, n_max_sel=n_max_sel)
            n_sel_vec = numpy.arange(n_max_sel + 1)

            # Reset n_max_sel; it is re-estimated from this hz step below
            n_max_sel = 0
            for i_interval in range(len(rate_data)):
                for i_syn in range(len(rate_data[i_interval])):

                    r_syn = rate_data[i_interval][i_syn]
                    r_std_syn = r_std_data[i_interval][i_syn]

                    # Retrieve selection threshold passing

                    r_std_syn_tmp = r_std_syn[r_syn < SELECTION_THR]
                    n_sel_vec_tmp = n_sel_vec[r_syn < SELECTION_THR]
                    r_syn_tmp = r_syn[r_syn < SELECTION_THR]

                    data[i_syn]['rates_thr'][i_interval].append(r_syn_tmp[0])
                    data[i_syn]['rates_std_thr'][i_interval].append(
                        r_std_syn_tmp[0])
                    data[i_syn]['msn_at_thr'][i_interval].append(
                        n_sel_vec_tmp[0])

                    # Find new n_max_sel: simulate up to twice the number of
                    # MSNs needed to reach threshold at this rate
                    msn_at_thr = data[i_syn]['msn_at_thr'][i_interval][i_hz]
                    n_max_sel = int(
                        numpy.ceil(max(msn_at_thr * 2.0, n_max_sel)))
                    data[i_syn]['n_max_sel'][i_interval].append(n_max_sel)

            i_hz += 1
            progress += str(hz) + ' hz finished, n_max_sel=' + str(
                n_max_sel) + '\n'
            print progress

        s = '\n'
        s = s + 'simulate_selection_vs_neurons_full\n'
        s = s + ' %s %5s %s \n' % ('Range hz',
                                   str(hzs[0]) + '-' + str(hzs[-1]), '#')

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([data, s], save_result_at)
        info_string = s
    elif load_pickle:
        data, info_string = misc.pickle_load(save_result_at)

    return hzs, data, info_string
Example #9
def simulate_rate_first_and_second_bursts(
        selection_intervals=[0.0, 500.0, 1000., 1500.], load=True):
    global SNR_INJECTED_CURRENT
    global NEURON_MODELS
    global N_GPE
    global N_MSN_BURST
    global N_MSN
    global N_STN
    global MSN_BASE_RATE
    global GPE_BASE_RATE
    global STN_BASE_RATE
    global FILE_NAME
    global OUTPUT_PATH
    global SYNAPSE_MODELS_TESTED
    global SEL_ONSET

    #n_exp=20
    n_exp = 200
    msn_burst_rate = 20
    n_msn_burst = N_MSN_BURST

    transient_stop = selection_intervals[2] - selection_intervals[1]
    save_result_at = (OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_' +
                      str(transient_stop) + 'ms.pkl')
    save_header_at = (OUTPUT_PATH + '/simulate_rate_first_and_second_bursts_' +
                      str(transient_stop) + 'ms_header')

    burst_time = 500.
    sim_time = SEL_ONSET + selection_intervals[3] + 500.

    EXPERIMENTS = range(n_exp)

    model_list = models()
    my_nest.ResetKernel(threads=1)
    my_nest.MyLoadModels(model_list, NEURON_MODELS)
    my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND)
    if not load:
        MSN_list = []  # MSN input for each experiment
        for i_exp in EXPERIMENTS:
            MSN = MyPoissonInput(n=N_MSN + n_msn_burst, sd=True)
            MSN_list.append(MSN)

        GPE_list = []  # GPE input for each experiment
        for i_exp in EXPERIMENTS:
            GPE = MyPoissonInput(n=N_GPE, sd=True)
            GPE_list.append(GPE)

        STN_list = []  # GPE input for each experiment
        for i_exp in EXPERIMENTS:
            STN = MyPoissonInput(n=N_STN, sd=True)
            STN_list.append(STN)

        SNR_list = []  # SNR groups for each synapse and number of selected MSN
        SNR_list_experiments = []
        for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
            SNR = []
            for i_sel in range(1):

                I_e = my_nest.GetDefaults(
                    NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
                SNR.append(
                    MyGroup(NEURON_MODELS[0],
                            n=n_exp,
                            sd=True,
                            params={'I_e': I_e}))

            SNR_list.append(SNR)

        for i_exp in EXPERIMENTS:
            MSN = MSN_list[i_exp]
            GPE = GPE_list[i_exp]
            STN = STN_list[i_exp]

            # Set spike times
            # Base rate
            for id in MSN[1:N_MSN]:
                MSN.set_spike_times(id=id,
                                    rates=[MSN_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Selection
            for id in MSN[N_MSN:N_MSN + n_msn_burst]:
                rates = [
                    MSN_BASE_RATE, msn_burst_rate, MSN_BASE_RATE,
                    msn_burst_rate, MSN_BASE_RATE
                ]

                t1 = selection_intervals[0]
                t2 = selection_intervals[1]
                t3 = selection_intervals[2]
                t4 = selection_intervals[3]
                times = [
                    1, SEL_ONSET + t1, SEL_ONSET + t2, SEL_ONSET + t3,
                    SEL_ONSET + t4
                ]
                t_stop = sim_time
                MSN.set_spike_times(id=id,
                                    rates=rates,
                                    times=times,
                                    t_stop=t_stop,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate GPE
            for id in GPE[:]:
                GPE.set_spike_times(id=id,
                                    rates=[GPE_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id,
                                    rates=[STN_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Connect
            for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
                # Only one burst size (n_msn_burst) is used here, so i_sel is always 0
                for i_sel, n_sel in enumerate(
                        range(n_msn_burst, n_msn_burst + 1)):
                    target = SNR_list[i_syn][i_sel][i_exp]

                    my_nest.ConvergentConnect(MSN[0:N_MSN - n_sel], [target],
                                              model=syn)
                    my_nest.ConvergentConnect(MSN[N_MSN:N_MSN + n_sel],
                                              [target],
                                              model=syn)
                    my_nest.ConvergentConnect(
                        GPE[:], [target], model=SYNAPSE_MODELS_BACKGROUND[0])
                    my_nest.ConvergentConnect(
                        STN[:], [target], model=SYNAPSE_MODELS_BACKGROUND[1])

        my_nest.MySimulate(sim_time)

        for SNR_sel in SNR_list:
            for SNR in SNR_sel:
                SNR.get_signal('s')

        t1 = selection_intervals[0]
        t3 = selection_intervals[2]

        mean_rates = []
        mean_rates_std = []
        # Time until arrival of spikes in SNr
        delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay']
        for SNR_sel in SNR_list:
            m_r = []
            m_r_std = []
            for SNR in SNR_sel:

                # Mean rate during the first 200 ms of each burst
                m_r.append(SNR.signals['spikes'].mean_rate(
                    SEL_ONSET + t1 + delay, SEL_ONSET + t1 + 200 + delay))
                m_r.append(SNR.signals['spikes'].mean_rate(
                    SEL_ONSET + t3 + delay, SEL_ONSET + t3 + 200 + delay))

                m_r_std.append(SNR.signals['spikes'].mean_rate_std(
                    SEL_ONSET + t1 + delay, SEL_ONSET + t1 + 200 + delay))
                m_r_std.append(SNR.signals['spikes'].mean_rate_std(
                    SEL_ONSET + t3 + delay, SEL_ONSET + t3 + 200 + delay))

            mean_rates.append(m_r)
            mean_rates_std.append(m_r_std)

        mean_rates = numpy.array(mean_rates)
        mean_rates_std = numpy.array(mean_rates_std)

        s = '\n'
        s = s + 'simulate_rate_first_and_second_bursts\n'
        s = s + '%s %5s %3s \n' % ('Simulation time:', str(sim_time), 'ms')
        s = s + '%s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#')
        s = s + '%s %5s %3s \n' % ('N MSN_bursts:', str(n_msn_burst), '#')
        s = s + '%s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + '%s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE),
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('MSN burst rate:', str(msn_burst_rate),
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('MSN burst time:', str(burst_time), 'ms')
        s = s + '%s %5s %3s \n' % ('GPe base rate:', str(GPE_BASE_RATE),
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('SNR injected current:',
                                   str(SNR_INJECTED_CURRENT), 'pA')
        for i_interval, interval in enumerate(selection_intervals):
            s = s + '%s %5s %3s \n' % ('Sel interval ' + str(i_interval) + ':',
                                       str(interval), 'ms')

        info_string = s

        header = s
        misc.text_save(header, save_header_at)
        misc.pickle_save([mean_rates, mean_rates_std, info_string],
                         save_result_at)

    elif load:
        mean_rates, mean_rates_std, info_string = misc.pickle_load(
            save_result_at)

    return mean_rates, mean_rates_std, info_string
Example #10
def simulate_example(load=True):

    global GPE_BASE_RATE
    global FILE_NAME
    global N_GPE
    global N_STN
    global N_MSN_BURST
    global N_MSN
    global NEURON_MODELS
    global OUTPUT_PATH
    global SEL_ONSET
    global SNR_INJECTED_CURRENT
    global SYNAPSE_MODELS_TESTED

    #n_exp =200 # number of experiments
    n_exp = 200  # number of experiments

    # Path where raw data is saved, e.g. the spike trains.
    save_result_at = OUTPUT_PATH + '/simulate_example.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_example_header'

    burst_time = 500.
    sim_time = SEL_INTERVAL_2[1] + 500

    model_list = models()
    my_nest.ResetKernel(threads=8)
    my_nest.MyLoadModels(model_list, NEURON_MODELS)
    my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED)
    my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND)

    SNR_list = []  # List with SNR groups for synapse.
    if not load:
        MSN_base = MyPoissonInput(n=N_MSN_BASE * n_exp)
        MSN_burst = MyPoissonInput(n=N_MSN_BURST * n_exp)
        GPE = MyPoissonInput(n=N_GPE * n_exp, sd=True)
        STN = MyPoissonInput(n=N_STN * n_exp, sd=True)

        # Set spike times MSN and GPe
        # Non bursting MSNs
        for id in MSN_base[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            MSN_base.set_spike_times(id=id,
                                     rates=[MSN_BASE_RATE],
                                     times=[1],
                                     t_stop=sim_time,
                                     seed=seed)

        # Background GPe
        for id in GPE[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            GPE.set_spike_times(id=id,
                                rates=[GPE_BASE_RATE],
                                times=[1],
                                t_stop=sim_time,
                                seed=seed)
        # Background STN
        for id in STN[:]:
            seed = numpy.random.random_integers(0, 1000000.0)
            STN.set_spike_times(id=id,
                                rates=[STN_BASE_RATE],
                                times=[1],
                                t_stop=sim_time,
                                seed=seed)

        # Bursting MSNs
        for id in MSN_burst[:]:
            rates = [
                MSN_BASE_RATE, MSN_BURST_RATE, MSN_BASE_RATE, MSN_BURST_RATE,
                MSN_BASE_RATE
            ]
            times = [
                1, SEL_INTERVAL_1[0], SEL_INTERVAL_1[1], SEL_INTERVAL_2[0],
                SEL_INTERVAL_2[1]
            ]
            t_stop = sim_time
            seed = numpy.random.random_integers(0, 1000000.0)

            MSN_burst.set_spike_times(id=id,
                                      rates=rates,
                                      times=times,
                                      t_stop=t_stop,
                                      seed=seed)

        for i_syn in range(len(SYNAPSE_MODELS_TESTED)):

            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            SNR = MyGroup(NEURON_MODELS[0],
                          n=n_exp,
                          sd=True,
                          params={'I_e': I_e},
                          mm_dt=.1,
                          record_from=[''])

            SNR_list.append(SNR)

        # Connect, experiment specific
        sources_MSN_SNR_base = numpy.arange(0, n_exp * N_MSN_BASE)
        sources_MSN_SNR_burst = numpy.arange(0, n_exp * N_MSN_BURST)

        targets_MSN_SNR_base = numpy.mgrid[0:n_exp, 0:N_MSN_BASE][0].reshape(
            1, N_MSN_BASE * n_exp)[0]
        targets_MSN_SNR_burst = numpy.mgrid[0:n_exp, 0:N_MSN_BURST][0].reshape(
            1, N_MSN_BURST * n_exp)[0]

        sources_GPE_SNR = numpy.arange(0, n_exp * N_GPE)
        targets_GPE_SNR = numpy.mgrid[0:n_exp,
                                      0:N_GPE][0].reshape(1, N_GPE * n_exp)[0]

        sources_STN_SNR = numpy.arange(0, n_exp * N_STN)
        targets_STN_SNR = numpy.mgrid[0:n_exp,
                                      0:N_STN][0].reshape(1, N_STN * n_exp)[0]

        for i_syn, syn in enumerate(SYNAPSE_MODELS_TESTED):
            syn = SYNAPSE_MODELS_TESTED[i_syn]
            SNR = SNR_list[i_syn]
            my_nest.Connect(MSN_base[sources_MSN_SNR_base],
                            SNR[targets_MSN_SNR_base],
                            model=syn)
            my_nest.Connect(MSN_burst[sources_MSN_SNR_burst],
                            SNR[targets_MSN_SNR_burst],
                            model=syn)
            my_nest.Connect(GPE[sources_GPE_SNR],
                            SNR[targets_GPE_SNR],
                            model=SYNAPSE_MODELS_BACKGROUND[0])
            my_nest.Connect(STN[sources_STN_SNR],
                            SNR[targets_STN_SNR],
                            model=SYNAPSE_MODELS_BACKGROUND[1])

        my_nest.MySimulate(sim_time)

        for SNR in SNR_list:
            SNR.get_signal('s', start=0, stop=sim_time)

        pre_ref_1 = str(SNR_list[0].signals['spikes'].mean_rate(
            SEL_ONSET - 500, SEL_ONSET))
        burst_1 = str(SNR_list[0].signals['spikes'].mean_rate(
            SEL_ONSET, SEL_ONSET + 200))
        burst_2 = str(SNR_list[0].signals['spikes'].mean_rate(
            SEL_ONSET + 1000, SEL_ONSET + 1200))
        s = '\n'
        s = s + 'Simulate example:\n'
        s = s + '%s %5s %3s \n' % ('Simulation time:', str(sim_time), 'ms')
        s = s + '%s %5s %3s \n' % ('N experiments:', str(n_exp), '#')
        s = s + '%s %5s %3s \n' % ('MSN base rate:', str(MSN_BASE_RATE),
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('MSN burst rate:', str(MSN_BURST_RATE),
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('GPe rate:', str(GPE_BASE_RATE), 'spikes/s')
        s = s + '%s %5s %3s \n' % ('Burst time:', str(burst_time), 'ms')
        s = s + '%s %5s %3s \n' % ('Pre sel rate Ref:', pre_ref_1[0:4],
                                   'spikes/s')
        s = s + '%s %5s %3s \n' % ('Burst 1:', burst_1[0:4], 'spikes/s')
        s = s + '%s %5s %3s \n' % ('Burst 2:', burst_2[0:4], 'spikes/s')
        header = s
        misc.text_save(header, save_header_at)
        misc.pickle_save([SNR_list, s], save_result_at)

    else:
        SNR_list, s = misc.pickle_load(save_result_at)

    return SNR_list, s
Example #11
def simulate_MSN_vs_SNR_const_syn_events(load=True):
    global SNR_INJECTED_CURRENT
    global N_MSN
    global N_GPE
    global MSN_BURST_RATE
    global GPE_BASE_RATE

    # Path where raw data is saved, e.g. the spike trains.
    save_result_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_const_syn_events.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_const_syn_events_header'

    # REMARK: the number of bursting MSNs cannot exceed SYN_EVENTS/MSN_BURST_RATE,
    # otherwise the mean rate of the remaining MSNs would have to be negative
    n_MSN_bursting = numpy.arange(0, N_MAX_BURSTING + 1)

    n_exp = 200
    #n_exp=20

    # Solve (500-n)*x + 20*n = 600, where 500 is the total number of MSNs, 20 is
    # the burst rate, x is the MSN mean rate and n is the number of bursters.
    # Then x = (600-20*n)/(500-n)
    MSNmeanRates = (SYN_EVENTS -
                    MSN_BURST_RATE * n_MSN_bursting) / (N_MSN - n_MSN_bursting)
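    # e.g. with the numbers from the comment above (600 events, 20 spikes/s
    # bursts, 500 MSNs): n=0 gives 1.2 spikes/s and n=10 gives about 0.82 spikes/s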

    SNRmeanRates = []

    sim_time = 3000.

    if not load:
        for r, n_MSN_b in zip(MSNmeanRates, n_MSN_bursting):
            my_nest.ResetKernel(threads=4)
            model_list, model_dict = models()
            my_nest.MyLoadModels(model_list, NEURON_MODELS)
            my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED)
            my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND)

            MSN = []
            SNR = []
            GPE = []
            STN = []

            for i in range(n_exp):
                MSN.append(MyPoissonInput(n=N_MSN, sd=True))
                GPE.append(MyPoissonInput(n=N_GPE, sd=True))
                STN.append(MyPoissonInput(n=N_STN, sd=True))

                I_e = my_nest.GetDefaults(
                    NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
                SNR.append(
                    MyGroup(NEURON_MODELS[0],
                            n=len(SYNAPSE_MODELS_TESTED),
                            params={'I_e': I_e},
                            sd=True))

            for i_exp in range(n_exp):
                for id in MSN[i_exp][:N_MSN - n_MSN_b]:
                    MSN[i_exp].set_spike_times(id=id,
                                               rates=numpy.array([r]),
                                               times=numpy.array([1]),
                                               t_stop=sim_time,
                                               seed=int(numpy.random.random() *
                                                        10000.0))

                for id in MSN[i_exp][N_MSN - n_MSN_b:]:
                    MSN[i_exp].set_spike_times(
                        id=id,
                        rates=numpy.array([r, MSN_BURST_RATE, r]),
                        times=numpy.array([1, SEL_ONSET, SEL_OFFSET]),
                        t_stop=sim_time,
                        seed=int(numpy.random.random() * 10000.0))

                # Base rate GPE
                for id in GPE[i_exp][:]:
                    GPE[i_exp].set_spike_times(id=id,
                                               rates=[GPE_BASE_RATE],
                                               times=[1],
                                               t_stop=sim_time,
                                               seed=int(numpy.random.random() *
                                                        10000.0))
                # Base rate STN
                for id in STN[i_exp][:]:
                    STN[i_exp].set_spike_times(id=id,
                                               rates=[STN_BASE_RATE],
                                               times=[1],
                                               t_stop=sim_time,
                                               seed=int(numpy.random.random() *
                                                        10000.0))

                for j, syn in enumerate(SYNAPSE_MODELS_TESTED):
                    my_nest.ConvergentConnect(MSN[i_exp][:], [SNR[i_exp][j]],
                                              model=syn)
                    my_nest.ConvergentConnect(
                        GPE[i_exp][:], [SNR[i_exp][j]],
                        model=SYNAPSE_MODELS_BACKGROUND[0])
                    my_nest.ConvergentConnect(
                        STN[i_exp][:], [SNR[i_exp][j]],
                        model=SYNAPSE_MODELS_BACKGROUND[1])

            my_nest.MySimulate(sim_time)

            delay = my_nest.GetDefaults(SYNAPSE_MODELS_BACKGROUND[0])['delay']
            SNRmeanRates_tmp = []
            for i in range(n_exp):
                SNR[i].get_signal('s')  # retrieve signal

                SNRmeanRates_tmp.append(SNR[i].signals['spikes'].mean_rates(
                    SEL_ONSET + delay, SEL_OFFSET + delay))

            SNRmeanRates.append(numpy.mean(SNRmeanRates_tmp, axis=0))

        SNRmeanRates = numpy.array(SNRmeanRates).transpose()

        s = '\n'
        s = s + 'simulate_MSN_vs_SNR_const_syn_events:\n'
        s = s + '%s %5s %3s \n' % ('Syn events:', str(SYN_EVENTS), '#')
        s = s + '%s %5s %3s \n' % ('n_exp:', str(n_exp), '#')
        infoString = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([SNRmeanRates, infoString], save_result_at)

    elif load:
        SNRmeanRates, infoString = misc.pickle_load(save_result_at)

    return n_MSN_bursting, MSNmeanRates, SNRmeanRates, infoString
Example #12
def simulate_MSN_vs_SNR_rate(load=True):
    global SNR_INJECTED_CURRENT
    global N_MSN
    global N_GPE
    global N_STN
    global GPE_BASE_RATE

    # Path where raw data is saved, e.g. the spike trains.
    save_result_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_rate.pkl'
    save_header_at = OUTPUT_PATH + '/simulate_MSN_vs_SNR_rate_header'

    MSNmeanRates = numpy.arange(0.1, 3.1, 0.1)
    SNRmeanRates = []

    sim_time = 100000.

    if not load:
        for r in MSNmeanRates:
            my_nest.ResetKernel(threads=3)
            model_list, model_dict = models()
            my_nest.MyLoadModels(model_list, NEURON_MODELS)
            my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_TESTED)
            my_nest.MyLoadModels(model_list, SYNAPSE_MODELS_BACKGROUND)

            MSN = MyPoissonInput(n=N_MSN)
            GPE = MyPoissonInput(n=N_GPE)
            STN = MyPoissonInput(n=N_STN)

            I_e = my_nest.GetDefaults(
                NEURON_MODELS[0])['I_e'] + SNR_INJECTED_CURRENT
            SNR = MyGroup(NEURON_MODELS[0],
                          n=len(SYNAPSE_MODELS_TESTED),
                          params={'I_e': I_e},
                          sd=True)

            for id in MSN[:]:
                MSN.set_spike_times(id=id,
                                    rates=numpy.array([r]),
                                    times=numpy.array([1]),
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate GPE
            for id in GPE[:]:
                GPE.set_spike_times(id=id,
                                    rates=[GPE_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            # Base rate STN
            for id in STN[:]:
                STN.set_spike_times(id=id,
                                    rates=[STN_BASE_RATE],
                                    times=[1],
                                    t_stop=sim_time,
                                    seed=int(numpy.random.random() * 10000.0))

            for i, syn in enumerate(SYNAPSE_MODELS_TESTED):
                my_nest.ConvergentConnect(MSN[:], [SNR[i]], model=syn)
                my_nest.ConvergentConnect(GPE[:], [SNR[i]],
                                          model=SYNAPSE_MODELS_BACKGROUND[0])
                my_nest.ConvergentConnect(STN[:], [SNR[i]],
                                          model=SYNAPSE_MODELS_BACKGROUND[1])

            my_nest.MySimulate(sim_time)

            SNR.get_signal('s')  # retrieve signal

            SNRmeanRates.append(SNR.signals['spikes'].mean_rates(
                1000.0, sim_time))

        SNRmeanRates = numpy.array(SNRmeanRates).transpose()
        MSNmeanRates = numpy.array(MSNmeanRates)

        rateAtThr = ''
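        # For each synapse model, take the largest MSN rate at which the SNr
        # rate is still at or above the selection threshold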
        for SNRr in SNRmeanRates:
            tmp = str(MSNmeanRates[SNRr >= SELECTION_THR][-1])
            rateAtThr += ' ' + tmp[0:4]

        s = '\n'
        s = s + 'simulate_MSN_vs_SNR_rate:\n'
        s = s + ' %s %5s %3s \n' % ('N MSNs:', str(N_MSN), '#')
        s = s + ' \n%s \n%5s %3s \n' % ('MSN rates:', str(
            MSNmeanRates[0]) + '-' + str(MSNmeanRates[-1]), 'spikes/s')
        s = s + ' %s %5s %3s \n' % ('N GPes:', str(N_GPE), '#')
        s = s + ' %s %5s %3s \n' % ('Threshold SNr:', str(SELECTION_THR),
                                    'spikes/s')
        s = s + ' \n%s \n%5s %3s \n' % ('MSN rate right before threshold SNr:',
                                        str(rateAtThr), 'spikes/s')
        s = s + ' \n%s %5s %3s \n' % ('Simulation time:', str(sim_time), 'ms')
        s = s + ' %s %5s %3s \n' % ('Injected current:',
                                    str(SNR_INJECTED_CURRENT), 'pA')
        infoString = s

        header = HEADER_SIMULATION_SETUP + s
        misc.text_save(header, save_header_at)
        misc.pickle_save([MSNmeanRates, SNRmeanRates, infoString],
                         save_result_at)
    elif load:
        MSNmeanRates, SNRmeanRates, infoString = misc.pickle_load(
            save_result_at)

    return MSNmeanRates, SNRmeanRates, infoString