def ousim(mu_amp, mu_offs, sigma_amp, sigma_offs, freq, V_th):
    """Simulate one LIF neuron driven by a sinusoidally modulated
    Ornstein-Uhlenbeck (OU) input and return its trajectory.

    :param mu_amp: amplitude of the sinusoidal drift term (volt/second)
    :param mu_offs: offset of the drift term (volt/second)
    :param sigma_amp: amplitude of the noise strength (volt/sqrt(second))
    :param sigma_offs: offset of the noise strength (volt/sqrt(second))
    :param freq: modulation frequency (Hz)
    :param V_th: spike threshold (volt)
    :return: (times, spiketimes, membrane) -- monitor time axis, the
        neuron's spike times, and the recorded membrane trace with spikes
        drawn in at 2*V_th for plotting.

    NOTE(review): relies on module-level globals V0, tau, t_refr, V_reset
    and duration -- confirm they are defined by the enclosing script.
    """
    # mu_amp, mu_offs, sigma_amp, sigma_offs, freq, V_th = config
    # Clamp so sigma(t) = sigma_amp*sin(...) + sigma_offs never goes negative.
    if sigma_amp > sigma_offs:
        sigma_amp = sigma_offs
    # print("Setting up OU LIF simulation...")
    ounet = Network()
    clock.reinit_default_clock()
    # Membrane equation with time-varying drift mu(t) and noise gain sigma(t).
    eqs = Equations('dV/dt = mu-(V+V0)/tau + sigma*I/sqrt(dt) : volt')
    # I integrates the white-noise term xi; the explicit dt factors are the
    # Brian 1 idiom for step-scaled noise -- presumably an OU/white-noise
    # input normalized per time step (verify against Brian 1 noise docs).
    eqs += Equations('dI/dt = -I/dt + xi/sqrt(dt) : 1')
    eqs += Equations('mu = mu_amp*sin(t*freq*2*pi) + mu_offs : volt/second')
    eqs += Equations('sigma = sigma_amp*sin(t*freq*2*pi) + sigma_offs :'
                     ' volt/sqrt(second)')
    eqs.prepare()
    ounrn = NeuronGroup(1, eqs, threshold=V_th, refractory=t_refr,
                        reset=V_reset)
    ounet.add(ounrn)
    ounrn.V = V0
    V_mon = StateMonitor(ounrn, 'V', record=True)
    st_mon = SpikeMonitor(ounrn)
    ounet.add(V_mon, st_mon)
    ounet.run(duration)
    # Overlay spikes on the recorded trace so plots show firing clearly.
    V_mon.insert_spikes(st_mon, value=V_th*2)
    times = V_mon.times
    membrane = V_mon[0]
    return times, st_mon.spiketimes[0], membrane
def initialize(self, **kwds):
    """Populate this object from shared data and keyword overrides, then
    build the Simulator used for fitness evaluation.

    Attributes are copied verbatim from ``self.shared_data`` first, then
    overridden by **kwds. (Python 2 code: ``iteritems``/``cPickle``.)
    """
    # Gets the key,value pairs in shared_data
    for key, val in self.shared_data.iteritems():
        setattr(self, key, val)
    # Gets the key,value pairs in **kwds
    for key, val in kwds.iteritems():
        setattr(self, key, val)
    # The model is shipped pickled between worker processes; if it unpickles
    # to a plain string it is an equation definition that still needs
    # wrapping in an Equations object.
    self.model = cPickle.loads(self.model)
    if type(self.model) is str:
        self.model = Equations(self.model)
    self.simulator = Simulator(self.model, self.reset, self.threshold,
                               inputs=self.inputs,
                               input_var=self.input_var,
                               dt=self.dt,
                               refractory=self.refractory,
                               max_refractory=self.max_refractory,
                               spikes=self.spikes,
                               traces=self.traces,
                               groups=self.groups,
                               slices=self.slices,
                               overlap=self.overlap,
                               onset=self.onset,
                               neurons=self.nodesize,
                               initial_values=self.initial_values,
                               unit_type=self.unit_type,
                               stepsize=self.stepsize,
                               precision=self.precision,
                               criterion=self.criterion,
                               method=self.method)
def __init__(self, params=zheng_params, network=None):
    """Single-unit NeuronGroup holding the state variables of the Zheng
    metabolic model (parameter-only equations; no dynamics here).

    :param params: parameter namespace (defaults to ``zheng_params``)
    :param network: optional source network whose summed synaptic
        conductances drive G_total / G_total_exc via linked variables
    """
    eqs = Equations('''
    G_total : siemens
    G_total_exc : siemens
    cmr_o : 1
    cb : 1
    g : 1
    c_ab : 1
    cb_0 : 1
    g_0 : 1
    ''')
    NeuronGroup.__init__(self, 1, model=eqs, compile=True, freeze=True)
    self.params = params
    # Seed state from the parameter set; cb and g start at their baselines.
    self.c_ab = self.params.c_ab
    self.cb_0 = self.params.cb_0
    self.g_0 = self.params.g_0
    self.cb = self.cb_0
    self.g = self.g_0
    if network is not None:
        # Mirror the network's total (and excitatory-only) synaptic
        # conductance each time step.
        self.G_total = linked_var(network, 'g_syn', func=sum)
        self.G_total_exc = linked_var(network, 'g_syn_exc', func=sum)
def build(self, traj, brian_list, network_dict):
    """Construct the neuron group from the trajectory parameters.

    Appends the group to ``brian_list`` and registers it in
    ``network_dict`` under the key ``'group'``. Does nothing when the
    component was already built (``self.pre_built``).
    """
    if self.pre_built:
        return
    group = NeuronGroup(traj.N,
                        Equations(traj.model, tau=traj.tau),
                        threshold=traj.threshold,
                        reset=traj.reset,
                        refractory=traj.refr)
    group.v0 = traj.v00
    brian_list.append(group)
    network_dict['group'] = group
def __init__(self, pyramidal_group, clock=defaultclock):
    """Single-unit group whose LFP variable tracks the summed absolute
    synaptic current (``I_abs``) of the given pyramidal population.

    :param pyramidal_group: source population providing ``I_abs``
    :param clock: simulation clock (defaults to Brian's default clock)
    """
    NeuronGroup.__init__(
        self, 1,
        model=Equations('''
        LFP : amp
        '''),
        compile=True, freeze=True, clock=clock)
    # LFP is linked rather than integrated: it mirrors sum(I_abs) each step.
    self.LFP = linked_var(pyramidal_group, 'I_abs', func=sum)
def __init__(self, clock, params=default_params, network=None):
    """Single-unit hemodynamic (balloon-model style) voxel driven by the
    total synaptic conductance of a source network.

    State: s (vasodilatory signal), f_in/f_out (blood flow), v (volume),
    q (deoxyhemoglobin), y (BOLD signal) -- names per the equations below;
    exact model provenance not visible here.

    :param clock: simulation clock for this group
    :param params: parameter namespace (defaults to ``default_params``)
    :param network: optional source network for G_total / G_total_exc
    """
    eqs = Equations('''
    G_total : siemens
    G_total_exc : siemens
    ds/dt=eta*(G_total-G_base)/G_base-s/tau_s-(f_in-1.0)/tau_f : 1
    df_in/dt=s/second : 1
    dv/dt=1/tau_o*(f_in-f_out) : 1
    f_out=v**(1.0/alpha) : 1
    o_e=1-(1-e_base)**(1/f_in) : 1
    dq/dt=1/tau_o*((f_in*o_e/e_base)-f_out*q/v) : 1
    y=v_base*((k1+k2)*(1-q)-(k2+k3)*(1-v)) : 1
    G_base : siemens
    eta : 1/second
    tau_s : second
    tau_f : second
    alpha : 1
    tau_o : second
    e_base : 1
    v_base : 1
    k1 : 1
    k2 : 1
    k3 : 1
    ''')
    NeuronGroup.__init__(self, 1, model=eqs, clock=clock, compile=True,
                         freeze=True)
    self.params = params
    # Copy model parameters into (frozen) state variables.
    self.G_base = params.G_base
    self.eta = params.eta
    self.tau_s = params.tau_s
    self.tau_f = params.tau_f
    self.alpha = params.alpha
    self.tau_o = params.tau_o
    self.e_base = params.e_base
    self.v_base = params.v_base
    self.k1 = params.k1
    # Derived BOLD constants: signal decay at echo time TE for the
    # extra-/intra-vascular compartments, and their ratio beta.
    self.params.s_e = params.s_e_0 * exp(-params.TE / params.T_2E)
    self.params.s_i = params.s_i_0 * exp(-params.TE / params.T_2I)
    self.params.beta = self.params.s_e / self.params.s_i
    self.k2 = self.params.beta * params.r_0 * self.e_base * params.TE
    self.k3 = self.params.beta - 1
    # Resting-state initial conditions.
    self.f_in = 1
    self.s = 0
    self.v = 1
    self.q = 1
    if network is not None:
        self.G_total = linked_var(network, 'g_syn', func=sum)
        self.G_total_exc = linked_var(network, 'g_syn_exc', func=sum)
def lifsim(mu_amp, mu_offs, sigma_amp, sigma_offs, freq, V_th):
    """Simulate one LIF neuron driven by pulse-packet plus Poisson spike
    input and return its trajectory.

    Fix: the third parameter was misspelled ``simga_amp``; renamed to
    ``sigma_amp`` for consistency with the companion ``ousim``. It is not
    referenced in the body, so positional callers are unaffected.

    :param mu_amp: amplitude of the modulated part of the drive
    :param mu_offs: offset (baseline) of the drive
    :param sigma_amp: noise amplitude (currently unused in this function)
    :param sigma_offs: noise offset (currently unused in this function)
    :param freq: modulation / Poisson rate frequency (Hz)
    :param V_th: spike threshold (volt)
    :return: (times, spiketimes, membrane) -- monitor time axis, the
        neuron's spike times, and the recorded membrane trace with spikes
        drawn in at 2*V_th.

    NOTE(review): relies on module-level globals V0, tau, t_refr, V_reset
    and duration -- confirm they are defined by the enclosing script.
    """
    lifnet = Network()
    clock.reinit_default_clock()
    eqs = Equations('dV/dt = (-V+V0)/tau : volt')
    eqs.prepare()
    lifnrn = NeuronGroup(1, eqs, threshold=V_th, refractory=t_refr,
                         reset=V_reset)
    lifnet.add(lifnrn)
    # One pulse packet per modulation period, centered at phase 0.25.
    pulse_times = (np.arange(1, duration*freq, 1)+0.25)/freq
    pulse_spikes = []
    Npoiss = 5000
    Npulse = 5000
    # Split the drive: baseline carried by Poisson input, modulation by
    # pulse packets; weights scaled so the summed rate matches mu.
    wpoiss = (mu_offs-mu_amp)/(Npoiss*freq)
    wpulse = mu_amp/(Npulse*freq)
    # Temporal jitter of each packet: one fifth of the period.
    sigma = 1/(freq*5)
    if (wpulse != 0):
        for pt in pulse_times:
            pp = PulsePacket(t=pt*second, n=Npulse, sigma=sigma)
            pulse_spikes.extend(pp.spiketimes)
        pulse_input = SpikeGeneratorGroup(Npulse, pulse_spikes)
        pulse_conn = Connection(pulse_input, lifnrn, 'V', weight=wpulse)
        lifnet.add(pulse_input, pulse_conn)
    if (wpoiss != 0):
        poiss_input = PoissonGroup(Npoiss, freq)
        poiss_conn = Connection(poiss_input, lifnrn, 'V', weight=wpoiss)
        lifnet.add(poiss_input, poiss_conn)
    V_mon = StateMonitor(lifnrn, 'V', record=True)
    st_mon = SpikeMonitor(lifnrn)
    lifnet.add(V_mon, st_mon)
    lifnet.run(duration)
    # Overlay spikes on the recorded trace so plots show firing clearly.
    V_mon.insert_spikes(st_mon, value=V_th*2)
    times = V_mon.times
    membrane = V_mon[0]
    return times, st_mon.spiketimes[0], membrane
def initialize(self, **kwds):
    """Populate this fitness object from shared data and keyword
    overrides, then set up the neuron group, data transforms and input
    injection for model fitting.

    (Python 2 code: ``iteritems``/``cPickle``.)
    """
    # Initialization of variables
    self.use_gpu = self.unit_type=='GPU'
    # Gets the key,value pairs in shared_data
    for key, val in self.shared_data.iteritems():
        setattr(self, key, val)
    # Gets the key,value pairs in **kwds
    for key, val in kwds.iteritems():
        setattr(self, key, val)
    self.neurons = self.nodesize
    self.groups = self.groups
    # The model arrives pickled; a plain string is an equation definition
    # that still needs wrapping in an Equations object.
    self.model = cPickle.loads(self.model)
    if type(self.model) is str:
        self.model = Equations(self.model)
    self.initialize_neurongroup()
    self.transform_data()
    self.inject_input()
def _build_model_eqs(traj):
    """Create Equations objects for the inhibitory ('i') and excitatory
    ('e') populations.

    Fuses `model.eqs` with `model.synaptic.eqs`, substituting 'POST' with
    the population name and 'PRE' with each presynaptic population name,
    and collects the time-constant namespace for each population.

    :return: dict with 'i' mapping to the inhibitory Equations object and
        'e' to the excitatory one
    """
    base_eqs = traj.model.eqs
    populations = ('i', 'e')
    post_eqs = {}
    for post in populations:
        fused_eqs = base_eqs.replace('POST', post)
        # Namespace of constants referenced by the fused equation string.
        namespace = {'tau_' + post: traj.model['tau_' + post]}
        for pre in populations:
            fused_eqs += traj.model.synaptic.eqs.replace('PRE', pre)
            tau1 = traj.model.synaptic['tau1']
            tau2 = traj.model.synaptic['tau2_' + pre]
            namespace['invtau1_' + pre] = 1.0 / tau1
            namespace['invtau2_' + pre] = 1.0 / tau2
            namespace['normalization_' + pre] = (tau1 - tau2) / tau2
            namespace['tau1_' + pre] = tau1
            namespace['tau2_' + pre] = tau2
        post_eqs[post] = Equations(fused_eqs, **namespace)
    return post_eqs
def __init__(self, lip_size, params, background_inputs=None,
             visual_cortex_input=None, go_input=None):
    """Two-hemisphere LIP population (left + right, ``lip_size`` neurons
    each, ``2*lip_size`` total) of exponential integrate-and-fire neurons
    with AMPA/NMDA/GABA-A/GABA-B conductances.

    :param lip_size: neurons per hemisphere
    :param params: parameter namespace (capacitance, reversal potentials,
        time constants, connection weights/probabilities)
    :param background_inputs: optional (left, right) background spike sources
    :param visual_cortex_input: optional (left, right) visual input sources
    :param go_input: optional GO-signal spike source
    """
    self.lip_size = lip_size
    self.N = 2 * self.lip_size
    self.params = params
    self.background_inputs = background_inputs
    self.visual_cortex_input = visual_cortex_input
    self.go_input = go_input

    ## Set up equations
    # Exponential integrate-and-fire neuron
    eqs = exp_IF(params.C, params.gL, params.EL, params.VT, params.DeltaT)
    # AMPA conductance - recurrent input current
    eqs += exp_synapse('g_ampa_r', params.tau_ampa, siemens)
    eqs += Current('I_ampa_r=g_ampa_r*(E-vm): amp', E=params.E_ampa)
    # AMPA conductance - background input current
    eqs += exp_synapse('g_ampa_b', params.tau_ampa, siemens)
    eqs += Current('I_ampa_b=g_ampa_b*(E-vm): amp', E=params.E_ampa)
    # AMPA conductance - task input current
    eqs += exp_synapse('g_ampa_x', params.tau_ampa, siemens)
    eqs += Current('I_ampa_x=g_ampa_x*(E-vm): amp', E=params.E_ampa)
    # AMPA conductance - go input current
    eqs += exp_synapse('g_ampa_g', params.tau_ampa, siemens)
    eqs += Current('I_ampa_g=g_ampa_g*(E-vm): amp', E=params.E_ampa)
    # Voltage-dependent NMDA conductance (magnesium block term g_V)
    eqs += biexp_synapse('g_nmda', params.tau1_nmda, params.tau2_nmda,
                         siemens)
    eqs += Equations('g_V = 1/(1+(Mg/3.57)*exp(-0.062 *vm/mV)) : 1 ',
                     Mg=params.Mg)
    eqs += Current('I_nmda=g_V*g_nmda*(E-vm): amp', E=params.E_nmda)
    # GABA-A conductance
    eqs += exp_synapse('g_gaba_a', params.tau_gaba_a, siemens)
    eqs += Current('I_gaba_a=g_gaba_a*(E-vm): amp', E=params.E_gaba_a)
    # GABA-B conductance
    eqs += biexp_synapse('g_gaba_b', params.tau1_gaba_b,
                         params.tau2_gaba_b, siemens)
    eqs += Current('I_gaba_b=g_gaba_b*(E-vm): amp', E=params.E_gaba_b)
    # Total synaptic conductance
    eqs += Equations(
        'g_syn=g_ampa_r+g_ampa_x+g_ampa_g+g_ampa_b+g_V*g_nmda+g_gaba_a+g_gaba_b : siemens'
    )
    eqs += Equations(
        'g_syn_exc=g_ampa_r+g_ampa_x+g_ampa_g+g_ampa_b+g_V*g_nmda : siemens'
    )
    # Total synaptic current
    eqs += Equations(
        'I_abs=abs(I_ampa_r)+abs(I_ampa_b)+abs(I_ampa_x)+abs(I_ampa_g)+abs(I_nmda)+abs(I_gaba_a) : amp'
    )
    NeuronGroup.__init__(self, self.N, model=eqs, threshold=-20 * mV,
                         reset=params.EL, compile=True)
    self.init_subpopulations()
    self.connections = []
    self.init_connectivity()
    if self.background_inputs is not None:
        # Background -> E+I population connections
        background_left_ampa = init_connection(self.background_inputs[0],
                                               self.left_lip.neuron_group,
                                               'g_ampa_b',
                                               self.params.w_ampa_min,
                                               self.params.w_ampa_max,
                                               self.params.p_b_e,
                                               delay=5 * ms)
        background_right_ampa = init_connection(
            self.background_inputs[1], self.right_lip.neuron_group,
            'g_ampa_b', self.params.w_ampa_min, self.params.w_ampa_max,
            self.params.p_b_e, delay=5 * ms)
        self.connections.append(background_left_ampa)
        self.connections.append(background_right_ampa)
    if self.visual_cortex_input is not None:
        # Task input -> E population connections
        # NOTE(review): 270 ms presumably models the visual response
        # latency to LIP -- confirm against the model description.
        vc_left_lip_ampa = init_connection(self.visual_cortex_input[0],
                                           self.left_lip.e_contra_vis,
                                           'g_ampa_x',
                                           self.params.w_ampa_min,
                                           self.params.w_ampa_max,
                                           self.params.p_v_ec_vis,
                                           delay=270 * ms)
        vc_right_lip_ampa = init_connection(self.visual_cortex_input[1],
                                            self.right_lip.e_contra_vis,
                                            'g_ampa_x',
                                            self.params.w_ampa_min,
                                            self.params.w_ampa_max,
                                            self.params.p_v_ec_vis,
                                            delay=270 * ms)
        self.connections.append(vc_left_lip_ampa)
        self.connections.append(vc_right_lip_ampa)
    if self.go_input is not None:
        # GO signal -> I and E populations of both hemispheres
        go_left_lip_i_ampa = init_connection(self.go_input,
                                             self.left_lip.i_group,
                                             'g_ampa_g',
                                             self.params.w_ampa_min,
                                             self.params.w_ampa_max,
                                             self.params.p_g_i,
                                             delay=5 * ms)
        go_right_lip_i_ampa = init_connection(self.go_input,
                                              self.right_lip.i_group,
                                              'g_ampa_g',
                                              self.params.w_ampa_min,
                                              self.params.w_ampa_max,
                                              self.params.p_g_i,
                                              delay=5 * ms)
        go_left_lip_e_ampa = init_connection(self.go_input,
                                             self.left_lip.e_group,
                                             'g_ampa_g',
                                             self.params.w_ampa_min,
                                             self.params.w_ampa_max,
                                             self.params.p_g_e,
                                             delay=5 * ms)
        go_right_lip_e_ampa = init_connection(self.go_input,
                                              self.right_lip.e_group,
                                              'g_ampa_g',
                                              self.params.w_ampa_min,
                                              self.params.w_ampa_max,
                                              self.params.p_g_e,
                                              delay=5 * ms)
        self.connections.append(go_left_lip_i_ampa)
        self.connections.append(go_right_lip_i_ampa)
        self.connections.append(go_left_lip_e_ampa)
        self.connections.append(go_right_lip_e_ampa)
Fit an integrate-and-fire model to an in-vitro electrophysiological
recording during one second.
'''
# NOTE(review): the opening quote of the module docstring above lies
# outside this chunk; the dangling ''' closes it.
from brian import loadtxt, ms, Equations
from brian.library.modelfitting import *

if __name__ == '__main__':
    # Pad the model with extra dummy variables -- presumably to exercise
    # the GPU fitting path with a large state space; verify intent.
    nvar = 100
    equations = '''
        dV/dt=(R*I-V)/tau : 1
        I : 1
        R : 1
        tau : second
    '''
    equations += '\n'.join(['var' + str(i) + ':0' for i in range(nvar)])
    equations = Equations(equations)
    # Injected current trace and target spike train recorded in vitro.
    input = loadtxt('current.txt')
    spikes = loadtxt('spikes.txt')
    # Fit R, tau and the refractory period within the given bounds using
    # the gamma-factor coincidence window delta, on the GPU.
    results = modelfitting(model=equations,
                           reset=0,
                           threshold=1,
                           data=spikes,
                           input=input,
                           dt=.1 * ms,
                           popsize=1000,
                           maxiter=3,
                           delta=4 * ms,
                           gpu=1,
                           R=[1.0e9, 9.0e9],
                           tau=[10 * ms, 40 * ms],
                           refractory=[0 * ms, 10 * ms])
# --- Simulation constants and network construction (top-level script) ---
w = 2*ms                 # coincidence window
nkreuzsamples = 3        # samples per ISI for the Kreuz distance (presumably)
Vrest = 0*mV             # resting / reset potential
Vth = 20*mV              # firing threshold
tau = 20*ms              # membrane time constant
Nnrns = 4                # number of target neurons
Ningroups = 1            # number of input groups
Nin_per_group = 50       # inputs per group
fin = 20*Hz              # input firing rate
ingroup_sync = [0.5]     # synchrony level of each input group
sigma = 0*ms             # jitter of synchronous input events
weight = 2.0*mV          # synaptic weight
Nallin = Nin_per_group*Ningroups
Nin = 25  # total number of connections each cell receives
# Leaky integrate-and-fire membrane equation.
lifeq_exc = Equations("dV/dt = (Vrest-V)/tau : volt")
lifeq_exc.prepare()
nrngroup = NeuronGroup(Nnrns, lifeq_exc, threshold="V>Vth", reset=Vrest,
                       refractory=2*ms)
nrngroup.V = Vrest
# NOTE(review): `network`, `sl` and `duration` must be defined earlier in
# the full script -- not visible in this chunk.
network.add(nrngroup)
print("Setting up inputs and connections ...")
ingroups = []
inpconns = []
for ing in range(Ningroups):
    # Partially synchronous Poisson-like input generator from spikerlib.
    ingroup = sl.tools.fast_synchronous_input_gen(Nin_per_group, fin,
                                                  ingroup_sync[ing], sigma,
                                                  duration, shuffle=False)
    inpconn = Connection(ingroup, nrngroup, 'V')
    ingroups.append(ingroup)
    inpconns.append(inpconn)
def __init__(self, clock, params=zheng_params, network=None):
    """Single-unit hemodynamic voxel implementing the Zheng oxygen-
    transport extension of the balloon model, driven by the total
    synaptic conductance of a source network.

    :param clock: simulation clock for this group
    :param params: parameter namespace (defaults to ``zheng_params``)
    :param network: optional source network for G_total / G_total_exc

    NOTE(review): the %.4f placeholders below bake the numeric values of
    (e_base, g_0, phi, e_base, g_0) directly into the equation string --
    two as the "unit" slots of do_e and dg and three as constants -- a
    freeze/compile trick; confirm against Brian 1 Equations parsing.
    """
    eqs = Equations('''
    G_total : siemens
    G_total_exc : siemens
    ds/dt=eta*(G_total-G_base)/G_base-s/tau_s-(f_in-1.0)/tau_f : 1
    df_in/dt=s/second : 1.0
    dv/dt=1/tau_o*(f_in-f_out) : 1
    f_out=v**(1.0/alpha) : 1
    do_e/dt=1.0/(phi/f_in)*(-o_e+(1.0-g)*(1.0-(1.0-e_base/(1.0-g_0))**(1.0/f_in))) : %.4f
    dcb/dt=1.0/(phi/f_in)*(-cb-(c_ab*o_e)/oe_log+c_ab*g) : 1
    oe_log : 1
    cmr_o=(cb-g*c_ab)/(cb_0-g_0*c_ab) : 1
    dg/dt=1.0/(j*v_ratio*((r*transitTime)/e_base))*((cmr_o-1.0)-k*s) : %.4f
    dq/dt=1/tau_o*((f_in*o_e/e_base)-f_out*q/v) : 1
    y=v_0*((k1+k2)*(1-q)-(k2+k3)*(1-v)) : 1
    G_base : siemens
    eta : 1/second
    tau_s : second
    tau_f : second
    alpha : 1
    tau_o : second
    v_0 : 1
    k1 : 1
    k2 : 1
    k3 : 1
    phi : %.4f*second
    e_base : %.4f
    g_0 : %.4f
    c_ab : 1
    cb_0 : 1
    v_ratio : 1
    j : 1
    transitTime : second
    k : 1
    r : 1
    ''' % (params.e_base, params.g_0, params.phi, params.e_base,
           params.g_0))
    NeuronGroup.__init__(self, 1, model=eqs, clock=clock, compile=True,
                         freeze=True)
    self.params = params
    # Copy model parameters into (frozen) state variables.
    self.G_base = params.G_base
    self.eta = params.eta
    self.tau_s = params.tau_s
    self.tau_f = params.tau_f
    self.alpha = params.alpha
    self.tau_o = params.tau_o
    self.e_base = params.e_base
    self.v_0 = params.v_0
    self.k1 = params.k1
    # Derived BOLD constants: signal decay at echo time TE for the
    # extra-/intra-vascular compartments, and their ratio beta.
    self.params.s_e = params.s_e_0 * exp(-params.TE / params.T_2E)
    self.params.s_i = params.s_i_0 * exp(-params.TE / params.T_2I)
    self.params.beta = self.params.s_e / self.params.s_i
    self.k2 = self.params.beta * params.r_0 * self.e_base * params.TE
    self.k3 = self.params.beta - 1.0
    self.c_ab = self.params.c_ab
    self.cb_0 = self.params.cb_0
    self.g_0 = self.params.g_0
    self.phi = self.params.phi
    self.v_ratio = self.params.v_ratio
    self.j = self.params.j
    self.transitTime = self.params.transitTime
    self.k = self.params.k
    self.r = self.params.r
    # Resting-state initial conditions.
    self.f_in = 1.0
    self.s = 0.0
    self.v = 1.0
    self.o_e = self.e_base
    self.cb = self.cb_0
    self.g = self.g_0
    self.oe_log = np.log(1.0 - self.o_e / (1.0 - self.g))
    self.q = 1.0
    if network is not None:
        self.G_total = linked_var(network, 'g_syn', func=sum)
        self.G_total_exc = linked_var(network, 'g_syn_exc', func=sum)
#!/usr/bin/env python ''' Model fitting example. Fit an integrate-and-fire model to an in-vitro electrophysiological recording during one second. ''' from brian import loadtxt, ms, Equations from brian.library.modelfitting import * if __name__ == '__main__': equations = Equations(''' dV/dt=(R*I-V)/tau : 1 I : 1 R : 1 tau : second ''') input = loadtxt('current.txt') spikes = loadtxt('spikes.txt') results = modelfitting(model=equations, reset=0, threshold=1, data=spikes, input=input, dt=.1 * ms, popsize=1000, maxiter=3, delta=4 * ms, R=[1.0e9, 9.0e9], tau=[10 * ms, 40 * ms], refractory=[0 * ms, 10 * ms]) print_table(results)
def run_simulation(realizations=1, trials=1, t=3000 * ms, alpha=1, ree=1,
                   k=50, winlen=50 * ms, verbose=True, t_stim=0):
    """
    Run the whole simulation with the specified parameters. All model
    parameters are set in the function.

    Keyword arguments:
    :param realizations: number of repetitions of the whole simulation,
        number of network instances
    :param trials: number of trials for network instance
    :param t: simulation time
    :param alpha: scaling factor for number of neurons in the network
    :param ree: clustering coefficient
    :param k: number of clusters
    :param t_stim: duration of stimulation of a subset of clusters
    :param winlen: length of window in ms
    :param verbose: plotting flag
    :return: numpy matrices with spike times

    NOTE(review): t_stim is used both as a bare number (t_stim/(winlen/ms),
    t_stim==0) and multiplied by ms before network.run -- it appears to be
    expected in milliseconds without units; confirm at the call sites.
    """
    # The equations defining our neuron model
    eqs_string = '''
    dV/dt = (mu - V)/tau + x: volt
    dx/dt = -1.0/tau_2*(x - y/tau_1) : volt/second
    dy/dt = -y/tau_1 : volt
    mu : volt
    tau: second
    tau_2: second
    tau_1: second
    '''
    # Model parameters
    n_e = int(4000 * alpha)  # number of exc neurons
    n_i = int(1000 * alpha)  # number of inh neurons
    tau_e = 15 * ms  # membrane time constant (for excitatory synapses)
    tau_i = 10 * ms  # membrane time constant (for inhibitory synapses)
    tau_syn_2_e = 3 * ms  # exc synaptic time constant tau2 in paper
    tau_syn_2_i = 2 * ms  # inh synaptic time constant tau2 in paper
    tau_syn_1 = 1 * ms  # exc/inh synaptic time constant tau1 in paper
    vt = -50 * mV  # firing threshold
    vr = -65 * mV  # reset potential
    dv = vt - vr  # delta v
    refrac = 5 * ms  # absolute refractory period
    # scale the weights to ensure same variance in the inputs
    wee = 0.024 * dv * np.sqrt(1. / alpha)
    wie = 0.014 * dv * np.sqrt(1. / alpha)
    wii = -0.057 * dv * np.sqrt(1. / alpha)
    wei = -0.045 * dv * np.sqrt(1. / alpha)
    # Connection probability
    p_ee = 0.2
    p_ii = 0.5
    p_ie = 0.5
    p_ei = 0.5
    # determine probs for inside and outside of clusters
    p_in, p_out = get_cluster_connection_probs(ree, k, p_ee)
    mu_min_e, mu_max_e = 1.1, 1.2
    mu_min_i, mu_max_i = 1.0, 1.05
    # increase cluster weights if there are clusters
    wee_cluster = wee if p_in == p_out else 1.9 * wee
    # define numpy array for data storing
    all_data = np.zeros((realizations, trials, n_e+n_i, int(t/winlen)//2))
    for realization in range(realizations):
        # clear workspace to make sure that is a new realization of the
        # network
        clear(True, True)
        reinit()
        # set up new random bias parameter for every type of neuron
        mu_e = vr + np.random.uniform(mu_min_e, mu_max_e, n_e) * dv  # bias for excitatory neurons
        mu_i = vr + np.random.uniform(mu_min_i, mu_max_i, n_i) * dv  # bias for inhibitory neurons
        # Let's create an equation object from our string and parameters
        model_eqs = Equations(eqs_string)
        # Let's create 5000 neurons
        all_neurons = NeuronGroup(N=n_e + n_i,
                                  model=model_eqs,
                                  threshold=vt,
                                  reset=vr,
                                  refractory=refrac,
                                  freeze=True,
                                  method='Euler',
                                  compile=True)
        # Divide the neurons into excitatory and inhibitory ones
        neurons_e = all_neurons[0:n_e]
        neurons_i = all_neurons[n_e:n_e + n_i]
        # set the bias
        neurons_e.mu = mu_e
        neurons_i.mu = mu_i
        neurons_e.tau = tau_e
        neurons_i.tau = tau_i
        neurons_e.tau_2 = tau_syn_2_e
        neurons_i.tau_2 = tau_syn_2_i
        all_neurons.tau_1 = tau_syn_1
        # set up connections
        connections = Connection(all_neurons, all_neurons, 'y')
        # do the cluster connection like cross validation: cluster neuron
        # := test idx; other neurons := train idx
        kf = KFold(n=n_e, n_folds=k)
        for idx_out, idx_in in kf:  # idx_out holds all other neurons; idx_in holds all cluster neurons
            # connect current cluster to itself
            connections.connect_random(
                all_neurons[idx_in[0]:idx_in[-1]],
                all_neurons[idx_in[0]:idx_in[-1]],
                sparseness=p_in, weight=wee_cluster)
            # connect current cluster to other neurons
            connections.connect_random(
                all_neurons[idx_in[0]:idx_in[-1]],
                all_neurons[idx_out[0]:idx_out[-1]],
                sparseness=p_out, weight=wee)
        # connect all excitatory to all inhibitory, irrespective of
        # clustering
        connections.connect_random(all_neurons[0:n_e],
                                   all_neurons[n_e:(n_e + n_i)],
                                   sparseness=p_ie, weight=wie)
        # connect all inhibitory to all excitatory
        connections.connect_random(all_neurons[n_e:(n_e + n_i)],
                                   all_neurons[0:n_e],
                                   sparseness=p_ei, weight=wei)
        # connect all inhibitory to all inhibitory
        connections.connect_random(all_neurons[n_e:(n_e + n_i)],
                                   all_neurons[n_e:(n_e + n_i)],
                                   sparseness=p_ii, weight=wii)
        # set up spike monitors
        spike_mon_e = SpikeMonitor(neurons_e)
        spike_mon_i = SpikeMonitor(neurons_i)
        # set up network with monitors
        network = Network(all_neurons, connections, spike_mon_e,
                          spike_mon_i)
        # run this network for some number of trials, every time with
        for trial in range(trials):
            # different initial values
            all_neurons.V = vr + (vt - vr) * np.random.rand(len(all_neurons)) * 1.4
            # Calibration phase
            # run for the first half of the time to let the neurons adapt
            network.run(t/2)
            # reset monitors to start recording phase
            spike_mon_i.reinit()
            spike_mon_e.reinit()
            # stimulation if duration is given
            # define index variable for the stimulation possibility (is 0
            # for stimulation time=0)
            t_stim_idx = int(t_stim / (winlen/ms))
            if not(t_stim==0):
                # Stimulation phase, increase input to subset of clusters
                all_neurons[:400].mu += 0.07 * dv
                network.run(t_stim * ms, report='text')
                # set back to normal
                all_neurons[:400].mu -= 0.07 * dv
                # save data
                all_data[realization, trial, :n_e, :t_stim_idx] = spikes_counter(spike_mon_e, winlen)
                all_data[realization, trial, n_e:, :t_stim_idx] = spikes_counter(spike_mon_i, winlen)
                # reset monitors
                spike_mon_e.reinit()
                spike_mon_i.reinit()
            # run the remaining time of the simulation
            network.run((t/2) - t_stim*ms, report='text')
            # save results
            all_data[realization, trial, :n_e, t_stim_idx:] = spikes_counter(spike_mon_e, winlen)
            all_data[realization, trial, n_e:, t_stim_idx:] = spikes_counter(spike_mon_i, winlen)
            if verbose:
                plt.ion()
                plt.figure()
                raster_plot(spike_mon_e)
                plt.title('Excitatory neurons')
            spike_mon_e.reinit()
            spike_mon_i.reinit()
    return all_data
def test_stim_pyramidal_impact():
    """Sweep DC stimulation currents on a single pyramidal neuron model
    and plot the resulting steady-state membrane-potential change
    (relative to the 0 pA condition) as a function of stimulation level.
    """
    simulation_clock = Clock(dt=.5 * ms)
    trial_duration = 1 * second
    # Stimulation switches on halfway through the trial.
    dcs_start_time = .5 * second
    stim_levels = [-8, -6, -4, -2, -1, -.5, -.25, 0, .25, .5, 1, 2, 4, 6, 8]
    voltages = np.zeros(len(stim_levels))
    for idx, stim_level in enumerate(stim_levels):
        print('testing stim_level %.3fpA' % stim_level)
        # Exponential integrate-and-fire neuron with the standard synaptic
        # conductances (fresh model per level: Brian 1 groups are not
        # easily reusable across runs).
        eqs = exp_IF(default_params.C, default_params.gL,
                     default_params.EL, default_params.VT,
                     default_params.DeltaT)
        # AMPA conductance - recurrent input current
        eqs += exp_synapse('g_ampa_r', default_params.tau_ampa, siemens)
        eqs += Current('I_ampa_r=g_ampa_r*(E-vm): amp',
                       E=default_params.E_ampa)
        # AMPA conductance - background input current
        eqs += exp_synapse('g_ampa_b', default_params.tau_ampa, siemens)
        eqs += Current('I_ampa_b=g_ampa_b*(E-vm): amp',
                       E=default_params.E_ampa)
        # AMPA conductance - task input current
        eqs += exp_synapse('g_ampa_x', default_params.tau_ampa, siemens)
        eqs += Current('I_ampa_x=g_ampa_x*(E-vm): amp',
                       E=default_params.E_ampa)
        # Voltage-dependent NMDA conductance
        eqs += biexp_synapse('g_nmda', default_params.tau1_nmda,
                             default_params.tau2_nmda, siemens)
        eqs += Equations('g_V = 1/(1+(Mg/3.57)*exp(-0.062 *vm/mV)) : 1 ',
                         Mg=default_params.Mg)
        eqs += Current('I_nmda=g_V*g_nmda*(E-vm): amp',
                       E=default_params.E_nmda)
        # GABA-A conductance
        eqs += exp_synapse('g_gaba_a', default_params.tau_gaba_a, siemens)
        eqs += Current('I_gaba_a=g_gaba_a*(E-vm): amp',
                       E=default_params.E_gaba_a)
        # Injected DC stimulation current.
        eqs += InjectedCurrent('I_dcs: amp')
        group = NeuronGroup(1, model=eqs, threshold=-20 * mV,
                            refractory=pyr_params.refractory,
                            reset=default_params.Vr, compile=True,
                            freeze=True, clock=simulation_clock)
        group.C = pyr_params.C
        group.gL = pyr_params.gL

        @network_operation(clock=simulation_clock)
        def inject_current(c):
            # Turn on the DC current once stimulation onset is reached.
            if simulation_clock.t > dcs_start_time:
                group.I_dcs = stim_level * pA

        monitor = StateMonitor(group, 'vm', simulation_clock, record=True)
        net = Network(group, monitor, inject_current)
        net.run(trial_duration, report='text')
        # Final membrane potential, converted to millivolts.
        voltages[idx] = monitor.values[0, -1] * 1000
    # Express each voltage relative to the 0 pA baseline (index 7).
    voltages = voltages - voltages[7]
    plt.figure()
    plt.plot(stim_levels, voltages)
    plt.xlabel('Stimulation level (pA)')
    plt.ylabel('Voltage Change (mV)')
    plt.show()
class ModelFitting(Fitness):
    """Fitness implementation for Brian's model-fitting toolbox: builds a
    NeuronGroup of candidate parameter sets, feeds it the recorded input
    current, and scores each candidate against target spikes/traces with
    a pluggable criterion. (Python 2 code: ``iteritems``/``cPickle``.)
    """

    def initialize(self, **kwds):
        """Populate attributes from shared data and keyword overrides,
        then set up the neuron group, data transforms and input
        injection."""
        # Initialization of variables
        self.use_gpu = self.unit_type=='GPU'
        # Gets the key,value pairs in shared_data
        for key, val in self.shared_data.iteritems():
            setattr(self, key, val)
        # Gets the key,value pairs in **kwds
        for key, val in kwds.iteritems():
            setattr(self, key, val)
        self.neurons = self.nodesize
        self.groups = self.groups
        # The model arrives pickled; a plain string is an equation
        # definition that still needs wrapping in Equations.
        self.model = cPickle.loads(self.model)
        if type(self.model) is str:
            self.model = Equations(self.model)
        self.initialize_neurongroup()
        self.transform_data()
        self.inject_input()
#        if self.use_gpu:
#            ########
#            # TODO
#            ########
#            # Select integration scheme according to method
#            if self.method == 'Euler': scheme = euler_scheme
#            elif self.method == 'RK': scheme = rk2_scheme
#            elif self.method == 'exponential_Euler': scheme = exp_euler_scheme
#            else: raise Exception("The numerical integration method is not valid")
#
#            self.mf = GPUModelFitting(self.group, self.model, self.input, self.I_offset,
#                                      self.spiketimes, self.spiketimes_offset, zeros(self.neurons), 0*ms, self.delta,
#                                      precision=self.precision, scheme=scheme)
#        else:
#            self.cc = CoincidenceCounter(self.group, self.spiketimes, self.spiketimes_offset,
#                                         onset=self.onset, delta=self.delta)

    def initialize_neurongroup(self):
        """Create the NeuronGroup of candidate models, one neuron per
        (particle x slice), with an optional fittable refractory
        parameter on the CPU path."""
        # Add 'refractory' parameter on the CPU only
        if not self.use_gpu:
            if self.max_refractory is not None:
                refractory = 'refractory'
                self.model.add_param('refractory', second)
            else:
                refractory = self.refractory
        else:
            if self.max_refractory is not None:
                refractory = 0*ms
            else:
                refractory = self.refractory
        # Must recompile the Equations : the functions are not transfered
        # after pickling/unpickling
        self.model.compile_functions()
        self.group = NeuronGroup(self.neurons,
                                 model=self.model,
                                 reset=self.reset,
                                 threshold=self.threshold,
                                 refractory=refractory,
                                 max_refractory=self.max_refractory,
                                 method=self.method,
                                 clock=Clock(dt=self.dt))
        if self.initial_values is not None:
            for param, value in self.initial_values.iteritems():
                self.group.state(param)[:] = value

    def transform_data(self):
        """Slice and inline the input/target data so every neuron sees
        its own contiguous segment (one slice per neuron)."""
        self.transformer = DataTransformer(self.neurons,
                                           self.inputs,
                                           spikes=self.spikes,
                                           traces=self.traces,
                                           dt=self.dt,
                                           slices=self.slices,
                                           overlap=self.overlap,
                                           groups=self.groups)
        self.total_steps = self.transformer.total_steps
        self.sliced_duration = self.transformer.sliced_duration
        self.sliced_inputs = self.transformer.slice_traces(self.inputs)
        self.inputs_inline, self.inputs_offset = self.transformer.transform_traces(self.sliced_inputs)
        if self.traces is not None:
            self.sliced_traces = self.transformer.slice_traces(self.traces)
            self.traces_inline, self.traces_offset = self.transformer.transform_traces(self.sliced_traces)
        else:
            self.sliced_traces, self.traces_inline, self.traces_offset = None, None, None
        if self.spikes is not None:
            self.sliced_spikes = self.transformer.slice_spikes(self.spikes)
            self.spikes_inline, self.spikes_offset = self.transformer.transform_spikes(self.sliced_spikes)
        else:
            self.sliced_spikes, self.spikes_inline, self.spikes_offset = None, None, None

    def inject_input(self):
        """Attach the inlined input traces to the group as TimedArrays.

        Injects current in consecutive subgroups, where I_offset have the
        same value on successive intervals.
        """
        I_offset = self.inputs_offset
        k = -1
        # Each run of identical offsets becomes one subgroup sharing a
        # single TimedArray segment of length total_steps.
        for i in hstack((nonzero(diff(I_offset))[0], len(I_offset) - 1)):
            I_offset_subgroup_value = I_offset[i]
            I_offset_subgroup_length = i - k
            sliced_subgroup = self.group.subgroup(I_offset_subgroup_length)
            input_sliced_values = self.inputs_inline[I_offset_subgroup_value:I_offset_subgroup_value + self.total_steps]
            sliced_subgroup.set_var_by_array(self.input_var,
                                             TimedArray(input_sliced_values,
                                                        clock=self.group.clock))
            k = i

    def initialize_criterion(self, delays, tau_metric=None):
        """Instantiate the scoring criterion object matching the class
        name of ``self.criterion`` (GammaFactor, LpError, VanRossum or
        Brette), passing criterion-specific extras."""
        # general criterion parameters
        params = dict(group=self.group,
                      traces=self.sliced_traces,
                      spikes=self.sliced_spikes,
                      targets_count=self.groups*self.slices,
                      duration=self.sliced_duration,
                      onset=self.onset,
                      spikes_inline=self.spikes_inline,
                      spikes_offset=self.spikes_offset,
                      traces_inline=self.traces_inline,
                      traces_offset=self.traces_offset,
                      delays=delays,
                      when='start')
        criterion_name = self.criterion.__class__.__name__
        # criterion-specific parameters
        if criterion_name == 'GammaFactor':
            params['delta'] = self.criterion.delta
            params['coincidence_count_algorithm'] = self.criterion.coincidence_count_algorithm
            self.criterion_object = GammaFactorCriterion(**params)
        if criterion_name == 'LpError':
            params['p'] = self.criterion.p
            params['varname'] = self.criterion.varname
            self.criterion_object = LpErrorCriterion(**params)
        if criterion_name == 'VanRossum':
            params['tau'] = self.criterion.tau
            self.criterion_object = VanRossumCriterion(**params)
        if criterion_name == 'Brette':
            params['tau_metric'] = tau_metric
            self.criterion_object = BretteCriterion(**params)

    def update_neurongroup(self, **param_values):
        """
        Inject fitting parameters into the NeuronGroup
        """
        # Sets the parameter values in the NeuronGroup object
        self.group.reinit()
        for param, value in param_values.iteritems():
            self.group.state(param)[:] = kron(value, ones(self.slices))  # kron param_values if slicing
        # Reinitializes the model variables
        if self.initial_values is not None:
            for param, value in self.initial_values.iteritems():
                self.group.state(param)[:] = value

    def combine_sliced_values(self, values):
        """Sum per-slice results back into one value per candidate;
        handles both a single array and a tuple of arrays."""
        if type(values) is tuple:
            combined_values = tuple([sum(reshape(v, (self.slices, -1)), axis=0) for v in values])
        else:
            combined_values = sum(reshape(values, (self.slices, -1)), axis=0)
        return combined_values

    def evaluate(self, **param_values):
        """
        Use fitparams['delays'] to take delays into account
        Use fitparams['refractory'] to take refractory into account
        """
        delays = param_values.pop('delays', zeros(self.neurons))
        refractory = param_values.pop('refractory', zeros(self.neurons))
        tau_metric = param_values.pop('tau_metric', zeros(self.neurons))
        # repeat spike delays and refractory to take slices into account
        delays = kron(delays, ones(self.slices))
        refractory = kron(refractory, ones(self.slices))
        tau_metric = kron(tau_metric, ones(self.slices))
        self.update_neurongroup(**param_values)
        # Only the Brette criterion needs the per-candidate tau_metric.
        if self.criterion.__class__.__name__ == 'Brette':
            self.initialize_criterion(delays, tau_metric)
        else:
            self.initialize_criterion(delays)
        if self.use_gpu:
            # GPU path not implemented -- falls through without running.
            pass
            #########
            # TODO
            #########
#            # Reinitializes the simulation object
#            self.mf.reinit_vars(self.input, self.I_offset, self.spiketimes, self.spiketimes_offset, delays, refractory)
#            # LAUNCHES the simulation on the GPU
#            self.mf.launch(self.duration, self.stepsize)
#            coincidence_count = self.mf.coincidence_count
#            spike_count = self.mf.spike_count
        else:
            # set the refractory period
            if self.max_refractory is not None:
                self.group.refractory = refractory
            # Launch the simulation on the CPU
            self.group.clock.reinit()
            net = Network(self.group, self.criterion_object)
            net.run(self.duration)
        sliced_values = self.criterion_object.get_values()
        combined_values = self.combine_sliced_values(sliced_values)
        values = self.criterion_object.normalize(combined_values)
        return values