def vary_muext_isopop(pop_iso):
    """Sweep the mean external input to one isolated population and plot its rate.

    The population ``pop_iso`` is cut off from the rest of the circuit
    (all connection pairs removed, size fixed at 1000), driven with a range
    of external-input means ``mu``, and its steady-state firing rate
    (spikes after a 200 ms transient) is plotted against ``mu``.

    Parameters
    ----------
    pop_iso : str
        Population label ('E', 'P', 'S' or 'V') to isolate.

    NOTE(review): this returns only the ``rate`` from the LAST mu value of the
    sweep; returning ``rate_list`` looks like the intent -- confirm with callers.
    """
    rng_seed = 300
    # Extra parameters: zero the spontaneous-rate targets, fix the isolated
    # population's size, and remove all synaptic connections.
    pe = dict()
    for pop in pops:
        pe[pop] = dict()
        pe[pop]['r_spt'] = 0
    pe[pop_iso]['N'] = 1000
    pe['conn_pairs'] = []
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.isolate_population(pop_iso=pop_iso)
    model.build()
    # E cells need a stronger drive range than the interneuron populations.
    if pop_iso == 'E':
        mu_isos = np.linspace(10, 30, 20)
    else:
        mu_isos = np.linspace(5, 20, 20)
    rate_list = list()
    for mu_iso in mu_isos:
        model.params[pop_iso]['mu'] = mu_iso * mV
        # Re-seed and reinitialise so every sweep point starts from the
        # same initial conditions.
        model.rng_seed = rng_seed
        model.reinit()
        net = Network(model)
        net.run(1 * second)
        mon = model.monitor
        p = model.params
        pop = pop_iso
        # Spike times of the isolated population (spike = (index, time)).
        spiketime = np.array([spike[1] for spike in mon['Spike' + pop].spikes])
        # Population rate over the post-transient window (> 200 ms).
        rate = np.sum(spiketime > 200 * ms) / p[pop]['N'] / (model.clock.t - 200 * ms)
        rate_list.append(rate)
    plt.figure()
    plt.plot(mu_isos, rate_list)
    return rate
def vary_gEE_only(mu=17):
    """Sweep the E-to-E conductance with all other connections removed.

    For each gEE value a fresh network containing only the ('E', 'E')
    connection is built and run for 2 s; the steady-state E-population rate
    (after a 200 ms transient) is printed and finally plotted against gEE.

    Parameters
    ----------
    mu : float
        Mean external input to the E population, in mV.
    """
    pops = ['E', 'P', 'S', 'V']
    gEE_plot = np.linspace(0, 0.7, 5)
    rateE = list()
    for gEE in gEE_plot:
        # Extra parameters: keep only the E->E connection and set its strength.
        pe = dict()
        pe['conn_pairs'] = [('E', 'E')]
        pe['g'] = np.zeros((4, 4))
        pe['g'][0, 0] = gEE
        for pop in pops:
            pe[pop] = dict()
            pe[pop]['V_Tstd'] = 0 * mV  # no threshold heterogeneity
        pe['E']['mu'] = mu * mV
        model = Model(rng_seed=10, extra_para=pe, random_conn=False)
        model.make_model()
        model.add_monitors(record_full=True)
        model.build()
        #for pop_act in model.pops:
        #model.make_simple_model(pop_act=pop_act)
        model.reinit()
        net = Network(model)
        net.run(2.0 * second)
        #model.PSP_plot()
        #model.raster_plot()
        mon = model.monitor
        p = model.params
        rate = dict()
        for pop in model.pops:
            #print pop,
            spiketime = np.array([
                spike[1] for spike in mon['Spike' + pop].spikes
            ])  # get spike timing
            rate[pop] = np.sum(spiketime > 200 * ms) / p[pop]['N'] / (
                model.clock.t - 200 * ms)  # calculate population rate
        #print ''
        print 'gEE = {:0.2f} nS, rateE = {:0.2f}'.format(gEE, rate['E'])
        rateE.append(rate['E'])
    plt.figure()
    plt.plot(gEE_plot, rateE)
    plt.xlabel('gEE (pF)')
    plt.ylabel('rateE (Hz)')
def print_params(): model = Model(rng_seed=1) p = model.params import tabulate as T import collections NAME = 0 UNIT = 1 UNITNAME = 2 MEANING = 3 pinfo = collections.OrderedDict([ ('N', ['$N$', 1, '', 'Number of neurons']), ('C_m', ['$C_m$', pF, 'pF', 'Membrane capacitance']), ('g_L', ['$g_L$', nS, 'nS', 'Leak conductance']), ('tau', [r'$\tau$', ms, 'ms', 'Membrane time constant']), ('E_L', ['$E_L$', mV, 'mV', 'Resting potential']), ('V_T', ['$V_T$', mV, 'mV', 'Threshold voltage']), ('Delta_T', ['$\Delta_T$', mV, 'mV', 'EIF slope parameter']), ('V_re', ['$V_{\mathrm{re}}$', mV, 'mV', 'Reset potential']), ('tau_refrac', [r'$\tau_{\mathrm{ref}}$', ms, 'ms', 'Refractory period']), ('a', ['$a$', nS, 'nS', 'Subthreshold adaptation']), ('b', ['$C_m$', pA, 'pA', 'Spike-triggered adaptation']), ('tau_w', [r'$\tau_w$', ms, 'ms', 'Adaptation time constant']), ('sigma', [ '$\sigma_{\mathrm{ext}}$', mV, 'mV', 'Standard deviation of external input' ]), ('E_syn', ['$E_{\mathrm{syn}}$', mV, 'mV', 'Reversal potential']), ('tau_syn', [r'$\tau_{\mathrm{syn}}$', ms, 'ms', 'Synaptic time constant']) ]) tabledata = collections.OrderedDict() tabledata['name'] = [pinfo[key][NAME] for key in pinfo] for pop in pops: tabledata[pop] = [ p[pop][key] / pinfo[key][UNIT] for key in pinfo.keys()[:-2] ] # The rest need some special treatment tabledata[pop] += [p['E']['E_' + pop] / pinfo[key][UNIT]] tabledata[pop] += [p['E']['tau_d' + pop] / pinfo[key][UNIT]] tabledata['unit'] = [pinfo[key][UNITNAME] for key in pinfo] tabledata['meaning'] = [pinfo[key][MEANING] for key in pinfo] T.LATEX_ESCAPE_RULES = {} headers = [''] + pops + ['Unit', 'Description'] print T.tabulate(tabledata, headers, tablefmt='latex') for mat in [p['g'] / nS, p['p0'], p['p2']]: print '\\begin{bmatrix}' print " \\\\\n".join([" & ".join(map(str, line)) for line in mat]) print '\\end{bmatrix}\n'
def __init__(self, version=0, recover=True):
    """Set up the PV/SST-density experiment, reloading cached results if present.

    Parameters
    ----------
    version : int
        Suffix used to build the pickle file name, separating parameter sets.
    recover : bool
        If True and the save file exists, load previous results instead of
        starting from an empty data dictionary.
    """
    self.start = time.time()  # wall-clock reference for progress reports
    model = Model(rng_seed=1)
    self.pops = model.pops
    self.pops_from = self.pops
    self.version = version
    self.savefile = 'spiking_PVSSTdensity' + str(self.version) + '.pkl'
    if os.path.isfile('data/' + self.savefile) and recover:
        print 'Loading from ' + self.savefile + '...'
        with open('data/' + self.savefile, 'rb') as f:
            self.data = pickle.load(f)
    else:
        self.data = dict()
        # NOTE(review): a Model with the same seed was already built above;
        # this second construction looks redundant -- confirm before removing.
        model = Model(rng_seed=1)
        p_orig = model.params
        self.data['p_orig'] = p_orig  # keep the unscaled reference parameters
    self.runtime = 1.  # second
    self.mu_step = 0.5 * mV  # PV input increment used when input_PV == 1
def get_backgroundinput_fullcircuit_RATE(self, density_P, density_S, mu0):
    """Fit background inputs for the full circuit by matching target RATES.

    Scales the PV and SST population sizes by ``density_P``/``density_S``,
    then uses SLSQP to find the vector of external-input means (one per
    population, in mV) that reproduces the spontaneous rates ``r_spt`` stored
    in the original parameters.

    Parameters
    ----------
    density_P, density_S : float
        Multipliers on the original PV and SST population sizes.
    mu0 : sequence of float
        Initial guess for the per-population input means (mV).

    Returns the optimized input means as returned by the optimizer (res.x).

    NOTE(review): mixes the module-level ``pops`` with ``self.pops`` -- they
    are presumably the same list, but confirm.
    """
    p_orig = self.data['p_orig']
    # Target spontaneous rates, one per population.
    r_spts = np.array([p_orig[pop]['r_spt'] for pop in self.pops])
    # Search bounds for each population's input mean (mV).
    bound_dict = {
        'E': (20, 45),
        'P': (10, 30),
        'S': (10, 30),
        'V': (10, 30)
    }
    bounds = [bound_dict[pop] for pop in pops]
    rng_seed = 300
    pe = dict()
    for pop in pops:
        pe[pop] = dict()
    pe['P']['N'] = int(p_orig['P']['N'] * density_P)
    pe['S']['N'] = int(p_orig['S']['N'] * density_S)
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.make_model()
    model.build()

    def get_r(mus, model, rng_seed):
        # Run the network once with input means ``mus`` and return the
        # per-population rates (post-200 ms transient).
        for pop, mu in zip(pops, mus):
            model.params[pop]['mu'] = mu * mV
        model.rng_seed = rng_seed
        model.reinit()
        net = Network(model)
        net.run(self.runtime * second)
        mon = model.monitor
        p = model.params
        rate_list = list()
        for pop in pops:
            spiketime = np.array(
                [spike[1] for spike in mon['Spike' + pop].spikes])
            rate = np.sum(spiketime > 200 * ms) / p[pop]['N'] / (
                model.clock.t - 200 * ms)
            rate_list.append(rate)
        print mus,
        print rate_list
        return np.array(rate_list)

    # Squared-error objective against the target rates.
    obj_func = lambda x: np.sum((get_r(x, model, rng_seed) - r_spts)**2)
    res = scipy.optimize.minimize(obj_func,
                                  x0=mu0,
                                  bounds=bounds,
                                  method='SLSQP',
                                  options={
                                      'maxiter': 300,
                                      'ftol': 0.001,
                                      'eps': 0.01
                                  })
    print res
    return res.x
def get_backgroundinput_all(self, recover=True, fullcircuit=True): model = Model(rng_seed=1) p_orig = model.params savefile = 'spiking_backgroundmu' + str(self.version) + '.pkl' if os.path.isfile('data/' + savefile) and recover: with open('data/' + savefile, 'rb') as f: self.data_mu = pickle.load(f) else: self.data_mu = dict() self.data_mu['p_orig'] = p_orig #density_P_plot = np.repeat([0.5,0.75,1,1.25,1.5],5) #density_S_plot = np.tile([0.5,0.75,1,1.25,1.5],5) density_P_plot = np.repeat([0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75], 7) density_S_plot = np.tile([0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75], 7) #density_P_plot = [0.5,0.5,1.5,1.5] #density_S_plot = [0.5,1.5,0.5,1.5] #density_P_plot = [0.2,0.2,1.8,1.8] #density_S_plot = [0.2,1.8,0.2,1.8] print 'Calculating Background inputs' for density_P, density_S in zip(density_P_plot, density_S_plot): if (density_P, density_S) not in self.data_mu.keys(): mu_spt_list = list() for pop_iso in self.pops: mu_spt = self.get_backgroundinput_popiso( pop_iso, density_P, density_S) mu_spt_list.append(mu_spt) if fullcircuit: mu0 = copy(mu_spt_list) # use this as initial guess mu_spt_list = self.get_backgroundinput_fullcircuit( density_P, density_S, mu0) self.data_mu[(density_P, density_S)] = mu_spt_list print 'density P,S ({:0.2f},{:0.2f})'.format( density_P, density_S), print mu_spt_list sys.stdout.write( 'Time spent {:0.1f} second.'.format(time.time() - self.start)) sys.stdout.flush() with open('data/' + savefile, 'wb') as f: pickle.dump(self.data_mu, f)
def get_backgroundinput_fullcircuit(self, density_P, density_S, mu0):
    """Fit background inputs for the full circuit by matching mean VOLTAGES.

    Scales the PV and SST population sizes by ``density_P``/``density_S``,
    then uses L-BFGS-B to find the vector of external-input means (one per
    population, in mV) whose simulated mean membrane potentials reproduce the
    targets ``V_spts`` stored in the original parameters.

    Parameters
    ----------
    density_P, density_S : float
        Multipliers on the original PV and SST population sizes.
    mu0 : sequence of float
        Initial guess for the per-population input means (mV).

    Returns the optimized input means (res.x).

    NOTE(review): mixes the module-level ``pops`` with ``self.pops`` -- they
    are presumably the same list, but confirm.
    """
    p_orig = self.data['p_orig']
    # Target mean membrane potentials, one per population.
    V_spts = p_orig['V_spts']
    # Search bounds for each population's input mean (mV).
    bound_dict = {
        'E': (25, 30),
        'P': (8, 20),
        'S': (13, 18),
        'V': (10, 18)
    }
    bounds = [bound_dict[pop] for pop in pops]
    rng_seed = 300
    pe = dict()
    for pop in pops:
        pe[pop] = dict()
    pe['P']['N'] = int(p_orig['P']['N'] * density_P)
    pe['S']['N'] = int(p_orig['S']['N'] * density_S)
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.make_model()
    model.build()

    def get_r(mus, model, rng_seed):
        # Run the network once with input means ``mus`` and return the
        # per-population time-averaged mean membrane potentials (mV).
        for pop, mu in zip(pops, mus):
            model.params[pop]['mu'] = mu * mV
        model.rng_seed = rng_seed
        model.reinit()
        net = Network(model)
        net.run(self.runtime * second)
        V_list = np.array([
            model.monitor['V' + pop].mean.mean() / mV for pop in self.pops
        ])
        #print mus,
        #print V_list
        return V_list

    # Squared-error objective against the target voltages.
    obj_func = lambda x: np.sum((get_r(x, model, rng_seed) - V_spts)**2)
    #res = scipy.optimize.minimize(obj_func,x0=mu0,bounds=bounds,method='SLSQP',options={'maxiter':100,'eps':0.2})
    res = scipy.optimize.minimize(obj_func,
                                  x0=mu0,
                                  bounds=bounds,
                                  method='L-BFGS-B',
                                  options={
                                      'maxfun': 500,
                                      'eps': 0.2
                                  })
    print res
    return res.x
def get_backgroundinput_popiso_RATE(self, pop_iso, density_P, density_S):
    """Fit the background input of one ISOLATED population by matching its RATE.

    The population ``pop_iso`` is isolated from the circuit (PV/SST sizes
    scaled by the density factors), and a bounded scalar search finds the
    input mean (mV) whose simulated firing rate matches the spontaneous
    target ``r_spt`` from the original parameters.

    Parameters
    ----------
    pop_iso : str
        Population label to isolate and fit.
    density_P, density_S : float
        Multipliers on the original PV and SST population sizes.

    Returns the fitted input mean (res.x, in mV units of the search space).
    """
    p_orig = self.data['p_orig']
    r_spt = p_orig[pop_iso]['r_spt']  # target spontaneous rate
    rng_seed = 300
    pe = dict()
    for pop in pops:
        pe[pop] = dict()
    pe['P']['N'] = int(p_orig['P']['N'] * density_P)
    pe['S']['N'] = int(p_orig['S']['N'] * density_S)
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.isolate_population(pop_iso=pop_iso)
    model.build()

    def get_r(mu_iso, model, rng_seed):
        # Rate of the isolated population for input mean ``mu_iso`` (mV),
        # discarding the first 200 ms as transient.
        model.params[pop_iso]['mu'] = mu_iso * mV
        model.rng_seed = rng_seed
        model.reinit()
        net = Network(model)
        net.run(self.runtime * second)
        mon = model.monitor
        p = model.params
        pop = pop_iso
        spiketime = np.array(
            [spike[1] for spike in mon['Spike' + pop].spikes])
        rate = np.sum(spiketime > 200 * ms) / p[pop]['N'] / (
            model.clock.t - 200 * ms)
        return rate

    # Squared-error objective against the target rate.
    obj_func = lambda x: (get_r(x, model, rng_seed) - r_spt)**2
    # Per-population search bounds on the input mean (mV).
    bound_dict = {
        'E': (20, 45),
        'P': (10, 30),
        'S': (10, 30),
        'V': (10, 30)
    }
    bounds = bound_dict[pop_iso]
    res = scipy.optimize.minimize_scalar(obj_func,
                                         bounds=bounds,
                                         method='Bounded',
                                         options={
                                             'maxiter': 30,
                                             'xatol': 0.1
                                         })
    return res.x
def get_backgroundinput_popiso(self, pop_iso, density_P, density_S):
    """Fit the background input of one ISOLATED population by matching mean V.

    The population ``pop_iso`` is isolated from the circuit (PV/SST sizes
    scaled by the density factors), and a bounded scalar search finds the
    input mean (mV) whose simulated mean membrane potential matches the
    target ``V_spt`` from the original parameters.

    Parameters
    ----------
    pop_iso : str
        Population label to isolate and fit.
    density_P, density_S : float
        Multipliers on the original PV and SST population sizes.

    Returns the fitted input mean (res.x, in mV units of the search space).
    """
    p_orig = self.data['p_orig']
    V_spt = p_orig[pop_iso]['V_spt']  # target mean membrane potential
    rng_seed = 300
    pe = dict()
    for pop in pops:
        pe[pop] = dict()
    pe['P']['N'] = int(p_orig['P']['N'] * density_P)
    pe['S']['N'] = int(p_orig['S']['N'] * density_S)
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.isolate_population(pop_iso=pop_iso)
    model.build()

    def get_V(mu_iso, model, rng_seed):
        # Time-averaged mean membrane potential (mV) of the isolated
        # population for input mean ``mu_iso``.
        model.params[pop_iso]['mu'] = mu_iso * mV
        model.rng_seed = rng_seed
        model.reinit()
        net = Network(model)
        net.run(self.runtime * second)
        meanV = model.monitor['V' + pop_iso].mean.mean() / mV
        #print mu_iso,
        #print meanV
        return meanV

    # Squared-error objective against the target voltage.
    obj_func = lambda x: (get_V(x, model, rng_seed) - V_spt)**2
    # Per-population search bounds on the input mean (mV).
    bound_dict = {
        'E': (25, 30),
        'P': (8, 20),
        'S': (13, 18),
        'V': (10, 18)
    }
    bounds = bound_dict[pop_iso]
    res = scipy.optimize.minimize_scalar(obj_func,
                                         bounds=bounds,
                                         method='Bounded',
                                         options={
                                             'maxiter': 100,
                                             'xatol': 0.01
                                         })
    #print res
    return res.x
def sample_run(mus=None, g=None, Np=None, Ns=None, rng_seed=300):
    """Run one sample simulation, save a raster figure, and print diagnostics.

    Builds the full network (optionally overriding the conductance matrix,
    PV/SST population sizes, and per-population input means), runs it for
    1 s, saves a spike raster to ``figure/spikingraster.pdf``, and prints
    population sizes, mean voltages, input currents, conductances, spike-train
    CVs, and the external input statistics.

    Parameters
    ----------
    mus : sequence of float or None
        Input means for E, P, S, V (mV).  If None, the means fitted for
        density (1, 1) are loaded from the version-3 background-input file.
    g : array or None
        Optional conductance matrix override.
    Np, Ns : int or None
        Optional overrides of the PV and SST population sizes.
    rng_seed : int
        Random seed for model construction and initialization.
    """
    runtime = 1.0
    pe = dict()
    if g is not None:
        pe['g'] = g
    if mus is None:
        # Fall back to the background inputs fitted at density (1, 1).
        version = 3
        savefile = 'spiking_backgroundmu' + str(version) + '.pkl'
        with open('data/' + savefile, 'rb') as f:
            data_mu = pickle.load(f)
        mus = data_mu[(1, 1)]
    for pop, mu in zip(['E', 'P', 'S', 'V'], mus):
        pe[pop] = dict()
        pe[pop]['mu'] = mu * mV
    if Np is not None:
        pe['P']['N'] = Np
    if Ns is not None:
        pe['S']['N'] = Ns
    model = Model(rng_seed=rng_seed, extra_para=pe, random_conn=False)
    model.make_model()
    model.add_monitors(record_full=True)
    model.build()
    model.rng_seed = rng_seed
    model.reinit()
    net = Network(model)
    print '***************Sample Run*********************'
    net.run(runtime * second, report='text')
    mon = model.monitor
    p = model.params
    # Spike raster, populations stacked V/S/P/E bottom-to-top.
    fig = plt.figure(figsize=(3, 2))
    ax = fig.add_axes([0.2, 0.2, 0.7, 0.7])
    raster_plot(mon['SpikeV'],
                mon['SpikeS'],
                mon['SpikeP'],
                mon['SpikeE'],
                showgrouplines=True,
                color='black',
                markersize=2)
    xlabel('Time (ms)', fontsize=7)
    xlim([0, 1000])
    xticks([0, 500, 1000])
    yticks([3.5, 2.5, 1.5, 0.5], ['E', 'PV', 'SST', 'VIP'], rotation=90)
    ylabel('Population', fontsize=7)
    plt.tick_params(axis='both', which='major', labelsize=7)
    plt.savefig('figure/spikingraster.pdf')
    # Diagnostics; trailing commas keep each group on one output line (Py2).
    print '\nNumber of neurons',
    print[p[pop]['N'] for pop in model.pops],
    print '\nAverage Membrane potential (mV)',
    for pop in model.pops:
        print '{:0.2f}'.format(mon['V' + pop].values.mean() / mV),
    print '\nAverage Membrane potential (mV)',
    for pop in model.pops:
        print '{:0.2f}'.format(mon['V' + pop].mean.mean() / mV),
    # Currents/conductances averaged over the post-transient window.
    print '\nInput current to E (pA)',
    for pop in model.pops:
        print '{:0.2f}'.format(
            mon['IE' + pop].values[:, mon['IE' +
                                          pop].times > 200 * ms].mean().mean()
            / pA),
    print '\nInput current to S (pA)',
    for pop in model.pops:
        print '{:0.2f}'.format(
            mon['IS' + pop].values[:, mon['IS' +
                                          pop].times > 200 * ms].mean().mean()
            / pA),
    print '\nConductance to E (nS)',
    for pop in model.pops:
        print '{:0.2f}'.format(
            mon['gE' + pop].values[:, mon['gE' +
                                          pop].times > 200 * ms].mean().mean()
            / nS),
    # Coefficient of variation of ISIs, neurons with > 5 spikes only.
    print '\nCV',
    for pop in model.pops:
        CV_list = list()
        for key, val in mon['Spike' + pop].spiketimes.iteritems():
            if len(val) > 5:
                CV_list.append(CV(val))
        print '{:0.2f}'.format(np.mean(CV_list)),
    print '\nMu external',
    for pop in model.pops:
        print '{:0.2f}'.format(p[pop]['mu'] / mV),
    print '\nsigma external',
    for pop in model.pops:
        print '{:0.2f}'.format(p[pop]['sigma'] / mV),
def run_PSPs(pop_acts=None, g=None):
    """Simulate single-population activation and plot the resulting PSPs.

    For every population in ``pop_acts`` a simplified model is built in which
    only that population is activated, run for 100 ms, and the postsynaptic
    potentials are plotted via ``PSP_plot``.

    Parameters
    ----------
    pop_acts : list of str or None
        Populations to activate one at a time; defaults to all four.
    g : array or None
        Optional conductance matrix override passed as an extra parameter.
    """
    extra = {}
    if g is not None:
        extra['g'] = g
    if pop_acts is None:
        pop_acts = ['E', 'P', 'S', 'V']
    for active_pop in pop_acts:
        sim = Model(rng_seed=10, extra_para=extra)
        sim.make_simple_model(pop_act=active_pop)
        sim.add_monitors(record_full=True)
        sim.build()
        sim.reinit(simple_reinit=True)
        Network(sim).run(0.1 * second)
        sim.PSP_plot()
def run_density(self, input_PV, density_P, density_S, n_ran, n_rnd_target):
    """Run repeated simulations at one PV/SST density and collect statistics.

    Builds the network with population sizes scaled by the density factors,
    input means taken from the fitted background inputs for this density pair,
    and the PV input raised by ``mu_step`` when ``input_PV`` is 1.  The model
    is then re-run with seeds ``300 + 88*i`` for i in [n_ran, n_rnd_target),
    accumulating per-population rates and per-connection mean conductances
    and currents.

    Parameters
    ----------
    input_PV : int
        1 to add the extra PV drive, 0 for baseline.
    density_P, density_S : float
        Multipliers on the original PV and SST population sizes.
    n_ran, n_rnd_target : int
        Range of random-seed repetition indices to run (resume-friendly).

    Returns a DataDict of accumulated lists keyed 'rate<pop>', 'g<pop><from>'
    and 'I<pop><from>'.
    """
    p_orig = self.data['p_orig']
    print '\nPV input {:d}, density P,S ({:0.2f},{:0.2f})'.format(
        input_PV, density_P, density_S)
    # Extra parameters
    mu_spt_list = self.data_mu[(density_P, density_S)]
    pe = dict()
    for pop in self.pops:
        pe[pop] = dict()
        pe[pop]['mu'] = mu_spt_list[self.pops.index(pop)] * mV
    pe['P']['N'] = int(p_orig['P']['N'] * density_P)
    pe['S']['N'] = int(p_orig['S']['N'] * density_S)
    pe['P']['mu'] = pe['P'][
        'mu'] + input_PV * self.mu_step  # increase if input_PV is 1
    data0 = DataDict()
    model = Model(rng_seed=300, extra_para=pe)
    model.make_model()
    model.add_monitors(record_full=False)
    model.build()
    for i_rnd in range(n_ran, n_rnd_target):  # different random seeds
        model.rng_seed = 300 + 88 * i_rnd
        model.reinit()
        net = Network(model)
        net.run(self.runtime * second)
        mon = model.monitor
        p = model.params
        for pop in data0.pops:
            spiketime = np.array([
                spike[1] for spike in mon['Spike' + pop].spikes
            ])  # get spike timing
            data0['rate' + pop].append(
                np.sum(spiketime > 200 * ms) / p[pop]['N'] /
                (model.clock.t - 200 * ms))  # calculate population rate
            # Mean conductance (nS) and current (pA) for each connection
            # onto this population.
            for pop_from in data0.pops_from:
                data0['g' + pop + pop_from].append(
                    mon['g' + pop + pop_from].mean.mean() / nS)
                data0['I' + pop + pop_from].append(
                    mon['I' + pop + pop_from].mean.mean() / pA)
        # Inline progress report on a single rewritten console line.
        sys.stdout.write('\r')
        sys.stdout.write(
            'Time spent {:0.1f} second. Repeated {:d} times.'.format(
                time.time() - self.start, i_rnd + 1))
        sys.stdout.flush()
    return data0