def __init__(self,prefix,new_pars=[],pars_file=[]): '''Creates and simulates a network in NEST''' #Temporary way to run without the selected pars file pars_file = [] print new_pars start_build_net = time.time() if pars_file==[]: # import generic params_d file import sys sys.path.append(home_directory+"/common/") import params_d_ssbn reload(params_d_ssbn) pars = params_d_ssbn.Parameters(new_pars) else: # import specific params_d file fobj,pathname,description = imp.find_module(pars_file) params_d_sp = imp.load_module(pars_file,fobj,pathname,description) pars = params_d_sp.Parameters(new_pars) print "else" print pars.__dict__ print "pars.num_chg_gpe",pars.num_chg_gpe print "pars.num_chg_stn",pars.num_chg_stn self.T_sim = pars.T_sim + pars.T_wup + pars.T_cdown self.record_vm = pars.record_vm self.recorders = {} self.events = {'spikes':[],'vm':[]} self.pars = pars self.pars.prefix = prefix self.pars.seed = new_pars["seed"] seed = self.pars.seed self.seed = seed print "seed",self.seed # INITIALIZE NETWORK ----------------------------------------------------------------------- nest_path_tmp = tempfile.mktemp(prefix=pars.nest_path_tmp) os.mkdir(nest_path_tmp) nest.ResetKernel() print seed[0] #nest.SetKernelStatus({"resolution":0.1}) shutil.rmtree(nest.GetStatus([0],'data_path')[0],ignore_errors=True) nest.SetKernelStatus({'resolution': pars.dt, 'print_time': pars.print_time, 'overwrite_files':pars.owr_files, 'grng_seed':seed[0], 'data_path':nest_path_tmp}) #print '\nBuilding network...' 
# CREATE SOURCES ---------------------------------------------------------------------------- # For the sine wave self.pg_exc_part_sin = nest.Create('sinusoidal_poisson_generator', 1) self.pg_exc_part_norm = nest.Create('poisson_generator', 1) self.pg_exc = nest.Create('poisson_generator', 1) self.pg_inh = nest.Create('poisson_generator', 1) nest.SetStatus(self.pg_exc, {'rate': pars.pg_rate_exc,'stop':self.pars.chg_time_stn}) # Sine wave of frequency 15 Hz nest.SetStatus(self.pg_exc_part_sin, {'rate': pars.pg_rate_exc*0.5,'start':self.pars.chg_time_stn,'frequency':15.,'amplitude':pars.pg_rate_exc*0.5}) nest.SetStatus(self.pg_exc_part_norm, {'rate': pars.pg_rate_exc,'start':self.pars.chg_time_stn}) nest.SetStatus(self.pg_inh, {'rate': pars.pg_rate_inh}) # CREATE POPULATIONS ----------------------------------------------------------------------- #print 'Creating populations...\n' # STN neurons_exc = [] self.pops_exc = range(len(pars.N_exc)) for ii,nr in enumerate(pars.N_exc): self.pops_exc[ii] = nest.Create(pars.model_type, abs(nr)) neurons_exc.extend(self.pops_exc[ii]) # set neuron parameters for every population independently for ntypes in range(len(pars.N_exc)): nest.SetStatus(self.pops_exc[ntypes], pars.neuron_params_exc[ntypes]) if pars.rnd_dist: nest.SetStatus(neurons_inh,'tau_m',pars.tau_m_rnd) neurons_inh = [] self.pops_inh = range(len(pars.N_inh)) for ii,nr in enumerate(pars.N_inh): self.pops_inh[ii] = nest.Create(pars.model_type, abs(nr)) neurons_inh.extend(self.pops_inh[ii]) # set neuron parameters for every population independently for ntypes in range(len(pars.N_inh)): nest.SetStatus(self.pops_inh[ntypes], pars.neuron_params_inh[ntypes]) if pars.rnd_dist: # False by default nest.SetStatus(neurons_inh,'tau_m',pars.tau_m_rnd) if pars.change_type: # Changes neuron type midway, 0 by default # Changing order, first Gpe, then STN self.time_lis = [0,pars.chg_time_gpe,pars.chg_time_stn] self.pops = self.pops_exc + self.pops_inh self.pops_exc = [item for sublist 
in self.pops_exc for item in sublist] self.pops_inh = [item for sublist in self.pops_inh for item in sublist] print "len(self.pops_exc)",len(self.pops_exc) print "len(self.pops_inh)",len(self.pops_inh) # Make connections ------------------------------------------------------------------------- self.pars.neurons_tot = len(self.pops_exc) + len(self.pops_inh) self.pars.pops_exc = self.pops_exc self.pars.pops_inh = self.pops_inh weights_exc = np.random.uniform(low=0.5,high=1.5,size=len(self.pops_exc)) delays_exc = np.ones(len(self.pops_exc)) nest.Connect(self.pg_exc, self.pops_exc,syn_spec={'weight':[ [x] for x in weights_exc],'delay':[[x] for x in delays_exc]}) weights_inh = np.random.uniform(low=0.5,high=1.5,size=len(self.pops_inh)) delays_inh = np.ones(len(self.pops_inh)) self.weights_exc = weights_exc self.weights_inh = weights_inh self.delays_inh = delays_inh self.delays_exc = delays_exc nest.Connect(self.pg_inh, self.pops_inh,syn_spec={'weight':[ [x] for x in weights_inh],'delay':[[x] for x in delays_inh]}) print "STN params" print nest.GetStatus([self.pops_exc[-1]]) print "GPe params" print nest.GetStatus([self.pops_inh[-1]]) #STN connections num_stn_gpe = int(pars.epsilon_stn_gpe * len(self.pops_inh)) nest.Connect(self.pops_exc,self.pops_inh,conn_spec={'rule':'fixed_outdegree','outdegree':num_stn_gpe,'autapses':False,'multapses':False}, syn_spec={'weight':pars.J_stn_gpe, 'delay': pars.del_stn_gpe}) #GPE connections num_gpe_gpe = int(pars.epsilon_gpe_gpe * len(self.pops_inh)) nest.Connect(self.pops_inh,self.pops_inh, conn_spec={'rule':'fixed_outdegree','outdegree':num_gpe_gpe,'autapses':False,'multapses':False}, syn_spec={'weight':pars.J_gpe_gpe, 'delay': pars.del_gpe_gpe}) num_gpe_stn = int(pars.epsilon_gpe_stn* len(self.pops_exc)) nest.Connect(self.pops_inh,self.pops_exc, conn_spec={'rule':'fixed_outdegree','outdegree':num_gpe_stn,'autapses':False,'multapses':False}, syn_spec={'weight':pars.J_gpe_stn, 'delay': pars.del_gpe_stn}) self.record_spikes = 
self.pops_exc+self.pops_inh sd = nest.Create('spike_detector',1) nest.SetStatus(sd,{'to_file':True,'to_memory':True,'withgid':True}) nest.Connect(self.record_spikes,sd) self.recorders['sd'] = sd if self.pars.record_vm != []: vm = nest.Create('voltmeter',1) #print 'Id of vm recorder: ',vm nest.SetStatus(vm,{'withtime':True,'withgid':True,'to_file':True,'to_memory':False}) nest.Connect(vm,self.pars.record_vm) nest.SetStatus(self.pars.record_vm,{'V_th':1000.}) # record free Vm self.recorders['vm'] = vm self.build_net_time = time.time()-start_build_net
class analyze_data(object): pars = params_d.Parameters([]) def __init__(self, prefix, netPars, seed, data_path=''): if not data_path == '': #self.data_path = pars.data_path self.data_path = data_path self.pars.prefix = prefix # Also copy the GIDs of excitatory and inhibitory pops, to plot them in separate colors if "pops_exc" in dir(netPars): self.pops_exc = netPars.pops_exc if "pops_inh" in dir(netPars): self.pops_inh = netPars.pops_inh self.pars.T_total = netPars.T_total self.seed = seed self.events = {} def get_pop_spikes(self, spikes, nmax, pop_id='all'): if pop_id == 'all': #neuron_nr = self.pars['neurons_tot'] neuron_nr = len(self.pars.pops_exc) + len(self.pars.pops_inh) else: if pop_id == []: print 'population not found' return [], [], [] else: #pop_id = pop_id[0] if nmax == []: idx = (spikes[:, 0] >= self.pars[pop_id][0]) & ( spikes[:, 0] <= self.pars[pop_id][-1]) else: idx = (spikes[:, 0] >= pop_id[0]) & (spikes[:, 0] < pop_id[0] + nmax) neuron_nr = nmax neuron_nr = min(pop_id[-1] - pop_id[0] + 1, nmax) spikes = spikes[idx] return pop_id, spikes, neuron_nr def load_info(self, prefix, data_path=[]): fname = self.data_path + prefix + '.info' fh = open(fname, 'r') info1 = cp.load(fh) return (info1) def load_spikes(self): if not 'spikes' in self.events.keys(): fname = self.pars.data_path + self.pars.prefix + '_spikes.npy' print "fname", fname spikes = np.load(fname) self.events['spikes'] = spikes return def load_vm(self): if not 'vm' in self.events.keys(): fname = self.data_path + self.pars['prefix'] + '_vm.npy' vm = np.load(fname) self.events['vm'] = vm if len(vm) > 0: print 'Imported Vm data' else: print 'Vm array is empty !' return def plot_raster(self, nmax=100, kernel_w=1): ''' plot raster of spikes for all neurons from 1-nr''' self.load_spikes() spikes = self.events['spikes'] if len(spikes) == 0: print 'Plot raster: spike array is empty !' 
return # JB: Separate the spikes of the two populations # Assuming the GIDs assigned are contigous, if thats not the case, this will fail idxpopE = np.logical_and(spikes[:, 0] >= self.pops_exc[0], spikes[:, 0] <= self.pops_exc[-1]) idxpopI = np.logical_and(spikes[:, 0] >= self.pops_inh[0], spikes[:, 0] <= self.pops_inh[-1]) pl.plot(spikes[idxpopE, 1], spikes[idxpopE, 0], 'r.', label='STN') pl.plot(spikes[idxpopI, 1], spikes[idxpopI, 0], 'b.', label='GPe') pl.legend() pl.show() def plot_vm(self, nid=[]): self.load_vm() self.load_spikes() vm = self.events['vm'] spikes = self.events['spikes'] if nid != []: idx1 = vm[:, 0] == nid idx2 = spikes[:, 0] == nid else: idx1 = vm[:, 0] > 0 idx2 = idx1 offset = np.max(vm[idx1, 2]) * 0.8 pl.plot(vm[idx1, 1], vm[idx1, 2]) pl.plot(spikes[idx2, 1], spikes[idx2, 0] * 0 + offset, 'ro') pl.show() #def comp_psth1(self,fig,pop_id='all',nmax=100,form = '-',kernel_w=1,res=0.1,plot_fl=1, label = 'one',kernel = 'normal', time_range = [], color=np.array([1.,1.,1.])/255.,binsize=100.): def comp_psth1(self, pop_id='all', nmax=100, form='-', kernel_w=1, res=0.1, plot_fl=1, label='one', kernel='normal', time_range=[], color=np.array([1., 1., 1.]) / 255., binsize=50.): ''' compute psth''' self.load_spikes() spikes = self.events['spikes'] if len(spikes) == 0: print 'Comp psth: spike array is empty!' return [], [] pop_id, spikes, neuron_nr = self.get_pop_spikes(spikes, nmax, pop_id) print neuron_nr if spikes == []: return [], [] if time_range != []: idx = (spikes[:, 1] > time_range[0]) & (spikes[:, 1] <= time_range[1]) spikes = spikes[idx] sim_time = time_range[1] - time_range[0] else: sim_time = self.pars.T_total psth, xx = np.histogram(spikes[:, 1], bins=np.arange(0, sim_time, binsize)) #fig.plot(xx[:-1],psth/((binsize/1000.)*neuron_nr),'-',linewidth=2.0,color=color) return psth / ((binsize / 1000.) 
* neuron_nr), xx def comp_psth(self, pop_id='all', nmax=100, form='-', kernel_w=1, res=0.1, plot_fl=1, label='one', kernel='normal', time_range=[], color=np.array([1., 1., 1.]) / 255.): ''' compute psth''' self.load_spikes() spikes = self.events['spikes'] if len(spikes) == 0: print 'Comp psth: spike array is empty!' return [], [] pop_id, spikes, neuron_nr = self.get_pop_spikes(spikes, nmax, pop_id) print neuron_nr if spikes == []: return [], [] if time_range != []: idx = (spikes[:, 1] > time_range[0]) & (spikes[:, 1] <= time_range[1]) spikes = spikes[idx] sim_time = time_range[1] - time_range[0] else: sim_time = self.pars.T_total if plot_fl: pl.plot(xx, psth, form, lw=3., label=label, color=color) pl.xlim([0, self.pars.T_sim + self.pars.T_wup]) return (psth, xx) def comp_mean_rate(self, time_range=[], pop_id='all', nmax=100): self.load_spikes() spikes = self.events['spikes'] if len(spikes) == 0: print 'Comp mean rate: spike array is empty !' return np.nan pop_id, spikes, neuron_nr = self.get_pop_spikes(spikes, nmax, pop_id) if len(spikes) == 0: return np.nan if time_range != []: idx = (spikes[:, 1] > time_range[0]) & (spikes[:, 1] <= time_range[1]) spikes = spikes[idx] if time_range == []: total_time = self.pars['T_total'] else: total_time = time_range[1] - time_range[0] mean_rate = 1. 
* len(spikes) / neuron_nr / total_time * 1e3 return mean_rate # To compute the power spectral density and return the two biggest values def psd(self, bin_w=100., nmax=4000, time_range=[], pop_id='all'): self.load_spikes() spikes = self.events['spikes'] pop_id, spikes, neuron_nr = self.get_pop_spikes(spikes, nmax, pop_id) if len(spikes) == 0: print 'psd: spike array is empty' return np.nan, np.nan, np.nan, np.nan if time_range != []: idx = (spikes[:, 1] > time_range[0]) & (spikes[:, 1] <= time_range[1]) spikes = spikes[idx] if time_range == []: total_time = self.pars.T_total else: total_time = time_range[1] - time_range[0] if len(spikes) == 0: print 'psd: spike array is empty' return np.nan, np.nan, np.nan, np.nan ids = np.unique(spikes[:, 0])[:nmax] nr_neurons = len(ids) #psd, max_value, freq,h = misc2.psd_sp(spikes[:,1],nr_bins,nr_neurons) if time_range == []: bins = np.arange(self.pars.T_wup, self.pars.T_total, bin_w) else: bins = np.arange(time_range[0], time_range[1], bin_w) a, b = np.histogram(spikes[:, 1], bins) ff = abs(np.fft.fft(a - np.mean(a)))**2 Fs = 1. / (bin_w * 0.001) freq2 = np.fft.fftfreq(len(bins))[0:len(bins / 2) + 1] freq = np.linspace(0, Fs / 2, len(ff) / 2 + 1) px = ff[0:len(ff) / 2 + 1] max_px = np.max(px[1:]) idx = px == max_px corr_freq = freq[pl.find(idx)] new_px = px max_pow = new_px[pl.find(idx)] return new_px, freq, freq2, corr_freq[0], max_pow def spec_entropy(self, nmax, bin_w=5., pop_id='pops_exc', time_range=[], freq_range=[]): '''Function to calculate the spectral entropy''' power, freq, freq2, dummy, dummy = self.psd(pop_id=pop_id, bin_w=bin_w, time_range=time_range, nmax=nmax) #print freq if freq_range != []: power = power[(freq > freq_range[0]) & (freq < freq_range[1])] freq = freq[(freq > freq_range[0]) & (freq < freq_range[1])] k = len(freq) power = power / sum(power) sum_power = 0 for ii in range(k): sum_power += (power[ii] * np.log(power[ii])) spec_ent = -(sum_power / np.log(k)) return spec_ent, power, freq, freq2
't_ref': 5.0, 'g_L': 10.0, 'C_m': 200.0, 'E_ex': 0.0, 'E_in': -80.0, 'tau_syn_ex': 5.0, 'tau_syn_in': 10.0, 'tau_minus': 20.0 } connect_stn_gpe = True connect_poisson_bkg = True poi_rate_bkg_gpe = np.arange(300., 1500., 200.) pars = params_d.Parameters( ) # The param files object with all common parameters, like simtimes, number of neurons in GPe/STN, connectivity, delays etc simtime = pars.T_total # Ngpe = pars.order * pars.N[1] Nstn = pars.order * pars.N[0] print "Ngpe", Ngpe print "Nstn", Nstn def runSim(params): stn_inp_rate = params["stn_inp"] seed = params["seed"] for ii in range(len(poi_rate_bkg_gpe)): nest.ResetKernel() path = os.getcwd()
reload(adata)
import pdb
import pylab as pl
import matplotlib.cm as cm
import os
import pickle
import matplotlib.pyplot as plt

num_threads = 8  # number of NEST threads, i.e. number of per-thread .gdf files

# "y": compute avg firing rates and spectral entropies from the raw data
# files and save them into Data_<ref>.pickle (must be run at least once);
# "n": read the existing Data_<ref>.pickle and only plot the figure.
run_everything = sys.argv[1]
# refractory period; raw data files from simulations with this refractory
# period must already exist.
ref = sys.argv[2]
pars = params_d.Parameters()


def get_sim_3d(match_pars, seed):
    '''Build an analyze_data object for one simulation run.

    match_pars is currently unused (kept for interface compatibility);
    prefix1 / path1 / pars are module-level globals set elsewhere.'''
    res = adata.analyze_data(prefix1, data_path=path1, netPars=pars, seed=seed)
    return res


def concatFiles(path, prefix):
    '''Concatenate the per-thread spike files <prefix>-<t>.gdf (one per NEST
    thread) into a single <prefix>.gdf under *path*; empty files are skipped.'''
    # BUGFIX: use a context manager so the output handle is flushed and
    # closed (the original opened it with file() and never closed it).
    with open(path + prefix + '.gdf', 'a+') as fhand:
        for x in xrange(num_threads):
            filename = path + "/" + prefix + "-{}.gdf".format(x)
            if os.stat(filename).st_size == 0:
                continue
            temp = np.loadtxt(filename)
            np.savetxt(fhand, temp)
'E_ex': 0.0, 'E_in': -80.0, 'tau_syn_ex': 5.0, 'tau_syn_in': 10.0, 'tau_minus': 20.0, 'spb': 1.0 } connect_stn_gpe = True connect_poisson_bkg = True # lesser resolution for firing rates to STN and GPe poi_rate_bkg_gpe = np.arange(300., 1500., 200.) poi_rate_bkg_stn = np.arange(1000., 2000., 200.) pars = params_d.Parameters( ) # Also get the connectivity, delays and synaptic strengths from here pars.T_sim = 1000 simtime = pars.T_sim + pars.T_wup Ngpe = pars.order * pars.N[1] Nstn = pars.order * pars.N[0] print "Ngpe", Ngpe print "Nstn", Nstn seed_path = "" seeds = pickle.load(open(seed_path + "/seeds.pickle", "r"))[:3] def runSim(samp=False, new_params=[]): stn_fr_inps_all = np.zeros( (len(seeds), len(poi_rate_bkg_gpe), len(poi_rate_bkg_stn)))