def randgen(std):
    # x, repeats and npts are assumed to be defined at module level
    pl.seed(int(std*100))
    a = 0.2*pl.cos(x)*pl.sqrt(repeats)
    b = pl.zeros(npts)
    for r in range(repeats):
        b += (0.5 - pl.rand(npts))*std
    return a + b
def make_white_noise_stimuli(cell, input_idx, max_freq, weight=0.0005):
    """ Makes a white noise input synapse to the cell """
    plt.seed(1234)
    # Make an array with sinusoids with equal amplitude but random phases.
    tot_ntsteps = round((cell.tstopms - cell.tstartms) / cell.timeres_NEURON + 1)
    I = np.zeros(tot_ntsteps)
    tvec = np.arange(tot_ntsteps) * cell.timeres_NEURON
    for freq in xrange(1, max_freq + 1):
        I += np.sin(2 * np.pi * freq * tvec/1000. + 2*np.pi*np.random.random())
    input_array = weight * I
    noiseVec = neuron.h.Vector(input_array)

    # Make the synapse
    i = 0
    syn = None
    for sec in cell.allseclist:
        for seg in sec:
            if i == input_idx:
                syn = neuron.h.ISyn(seg.x, sec=sec)
            i += 1
    if syn is None:
        raise RuntimeError("Wrong stimuli index")
    syn.dur = 1E9
    syn.delay = 0
    noiseVec.play(syn._ref_amp, cell.timeres_NEURON)
    return cell, syn, noiseVec
def gather_results(self, randseed=None, results=None):
    if randseed is not None:
        pl.seed(randseed)
    if results is None:
        results = pl.rand(len(self.data['Results']))
    self.data['Results'][-len(results):] = results
    return results
def test_linearized():
    cell_params_hoc['custom_code'] = [join(model_path, 'custom_codes.hoc'),
                                      join(model_path, 'biophys3_Ih_linearized_mod.hoc')]
    cell_params_py['custom_fun_args'] = [{'conductance_type': 'Ih_linearized',
                                          'hold_potential': -70}]

    plt.seed(0)
    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_hoc)
    insert_synapses(synapseParameters_AMPA, cell, **insert_synapses_AMPA_args)
    insert_synapses(synapseParameters_NMDA, cell, **insert_synapses_NMDA_args)
    insert_synapses(synapseParameters_GABA_A, cell, **insert_synapses_GABA_A_args)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '5_linearized_hoc')

    plt.seed(0)
    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_py)
    insert_synapses(synapseParameters_AMPA, cell, **insert_synapses_AMPA_args)
    insert_synapses(synapseParameters_NMDA, cell, **insert_synapses_NMDA_args)
    insert_synapses(synapseParameters_GABA_A, cell, **insert_synapses_GABA_A_args)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '5_linearized_py')
def make_white_noise_stimuli(cell, input_idx, max_freq, weight=0.0005):
    """ Makes a white noise input synapse to the cell """
    plt.seed(1234)
    # Make an array with sinusoids with equal amplitude but random phases.
    tot_ntsteps = round((cell.tstop - cell.tstart) / cell.dt + 1)
    I = np.zeros(tot_ntsteps)
    tvec = np.arange(tot_ntsteps) * cell.dt
    for freq in range(1, max_freq + 1):
        I += np.sin(2 * np.pi * freq * tvec/1000. + 2*np.pi*np.random.random())
    input_array = weight * I
    noiseVec = neuron.h.Vector(input_array)

    # Make the synapse
    i = 0
    syn = None
    for sec in cell.allseclist:
        for seg in sec:
            if i == input_idx:
                syn = neuron.h.ISyn(seg.x, sec=sec)
            i += 1
    if syn is None:
        raise RuntimeError("Wrong stimuli index")
    syn.dur = 1E9
    syn.delay = 0
    noiseVec.play(syn._ref_amp, cell.dt)
    return cell, syn, noiseVec
def _make_white_noise_stimuli(self, cell, input_idx, weight=None):
    if self.input_type == 'white_noise':
        input_scaling = 0.0005
        max_freq = 500
        plt.seed(1234)
        input_array = input_scaling * (self._make_WN_input(cell, max_freq))
        print(1000 * np.std(input_array))
    elif self.input_type == 'real_wn':
        tot_ntsteps = round((cell.tstopms - cell.tstartms)/cell.timeres_NEURON + 1)
        input_scaling = .1
        input_array = input_scaling * (np.random.random(tot_ntsteps) - 0.5)
    else:
        raise RuntimeError("Unrecognized input_type!")

    noise_vec = neuron.h.Vector(input_array) if weight is None else neuron.h.Vector(input_array * weight)

    i = 0
    syn = None
    for sec in cell.allseclist:
        for seg in sec:
            if i == input_idx:
                print("Input inserted in ", sec.name())
                syn = neuron.h.ISyn(seg.x, sec=sec)
                # print "Dist: ", nrn.distance(seg.x)
            i += 1
    if syn is None:
        raise RuntimeError("Wrong stimuli index")
    syn.dur = 1E9
    syn.delay = 0
    noise_vec.play(syn._ref_amp, cell.timeres_NEURON)
    return cell, syn, noise_vec
def createCellsFixedNum(self):
    ''' Create population cells based on fixed number of cells'''
    cellModelClass = Cell
    cells = []
    seed(f.sim.id32('%d' % (f.cfg['randseed'] + self.tags['numCells'])))
    randLocs = rand(self.tags['numCells'], 3)  # create random x,y,z locations
    for icoord, coord in enumerate(['x', 'y', 'z']):
        if coord+'Range' in self.tags:  # if user provided absolute range, convert to normalized
            self.tags[coord+'normRange'] = [point / f.net.params['size'+coord.upper()]
                                            for point in self.tags[coord+'Range']]
        if coord+'normRange' in self.tags:  # if normalized range, rescale random locations
            minv = self.tags[coord+'normRange'][0]
            maxv = self.tags[coord+'normRange'][1]
            randLocs[:, icoord] = randLocs[:, icoord] * (maxv-minv) + minv  # rescale uniform [0,1) into [minv, maxv)

    for i in xrange(int(f.rank), f.net.params['scale'] * self.tags['numCells'], f.nhosts):
        gid = f.lastGid + i
        self.cellGids.append(gid)  # add gid list of cells belonging to this population - not needed?
        cellTags = {k: v for (k, v) in self.tags.iteritems()
                    if k in f.net.params['popTagsCopiedToCells']}  # copy all pop tags to cell tags, except those that are pop-specific
        cellTags['xnorm'] = randLocs[i, 0]  # set x location (normalized)
        cellTags['ynorm'] = randLocs[i, 1]  # set y location (normalized)
        cellTags['znorm'] = randLocs[i, 2]  # set z location (normalized)
        cellTags['x'] = f.net.params['sizeX'] * randLocs[i, 0]  # set x location (um)
        cellTags['y'] = f.net.params['sizeY'] * randLocs[i, 1]  # set y location (um)
        cellTags['z'] = f.net.params['sizeZ'] * randLocs[i, 2]  # set z location (um)
        if 'propList' not in cellTags:
            cellTags['propList'] = []  # initalize list of property sets if doesn't exist
        cells.append(cellModelClass(gid, cellTags))  # instantiate Cell object
        if f.cfg['verbose']:
            print('Cell %d/%d (gid=%d) of pop %s, on node %d, ' % (
                i, f.net.params['scale'] * self.tags['numCells'] - 1, gid, self.tags['popLabel'], f.rank))
    f.lastGid = f.lastGid + self.tags['numCells']
    return cells
def test_subspace_det_algo1_siso(self):
    """
    Subspace deterministic algorithm (SISO).
    """
    ss1 = sysid.StateSpaceDiscreteLinear(
        A=0.9, B=0.5, C=1, D=0, Q=0.01, R=0.01, dt=0.1)
    pl.seed(1234)
    prbs1 = sysid.prbs(1000)

    def f_prbs(t, x, i):
        "input function"
        #pylint: disable=unused-argument, unused-variable
        return prbs1[i]

    tf = 10
    data = ss1.simulate(f_u=f_prbs, x0=pl.matrix(0), tf=tf)
    ss1_id = sysid.subspace_det_algo1(
        y=data.y, u=data.u, f=5, p=5, s_tol=1e-1, dt=ss1.dt)
    data_id = ss1_id.simulate(f_u=f_prbs, x0=0, tf=tf)
    nrms = sysid.subspace.nrms(data_id.y, data.y)
    self.assertGreater(nrms, 0.9)

    if ENABLE_PLOTTING:
        pl.plot(data_id.t.T, data_id.x.T, label='id')
        pl.plot(data.t.T, data.x.T, label='true')
        pl.legend()
        pl.grid()
def add_noise_to_cube(data, beamfwhm_pix, fluxmap=None):
    import pylab as pl
    pl.seed()
    s = data.shape
    noise = pl.randn(s[0], s[1], s[2])
    noisescale = 1.
    if fluxmap is not None:
        noisescale = 1.26 * fluxmap**2
        z = pl.where(pl.isnan(noisescale))
        if len(z[0]) > 0:
            noisescale[z] = 1.

    # from astropy.convolution import convolve_fft, Gaussian2DKernel
    # psf = Gaussian2DKernel(stddev=beamfwhm_pix/2.354)
    # for i in range(s[0]):  # ASSUMES FIRST AXIS IS VEL
    #     noise[i] = convolve_fft(noise[i]/noisescale, psf)  # , interpolate_nan=True)
    from scipy.ndimage.filters import gaussian_filter
    for i in range(s[0]):  # ASSUMES FIRST AXIS IS VEL
        noise[i] = gaussian_filter(noise[i], beamfwhm_pix / 2.354) / noisescale

    def mad(data, axis=None):
        return pl.nanmedian(pl.absolute(data - pl.nanmedian(data, axis)), axis)

    rms = mad(data)  # rms of original cube
    current_rms = mad(noise)
    # scale the noise to have the same rms as the data - there's a sqrt(2) problem I think
    noise = rms * noise / current_rms
    return noise + data
def make_white_noise(cell, weight, input_idx):
    max_freq = 510
    plt.seed(1234)
    tot_ntsteps = round((cell.tstopms - cell.tstartms) / cell.timeres_NEURON + 1)
    input_array = np.zeros(tot_ntsteps)
    tvec = np.arange(tot_ntsteps) * cell.timeres_NEURON
    for freq in xrange(1, max_freq + 1):
        input_array += np.sin(2 * np.pi * freq * tvec / 1000. + 2 * np.pi * np.random.random())
    input_array *= weight
    noiseVec = neuron.h.Vector(input_array)

    i = 0
    syn = None
    for sec in cell.allseclist:
        for seg in sec:
            if i == input_idx:
                print "Input inserted in ", sec.name()
                syn = neuron.h.ISyn(seg.x, sec=sec)
            i += 1
    if syn is None:
        raise RuntimeError("Wrong stimuli index")
    syn.dur = 1E9
    syn.delay = 0
    noiseVec.play(syn._ref_amp, cell.timeres_NEURON)
    return cell, syn, noiseVec
def process(data):
    pl.seed(data)
    output = 0
    for i in pl.arange(1e6):
        this = pl.randn()
        # print('%s: %s' % (i, this))
        output += this
    return output
def _run_single_wn_simulation(self, mu, input_idx, distribution, tau_w):
    plt.seed(1234)
    electrode = LFPy.RecExtElectrode(**self.electrode_parameters)
    # neuron.h('forall delete_section()')
    cell = self._return_cell(self.holding_potential, 'generic', mu, distribution, tau_w)
    # self._quickplot_setup(cell, electrode)
    cell, syn, noiseVec = self._make_white_noise_stimuli(cell, input_idx)
    print("Starting simulation ...")
    cell.simulate(rec_imem=True, rec_vmem=True, electrode=electrode)
    self.save_neural_sim_single_input_data(cell, electrode, input_idx, mu, distribution, tau_w)
def computation(seed=0, n=1000):
    # Make graph
    pl.seed(int(seed))
    fig = pl.figure()
    ax = fig.add_subplot(111)
    xdata = pl.randn(n)
    ydata = pl.randn(n)
    colors = sc.vectocolor(pl.sqrt(xdata**2 + ydata**2))
    ax.scatter(xdata, ydata, c=colors)

    # Convert to FE
    graphjson = sw.mpld3ify(fig, jsonify=False)  # Convert to dict
    return graphjson  # Return the JSON representation of the Matplotlib figure
def test_subspace_det_algo1_mimo(self):
    """
    Subspace deterministic algorithm (MIMO).
    """
    ss2 = sysid.StateSpaceDiscreteLinear(
        A=pl.matrix([[0, 0.1, 0.2],
                     [0.2, 0.3, 0.4],
                     [0.4, 0.3, 0.2]]),
        B=pl.matrix([[1, 0],
                     [0, 1],
                     [0, -1]]),
        C=pl.matrix([[1, 0, 0],
                     [0, 1, 0]]),
        D=pl.matrix([[0, 0],
                     [0, 0]]),
        Q=pl.diag([0.01, 0.01, 0.01]),
        R=pl.diag([0.01, 0.01]),
        dt=0.1)
    pl.seed(1234)
    prbs1 = sysid.prbs(1000)
    prbs2 = sysid.prbs(1000)

    def f_prbs_2d(t, x, i):
        "input function"
        #pylint: disable=unused-argument
        i = i % 1000
        return 2 * pl.matrix([prbs1[i] - 0.5, prbs2[i] - 0.5]).T

    tf = 8
    data = ss2.simulate(f_u=f_prbs_2d, x0=pl.matrix([0, 0, 0]).T, tf=tf)
    ss2_id = sysid.subspace_det_algo1(y=data.y, u=data.u, f=5, p=5,
                                      s_tol=0.1, dt=ss2.dt)
    data_id = ss2_id.simulate(f_u=f_prbs_2d,
                              x0=pl.matrix(pl.zeros(ss2_id.A.shape[0])).T, tf=tf)
    nrms = sysid.nrms(data_id.y, data.y)
    self.assertGreater(nrms, 0.9)

    if ENABLE_PLOTTING:
        for i in range(2):
            pl.figure()
            pl.plot(data_id.t.T, data_id.y[i, :].T,
                    label='$y_{:d}$ id'.format(i))
            pl.plot(data.t.T, data.y[i, :].T,
                    label='$y_{:d}$ true'.format(i))
            pl.legend()
            pl.grid()
def test_active_no_input():
    plt.seed(0)
    cell_params_hoc['custom_code'] = [join(model_path, 'custom_codes.hoc'),
                                      join(model_path, 'biophys3_active.hoc')]
    cell_params_py['custom_fun_args'] = [{'conductance_type': 'active'}]

    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_hoc)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '3_active_no_input_hoc')

    plt.seed(0)
    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_py)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '3_active_no_input_py')
def build_rep_trace(noise=1., seed_val=2931):
    p.seed(seed_val)
    height = 1.
    tau_1 = 10.
    tau_2 = 5.
    start = 30.
    offset = 50.
    repetitions = 100
    result = p.empty(repetitions * len(times))
    for i in xrange(repetitions):
        v = noisy_psp(height, tau_1, tau_2, start, offset, times, noise)
        result[i * len(times): (i + 1) * len(times)] = v
    return result
def test_linearized_no_input():
    cell_params_hoc['custom_code'] = [join(model_path, 'custom_codes.hoc'),
                                      join(model_path, 'biophys3_Ih_linearized_mod.hoc')]
    cell_params_py['custom_fun_args'] = [{'conductance_type': 'Ih_linearized',
                                          'hold_potential': -70}]

    # plt.seed(0)
    # neuron.h('forall delete_section()')
    # cell = LFPy.Cell(**cell_params_hoc)
    # cell.simulate(rec_vmem=True, rec_imem=True)
    # plot_cell(cell, '6_linearized_no_input_hoc')

    plt.seed(0)
    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_py)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '6_linearized_no_input_py')
def demo():
    """
    Example showing the relationship between alpha and sigma in the random
    walk posterior distribution. The lag 1 autocorrelation coefficient R^2
    is approximately 1-alpha.
    """
    from numpy import mean, std, sum
    import pylab
    from matplotlib.ticker import MaxNLocator

    pylab.seed(10)  # Pick a pretty starting point

    # Generate chains
    n = 5000
    mu = [0, 5, 10, 15, 20]
    sigma = [0.138, 0.31, 0.45, 0.85, 1]
    alpha = [0.01, 0.05, 0.1, 0.5, 1]
    chains = walk(n, mu=mu, sigma=sigma, alpha=alpha)

    # Compute lag 1 correlation coefficient
    m, s = mean(chains, axis=0), std(chains, ddof=1, axis=0)
    r2 = sum((chains[1:] - m) * (chains[:-1] - m), axis=0) / ((n - 2) * s**2)
    r2[abs(r2) < 0.01] = 0

    # Plot chains
    ax_data = pylab.axes([0.05, 0.05, 0.65, 0.9])  # x, y, w, h
    ax_data.plot(chains)
    textkw = dict(xytext=(30, 0), textcoords='offset points',
                  verticalalignment='center',
                  backgroundcolor=(0.8, 0.8, 0.8, 0.8))
    label = r'$\ \alpha\,%.2f\ \ \sigma\,%.3f\ \ ' \
            r'R^2\,%.2f\ \ avg\,%.2f\ \ std\,%.2f\ $'
    for m, s, a, r2, em, es in zip(mu, sigma, alpha, r2, m, s):
        pylab.annotate(label % (a, s, r2, em - m, es), xy=(0, m), **textkw)

    # Plot histogram
    ax_hist = pylab.axes([0.75, 0.05, 0.2, 0.9], sharey=ax_data)
    ax_hist.hist(chains.flatten(), 100, orientation='horizontal')
    pylab.setp(ax_hist.get_yticklabels(), visible=False)
    ax_hist.xaxis.set_major_locator(MaxNLocator(3))

    pylab.show()
def test_est_dtlnorm(ns, gap, rseed, method, print_gap):
    seed(rseed)
    mean_0 = 8.2345
    sd_0 = 2.2371
    thres_0 = [5082.456, Inf]
    x_0 = rtlnorm(ns, mu=mean_0, sigma=sd_0, thres=thres_0)
    for n in arange(0, ns, gap)[1:]:
        x_1 = x_0[arange(n)]
        par_est_0 = est_dtlnorm(x_1, thres_0, method)
        if print_gap:
            logging.info(" likelihood: " + str(m_nl_dtlnorm(x_1, par_est_0[0], par_est_0[1], thres_0)))
    logging.info(" " + method + ": ")
    logging.info(" deviation: " + str(par_est_0/array([mean_0, sd_0]) - 1))
    logging.info(" likelihood: " + str(m_nl_dtlnorm(x_0, par_est_0[0], par_est_0[1], thres_0)))
def _test_unwrap():
    pl.seed(1)
    xs = pl.cumsum(scipy.stats.norm.rvs(scale=1000, size=10000))
    axes = pl.subplot(411)
    pl.plot(xs)
    xs %= 2**16
    pl.subplot(412, sharex=axes)
    pl.plot(xs)
    in_place = False
    if in_place:
        pl.subplot(413, sharex=axes)
        unwrap(xs, 0, 2**16, True)
        pl.plot(xs)
        pl.subplot(414, sharex=axes)
        pl.plot(xs)
    else:
        pl.subplot(413, sharex=axes)
        pl.plot(unwrap(xs, 0, 2**16))
        pl.subplot(414, sharex=axes)
        pl.plot(xs)
    pl.show()
def simulate_synaptic_input(input_idx, holding_potential, use_channels, cellname):
    timeres = 2**-4
    cut_off = 0
    tstopms = 100
    tstartms = -cut_off
    model_path = cellname

    cell_params = {
        'morphology': join(model_path, '%s.hoc' % cellname),
        # 'rm': 30000,      # membrane resistance
        # 'cm': 1.0,        # membrane capacitance
        # 'Ra': 100,        # axial resistance
        'v_init': holding_potential,     # initial crossmembrane potential
        'passive': False,                # switch on passive mechs
        'nsegs_method': 'lambda_f',      # method for setting number of segments,
        'lambda_f': 100,                 # segments are isopotential at this frequency
        'timeres_NEURON': timeres,       # dt of LFP and NEURON simulation.
        'timeres_python': timeres,
        'tstartms': tstartms,            # start time, recorders start at t=0
        'tstopms': tstopms,
        'custom_fun': [active_declarations],  # will execute this function
        'custom_fun_args': [{'use_channels': use_channels,
                             'cellname': cellname,
                             'hold_potential': holding_potential}],
    }

    cell = LFPy.Cell(**cell_params)
    plt.seed(1234)
    print input_idx, holding_potential
    sim_params = {'rec_vmem': True, 'rec_imem': True}
    make_syaptic_stimuli(cell, input_idx)
    cell.simulate(**sim_params)

    plt.subplot(211, title='Soma')
    plt.plot(cell.tvec, cell.vmem[0, :],
             label='%d %d mV %s' % (input_idx, holding_potential, str(use_channels)))
    plt.subplot(212, title='Input idx %d' % input_idx)
    plt.plot(cell.tvec, cell.vmem[input_idx, :],
             label='%d %d mV %s' % (input_idx, holding_potential, str(use_channels)))
def test_steady_state(input_idx, hold_potential, cellname):
    timeres = 2**-4
    cut_off = 0
    tstopms = 500
    tstartms = -cut_off
    model_path = cellname

    cell_params = {
        'morphology': join(model_path, '%s.hoc' % cellname),
        # 'rm': 30000,      # membrane resistance
        # 'cm': 1.0,        # membrane capacitance
        # 'Ra': 100,        # axial resistance
        'v_init': hold_potential,        # initial crossmembrane potential
        'passive': False,                # switch on passive mechs
        'nsegs_method': 'lambda_f',      # method for setting number of segments,
        'lambda_f': 100,                 # segments are isopotential at this frequency
        'timeres_NEURON': timeres,       # dt of LFP and NEURON simulation.
        'timeres_python': timeres,
        'tstartms': tstartms,            # start time, recorders start at t=0
        'tstopms': tstopms,
        'custom_fun': [active_declarations],  # will execute this function
        'custom_fun_args': [{'use_channels': ['Ih', 'Im', 'INaP'],
                             'cellname': cellname,
                             'hold_potential': hold_potential}],
    }

    cell = LFPy.Cell(**cell_params)
    area_study(cell)

    plt.seed(1234)
    print input_idx, hold_potential
    sim_params = {'rec_vmem': True, 'rec_imem': True}
    cell.simulate(**sim_params)

    [plt.plot(cell.tvec, cell.vmem[idx, :]) for idx in xrange(len(cell.xmid))]
    plt.show()
    img = plt.scatter(cell.xmid, cell.zmid, c=cell.vmem[:, -1], edgecolor='none')
    plt.axis('equal')
    plt.colorbar(img)
    plt.show()
def _run_distributed_synaptic_simulation(self, mu, input_sec, distribution, tau_w, weight):
    plt.seed(1234)
    tau = '%1.2f' % tau_w if type(tau_w) in [int, float] else tau_w
    sim_name = '%s_%s_%s_%1.1f_%+d_%s_%s_%1.4f' % (self.cell_name, self.input_type, input_sec, mu,
                                                   self.holding_potential, distribution, tau, weight)

    # Sometimes we do not want to redo simulations if they are already done
    # if os.path.isfile(join(self.sim_folder, 'sig_%s.npy' % sim_name)):
    #     print "Skipping ", mu, input_sec, distribution, tau_w, weight, 'sig_%s.npy' % sim_name
    #     return

    electrode = LFPy.RecExtElectrode(**self.electrode_parameters)
    cell = self._return_cell(self.holding_potential, 'generic', mu, distribution, tau_w)
    cell, syn, noiseVec = self._make_distributed_synaptic_stimuli(cell, input_sec, weight)
    print("Starting simulation ...")
    cell.simulate(rec_imem=True, rec_vmem=True, electrode=electrode)
    self.save_neural_sim_single_input_data(cell, electrode, input_sec, mu, distribution, tau_w, weight)
    neuron.h('forall delete_section()')
    del cell, syn, noiseVec, electrode
def test_active_orig():
    plt.seed(0)
    cell_params_hoc['custom_code'] = [join(model_path, 'custom_codes.hoc'),
                                      join(model_path, 'biophys3_active.hoc')]
    cell_params_py['custom_fun_args'] = [{'conductance_type': 'active'}]

    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_hoc)
    insert_synapses(synapseParameters_AMPA, cell, **insert_synapses_AMPA_args)
    insert_synapses(synapseParameters_NMDA, cell, **insert_synapses_NMDA_args)
    insert_synapses(synapseParameters_GABA_A, cell, **insert_synapses_GABA_A_args)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '1_active_hoc')

    plt.seed(0)
    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params_py)
    insert_synapses(synapseParameters_AMPA, cell, **insert_synapses_AMPA_args)
    insert_synapses(synapseParameters_NMDA, cell, **insert_synapses_NMDA_args)
    insert_synapses(synapseParameters_GABA_A, cell, **insert_synapses_GABA_A_args)
    cell.simulate(rec_vmem=True, rec_imem=True)
    plot_cell(cell, '1_active_py')
def labelPlot(numFlips, numTrials, mean, sd):
    pylab.title(str(numTrials) + ' trials of ' + str(numFlips) + ' flips each')
    pylab.xlabel('Fraction of Heads')
    pylab.ylabel('Number of Trials')
    xmin, xmax = pylab.xlim()
    ymin, ymax = pylab.ylim()
    # add a text box
    pylab.text(xmin + (xmax - xmin) * 0.02, (ymax - ymin) / 2,
               'Mean = ' + str(round(mean, 4)) + '\nSD = ' + str(round(sd, 4)))


def makePlots(numFlips1, numFlips2, numTrials):
    val1, mean1, sd1 = flipSim(numFlips1, numTrials)
    # force the x axis to match the previous plot
    pylab.hist(val1, bins=21)  # binning makes the differences in SD visually apparent
    xmin, xmax = pylab.xlim()
    ymin, ymax = pylab.ylim()
    labelPlot(numFlips1, numTrials, mean1, sd1)
    pylab.figure()
    val2, mean2, sd2 = flipSim(numFlips2, numTrials)
    pylab.hist(val2, bins=21)
    pylab.xlim(xmin, xmax)
    ymin, ymax = pylab.ylim()
    labelPlot(numFlips2, numTrials, mean2, sd2)


pylab.seed(0)
makePlots(100, 1000, 100000)
pylab.show()
home = os.path.expanduser('~')
path = os.path.join(home, 'umb', 'CRCNS/trunk')
if path not in sys.path:
    sys.path.append(path)

from testdata.cellsimmethods import \
    cellsim_active, draw_rand_pos, shufflemorphos, \
    shufflecustom_codes, collect_data
from testdata.nestfun import run_brunel_delta_nest, gdfFilesProcessing
from time import time

# set some seeds
SEED = 12345678
NESTSEED = SEED
pl.seed(SEED)

################# Initialization of MPI stuff ##################################
COMM = MPI.COMM_WORLD
SIZE = COMM.Get_size()
RANK = COMM.Get_rank()
MASTER_MODE = COMM.rank == 0
print 'SIZE %i, RANK %i, MASTER_MODE: %s' % (SIZE, RANK, str(MASTER_MODE))

# print out memory consumption etc every ten seconds
if RANK == 0 or RANK == 8:
    if sys.platform == 'darwin':
        pass
    else:
        os.system("vmstat 10 -S M &")
for noiseLevel in [0, 0.1, 0.2, 0.3, 0.4, 0.5]:
    crossscore1 = crossscore2 = crossscore3 = crossscore4 = 0
    for count in range(5):
        # Standard deviation of each feature
        st = np.tile(X.std(axis=0), (X.shape[0], 1))
        hst = np.tile(HX.std(axis=0), (HX.shape[0], 1))
        # Creating noisy samples
        pylab.seed(rs + count)
        nX = X + pylab.randn(*X.shape) * st * noiseLevel
        nHX = HX + pylab.randn(*HX.shape) * hst * noiseLevel

        coldModel = InitModel(DecisionTreeClassifier)
        hotModel = InitModel(DecisionTreeClassifier)
        coldModellda = InitModel(LDA)
        hotModellda = InitModel(LDA)
        coldModel.fit(nX, Y)
        hotModel.fit(nHX, HY)
        coldModellda.fit(nX, Y)
        hotModellda.fit(nHX, HY)
        crossscore1 += coldModel.score(HX, HY) * 100
        crossscore2 += hotModel.score(X, Y) * 100
def c0_thread(task_queue, randomseed, done_queue):
    for n in iter(task_queue.get, 'STOP'):
        print '\nCell number %s out of %d.' % (n, pop_params['n'] - 1)
        pl.seed(randomseed - n)
        cell = LFPy.Cell(**cellparams)
        soma_pos = {
            'xpos': pop_soma_pos0['xpos'][n],
            'ypos': pop_soma_pos0['ypos'][n],
            'zpos': pop_soma_pos0['zpos'][n]
        }
        cell.set_pos(**soma_pos)
        cell.color = 'g'

        # make list of cells with morphology rotation file
        L4_pc = [fname.split('.rot')[0] + '.hoc'
                 for fname in glob(os.path.join('morphologies', '*.rot'))]
        if cellparams['morphology'] not in L4_pc:
            rotation = random_rot_angles()
        else:
            rotation = random_rot_angles(x=False, y=False, z=True)
        cell.set_rotation(**rotation)

        # Manage probabilities of synapse positions
        if section_syn == 'somaproximal':
            idxs = get_idx_proximal(cell, r=50.)
        else:
            idxs = cell.get_idx(section=section_syn)
        P = cell.get_rand_prob_area_norm_from_idx(idx=idxs)

        idx = []
        if disttype == 'hard_cyl':
            rad_xy = pl.sqrt(cell.xmid[idxs]**2 + cell.ymid[idxs]**2)
            indices = pl.where(
                pl.array(rad_xy < sigma[1], dtype=int) *
                pl.array(cell.zmid[idxs] <= sigma[0] - my, dtype=int) *
                pl.array(cell.zmid[idxs] > -sigma[0] - my, dtype=int) == 1)
            W = pl.zeros(idxs.size)
            W[indices] = 1
            for i in xrange(idxs.size):
                if W[i] == 1:  # synapse allowed
                    for j in xrange(pl.poisson(mean_n_syn)):
                        base_prob = P[i] / P.sum()
                        if pl.rand() < base_prob:
                            idx.append(i)
            allidx_e0 = idxs[idx]
        elif disttype == 'hard_sphere':
            rad_xyz = pl.sqrt(cell.xmid[idxs]**2 + cell.ymid[idxs]**2 +
                              (cell.zmid[idxs] - my)**2)
            indices = pl.where(pl.array(rad_xyz < sigma, dtype=int) == 1)
            W = pl.zeros(idxs.size)
            W[indices] = 1
            for i in xrange(idxs.size):
                if W[i] == 1:
                    for j in xrange(pl.poisson(mean_n_syn)):
                        base_prob = P[i] / P.sum()
                        if pl.rand() < base_prob:
                            idx.append(i)
            allidx_e0 = idxs[idx]
        elif disttype == 'anisotrop':
            W = pl.exp(-cell.xmid[idxs]**2 / (2 * sigma[0]**2)) * \
                pl.exp(-cell.ymid[idxs]**2 / (2 * sigma[1]**2)) * \
                pl.exp(-(cell.zmid[idxs] - my)**2 / (2 * sigma[2]**2))
            PW = P * W
            for i in xrange(idxs.size):
                for j in xrange(pl.poisson(mean_n_syn)):
                    base_prob = PW[i] / P.sum()
                    if pl.rand() < base_prob:
                        idx.append(i)
            allidx_e0 = idxs[idx]

        cell.strip_hoc_objects()
        done_queue.put([cell, rotation, soma_pos, allidx_e, allidx_e0])
def plot_pop(do_show=False, pause=0.2):
    ''' Plot an example population '''

    plotconnections = True
    n = 5000
    alpha = 0.5

    # indices = pl.arange(1000)
    pl.seed(1)
    indices = pl.randint(0, n, 20)

    max_contacts = {'S': 20, 'W': 10}
    population = sp.make_population(n=n, max_contacts=max_contacts)

    # Lay people out on a square grid
    nside = np.ceil(np.sqrt(n))
    x, y = np.meshgrid(np.arange(nside), np.arange(nside))
    x = x.flatten()[:n]
    y = y.flatten()[:n]

    people = list(population.values())
    for p, person in enumerate(people):
        person['loc'] = dict(x=x[p], y=y[p])

    ages = np.array([person['age'] for person in people])
    f_inds = [ind for ind, person in enumerate(people) if not person['sex']]
    m_inds = [ind for ind, person in enumerate(people) if person['sex']]

    fig = None  # only created when do_show is True
    if do_show:
        use_terrain = False
        if use_terrain:
            import matplotlib.pyplot as plt
            import matplotlib.colors as colors
            colors_undersea = plt.cm.terrain(np.linspace(0, 0.17, 256))
            colors_land = plt.cm.terrain(np.linspace(0.25, 1, 256))
            all_colors = np.vstack((colors_undersea, colors_land))
            terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
            pl.set_cmap(terrain_map)

        fig = pl.figure(figsize=(24, 18))
        pl.subplot(111)
        minval = 0    # ages.min()
        maxval = 100  # ages.min()
        colors = sc.vectocolor(ages, minval=minval, maxval=maxval)
        for i, inds in enumerate([f_inds, m_inds]):
            pl.scatter(x[inds], y[inds], marker='os'[i], c=colors[inds])
        pl.clim([minval, maxval])
        pl.colorbar()

        if plotconnections:
            lcols = dict(H=[0, 0, 0], S=[0, 0.5, 1], W=[0, 0.7, 0], C=[1, 1, 0])
            for index in indices:
                person = people[index]
                contacts = person['contacts']
                lines = []
                for lkey in lcols.keys():
                    for contactkey in contacts[lkey]:
                        contact = population[contactkey]
                        tmp = pl.plot([person['loc']['x'], contact['loc']['x']],
                                      [person['loc']['y'], contact['loc']['y']],
                                      c=lcols[lkey], alpha=alpha)
                        lines.append(tmp)
        if pause:
            pl.pause(pause)

    return fig
def createCellsDensity(self):
    ''' Create population cells based on density'''
    cellModelClass = Cell
    cells = []
    volume = f.net.params['sizeY']/1e3 * f.net.params['sizeX']/1e3 * f.net.params['sizeZ']/1e3  # calculate full volume
    for coord in ['x', 'y', 'z']:
        if coord+'Range' in self.tags:  # if user provided absolute range, convert to normalized
            self.tags[coord+'normRange'] = [point / f.net.params['size'+coord.upper()]
                                            for point in self.tags[coord+'Range']]
        if coord+'normRange' in self.tags:  # if normalized range, rescale volume
            minv = self.tags[coord+'normRange'][0]
            maxv = self.tags[coord+'normRange'][1]
            volume = volume * (maxv-minv)

    funcLocs = None  # start with no locations as a function of density function
    if isinstance(self.tags['density'], str):  # check if density is given as a function
        strFunc = self.tags['density']  # string containing function
        strVars = [var for var in ['xnorm', 'ynorm', 'znorm'] if var in strFunc]  # get list of variables used
        if not len(strVars) == 1:
            print 'Error: density function (%s) for population %s does not include "xnorm", "ynorm" or "znorm"' % (strFunc, self.tags['popLabel'])
            return
        coordFunc = strVars[0]
        lambdaStr = 'lambda ' + coordFunc + ': ' + strFunc  # convert to lambda function
        densityFunc = eval(lambdaStr)
        minRange = self.tags[coordFunc+'Range'][0]
        maxRange = self.tags[coordFunc+'Range'][1]

        interval = 0.001  # interval of location values to evaluate func in order to find the max cell density
        maxDensity = max(map(densityFunc, (arange(minRange, maxRange, interval))))  # max cell density
        maxCells = volume * maxDensity  # max number of cells based on max value of density func

        seed(f.sim.id32('%d' % f.cfg['randseed']))  # reset random number generator
        locsAll = minRange + ((maxRange-minRange)) * rand(int(maxCells), 1)  # random location values
        locsProb = array(map(densityFunc, locsAll)) / maxDensity  # calculate normalized density for each location value (used to prune)
        allrands = rand(len(locsProb))  # create an array of random numbers for checking each location pos
        makethiscell = locsProb > allrands  # perform test to see whether or not this cell should be included (pruning based on density func)
        funcLocs = [locsAll[i] for i in range(len(locsAll))
                    if i in array(makethiscell.nonzero()[0], dtype='int')]  # keep only subset of locations based on density func
        self.tags['numCells'] = len(funcLocs)  # final number of cells after pruning of location values based on density func
        if f.cfg['verbose']:
            print 'Volume=%.2f, maxDensity=%.2f, maxCells=%.0f, numCells=%.0f' % (volume, maxDensity, maxCells, self.tags['numCells'])
    else:  # no location-dependent density
        self.tags['numCells'] = int(self.tags['density'] * volume)  # = density (cells/mm^3) * volume (mm^3)

    # calculate locations of cells
    seed(f.sim.id32('%d' % (f.cfg['randseed'] + self.tags['numCells'])))
    randLocs = rand(self.tags['numCells'], 3)  # create random x,y,z locations
    for icoord, coord in enumerate(['x', 'y', 'z']):
        if coord+'normRange' in self.tags:  # if normalized range, rescale random locations
            minv = self.tags[coord+'normRange'][0]
            maxv = self.tags[coord+'normRange'][1]
            randLocs[:, icoord] = randLocs[:, icoord] * (maxv-minv) + minv
        if funcLocs and coordFunc == coord+'norm':  # if locations for this coordinate calculated using density function
            randLocs[:, icoord] = funcLocs

    if f.cfg['verbose'] and not funcLocs:
        print 'Volume=%.4f, density=%.2f, numCells=%.0f' % (volume, self.tags['density'], self.tags['numCells'])

    for i in xrange(int(f.rank), self.tags['numCells'], f.nhosts):
        gid = f.lastGid + i
        self.cellGids.append(gid)  # add gid list of cells belonging to this population - not needed?
        cellTags = {k: v for (k, v) in self.tags.iteritems()
                    if k in f.net.params['popTagsCopiedToCells']}  # copy all pop tags to cell tags, except those that are pop-specific
        cellTags['xnorm'] = randLocs[i, 0]  # calculate x location (normalized)
        cellTags['ynorm'] = randLocs[i, 1]  # calculate y location (normalized)
        cellTags['znorm'] = randLocs[i, 2]  # calculate z location (normalized)
        cellTags['x'] = f.net.params['sizeX'] * randLocs[i, 0]  # calculate x location (um)
        cellTags['y'] = f.net.params['sizeY'] * randLocs[i, 1]  # calculate y location (um)
        cellTags['z'] = f.net.params['sizeZ'] * randLocs[i, 2]  # calculate z location (um)
        if 'propList' not in cellTags:
            cellTags['propList'] = []  # initalize list of property sets if doesn't exist
        cells.append(cellModelClass(gid, cellTags))  # instantiate Cell object
        if f.cfg['verbose']:
            print('Cell %d/%d (gid=%d) of pop %s, pos=(%2.f, %2.f, %2.f), on node %d, ' % (
                i, self.tags['numCells']-1, gid, self.tags['popLabel'],
                cellTags['x'], cellTags['ynorm'], cellTags['z'], f.rank))
    f.lastGid = f.lastGid + self.tags['numCells']
    return cells
#!/usr/bin/env python
# importing some modules, setting some matplotlib values for pl.plot.
import pylab as pl
import LFPy

# load compiled mechs from the mod-folder
LFPy.cell.neuron.load_mechanisms("../mod")

pl.rcParams.update({'font.size': 10,
                    'figure.figsize': [16, 9],
                    'wspace': 0.5,
                    'hspace': 0.5})

# seed for random generation
pl.seed(9876543210)

# plot pops up by itself
pl.interactive(1)

################################################################################
# A couple of function declarations
################################################################################

def plotstuff():
    fig = pl.figure(figsize=[12, 8])

    ax = fig.add_axes([0.1, 0.7, 0.5, 0.2])
    ax.plot(cell.tvec, cell.somav)
    ax.set_xlabel('Time [ms]')
    ax.set_ylabel('Soma pot. [mV]')

    ax = fig.add_axes([0.1, 0.4, 0.5, 0.2])
    for i in xrange(len(cell.synapses)):
# Create the class instances to be used for the tests
OT = create_DTK()
OM = create_OM()

##########################################
### Run tests
##########################################

if 'initial_points' in torun:

    # Tests to run
    doprint = False
    doplot = False
    doassert = True

    # Choose samples
    pl.seed(randseed)
    dtk_samples_df = OT.choose_initial_samples()
    pl.seed(randseed)
    om_samples = OM.sample_hypersphere()
    dtk_samples = dtk_samples_df.to_numpy()

    # Tests
    if doprint:
        print(dtk_samples)
        print(om_samples)

    if doplot:
        fig = pl.figure()
        pl.subplot(2, 1, 1)
        pl.hist(dtk_samples[:, 0], bins=100)
        pl.subplot(2, 1, 2)
def gather_results(self, randseed=None, results=None):
    if randseed is not None:
        pl.seed(randseed)
    if results is None:
        results = pl.rand(self.mp.N)
    return results
for i in PSet.iterkeys():
    vars()[i] = PSet[i]
psetid = PSet['uuid']

print('Current simulation is using ParameterSet:')
print PSet.pretty()

# create folder to save data if it doesn't exist
datafolder = os.path.join('savedata', psetid)
if not os.path.isdir(datafolder):
    os.system('mkdir %s' % datafolder)
    print 'created folder %s!' % datafolder

# set global seed
pl.seed(seed=randomseed)

################################################################################
# Simulation setup
################################################################################
cellparams = {
    'morphology': morphology,
    'timeres_NEURON': 0.025,
    'timeres_python': 0.025,
    'custom_code': custom_code,
    'rm': rm,
    'cm': cm,
    'Ra': Ra,
    'e_pas': v_init,
    'v_init': v_init,
    'tstartms': -1,
    sd = stdDev(fracHeads)
    return (fracHeads, mean, sd)


def labelPlot(numFlips, numTrials, mean, sd):
    pylab.title(str(numTrials) + ' trials of ' + str(numFlips) + ' flips each')
    pylab.xlabel('Fraction of Heads')
    pylab.ylabel('Number of Trials')
    xmin, xmax = pylab.xlim()
    ymin, ymax = pylab.ylim()
    pylab.text(xmin + (xmax - xmin) * 0.02, (ymax - ymin) / 2,
               'Mean = ' + str(round(mean, 4)) + '\nSD = ' + str(round(sd, 4)))


def makePlots(numFlips1, numFlips2, numTrials):
    val1, mean1, sd1 = flipSim(numFlips1, numTrials)
    pylab.hist(val1, bins=21)
    xmin, xmax = pylab.xlim()
    ymin, ymax = pylab.ylim()
    # labelPlot(numFlips1, numTrials, mean1, sd1)
    pylab.figure()
    val2, mean2, sd2 = flipSim(numFlips2, numTrials)
    pylab.hist(val2, bins=21)
    pylab.xlim(0, 1)
    ymin, ymax = pylab.ylim()
    # labelPlot(numFlips2, numTrials, mean2, sd2)


pylab.seed(0)
makePlots(100, 1000, 10000)
pylab.show()
__author__ = 'torbjone'

"""
Test if hay model can be made uniform by tampering with the static currents
"""

from os.path import join
import LFPy
import neuron
import pylab as plt
from hay_active_declarations import *

plt.seed(0)

timeres = 2**-4
cut_off = 0
tstopms = 1000
tstartms = -cut_off
model_path = join('lfpy_version')
neuron.load_mechanisms(join('mod'))
neuron.load_mechanisms('..')

# Synaptic parameters taken from Hendrickson et al 2011
# Excitatory synapse parameters:
synapseParameters_AMPA = {
    'e': 0,                  # reversal potential
    'syntype': 'Exp2Syn',    # conductance based exponential synapse
    'tau1': 1.,              # Time constant, rise
    'tau2': 3.,              # Time constant, decay
    'weight': 0.005,         # Synaptic weight
    'color': 'r',            # for plt.plot
    'marker': '.',           # for plt.plot
def replot_ZAP(input_idx, hold_potential, use_channels, cellname):
    timeres = 2**-4
    cut_off = 0
    tstopms = 20000
    tstartms = -cut_off
    model_path = cellname

    cell_params = {
        'morphology': join(model_path, '%s.hoc' % cellname),
        # 'rm': 30000,      # membrane resistance
        # 'cm': 1.0,        # membrane capacitance
        # 'Ra': 100,        # axial resistance
        'v_init': hold_potential,        # initial crossmembrane potential
        'passive': False,                # switch on passive mechs
        'nsegs_method': 'lambda_f',      # method for setting number of segments,
        'lambda_f': 100,                 # segments are isopotential at this frequency
        'timeres_NEURON': timeres,       # dt of LFP and NEURON simulation.
        'timeres_python': 1.,
        'tstartms': tstartms,            # start time, recorders start at t=0
        'tstopms': tstopms,
        'custom_fun': [active_declarations],  # will execute this function
        'custom_fun_args': [{'use_channels': use_channels,
                             'cellname': cellname,
                             'hold_potential': hold_potential}],
    }

    neuron.h('forall delete_section()')
    cell = LFPy.Cell(**cell_params)

    if cellname == 'c12861':
        apic_stim_idx = cell.get_idx('apic[66]')[0]
    elif cellname == 'n120':
        apic_stim_idx = cell.get_idx('apic[1]')[-1]
    else:
        raise RuntimeError("Cellname not recognized!")

    if input_idx == 'apic':
        input_idx = apic_stim_idx

    figfolder = join(model_path, 'verifications')
    plt.seed(1)

    apic_tuft_idx = cell.get_closest_idx(-100, 500, 0)
    trunk_idx = cell.get_closest_idx(0, 300, 0)
    axon_idx = cell.get_idx('axon_IS')[0]
    basal_idx = cell.get_closest_idx(-50, -100, 0)
    soma_idx = 0
    print input_idx, hold_potential

    idx_list = np.array([soma_idx, apic_stim_idx, apic_tuft_idx, trunk_idx,
                         axon_idx, basal_idx])

    input_scaling = .01
    cell, stim = make_ZAP_stimuli(cell, input_idx, input_scaling)

    simfolder = join(model_path, 'simresults')
    simname = join(simfolder, 'ZAP_%d_%1.3f' % (input_idx, input_scaling))
    if 'use_channels' in cell_params['custom_fun_args'][0] and \
            len(cell_params['custom_fun_args'][0]['use_channels']) > 0:
        for ion in cell_params['custom_fun_args'][0]['use_channels']:
            simname += '_%s' % ion
    else:
        simname += '_passive'

    if 'hold_potential' in cell_params['custom_fun_args'][0]:
        simname += '_%+d' % cell_params['custom_fun_args'][0]['hold_potential']

    cell, stim = loaddata_ZAP(cell, simname, input_idx, stim)
    plot_ZAP(cell, input_idx, input_scaling, idx_list, cell_params, figfolder, stim.i, 15)