def __init__(self, name, **kwargs):
    '''
    Constructor

    Arguments:
        name    name of this node group
        ids     provide ids if nodes already created
        model   my_nest model type, can be a list
        n       number of models to create, can be a list
        params  common parameters for the model to be set
        sets    list of slices partitioning the ids (default: one slice
                covering all n nodes)
    '''
    model = kwargs.get('model', 'iaf_neuron')
    n = kwargs.get('n', 1)
    params = kwargs.get('params', {})

    # BUG FIX: the original used kwargs.get('ids', my_nest.Create(...)),
    # which evaluates the default eagerly and therefore created n new
    # nodes even when the caller supplied ids. Only create when absent.
    if 'ids' in kwargs:
        ids = kwargs['ids']
    else:
        ids = my_nest.Create(model, n, params)

    # Stored as a slice; assumes the gids are consecutive — TODO confirm
    # (self.ids is presumably a property expanding this slice).
    self._ids = slice(ids[0], ids[-1], 1)

    # Keep only ids local to this MPI process.
    self.local_ids = []
    for _id in self.ids:
        if my_nest.GetStatus([_id], 'local'):
            self.local_ids.append(_id)

    self.model = model
    self.name = name
    self.n = n
    self.sets = kwargs.get('sets', [misc.my_slice(0, n, 1)])
def voltage_response(self, currents, times, start, sim_time, id):
    '''
    Inject a step-current profile into neuron `id`, simulate, and return
    the recorded membrane potential trace.

    Returns:
        times   sample times derived from the multimeter interval
        voltage recorded V_m samples for neuron `id`
    '''
    # Build a step-current generator following the requested profile.
    generator = my_nest.Create('step_current_generator', n=1)
    my_nest.SetStatus(generator, {'amplitude_times': times,
                                  'amplitude_values': currents})

    receptors = my_nest.GetStatus([id])[0]['receptor_types']
    my_nest.Connect(generator, [id],
                    params={'receptor_type': receptors['CURR']})

    my_nest.MySimulate(sim_time)

    # Retrieve voltage and spike signals, then overlay spike peaks on V_m.
    self.get_signal('v', 'V_m', start=start, stop=sim_time)
    self.get_signal('s')  # , start=start, stop=sim_time)
    self.signals['V_m'].my_set_spike_peak(15,
                                          spkSignal=self.signals['spikes'])

    voltage = self.signals['V_m'][id].signal
    dt = self.mm['params']['interval']
    times = numpy.arange(0, len(voltage) * dt, dt)

    if len(times) != len(voltage):
        raise Exception('The vectors has to be the same length')

    return times, voltage
def create_mm(self, name, d_add, **kw):
    '''
    Build (and optionally connect) a named multimeter model.

    Arguments:
        name   prefix for the copied multimeter model
        d_add  dict merged over the defaults below
        kw     optional 'slice' key restricting which ids to record from

    Returns the merged settings dict; when active, 'id' and 'model' are
    filled in with the created device.
    '''
    model = name + '_multimeter'
    if model not in my_nest.Models():
        my_nest.CopyModel('multimeter', model)

    settings = {
        'active': False,
        'id': [],
        'model': model,
        'params': {
            'record_from': ['V_m'],
            'start': 0.0,
            'stop': numpy.inf,
            'interval': 1.,  # recording interval (dt)
            'to_file': False,
            'to_memory': True,
        },
    }
    settings = misc.dict_update(settings, d_add)

    if settings['active']:
        device = my_nest.Create(model, params=settings['params'])
        targets = self.ids[kw['slice']] if 'slice' in kw else self.ids
        my_nest.DivergentConnect(device, targets)
        settings.update({'id': device, 'model': model})

    return settings
def sim_group(sim_time, *args, **kwargs):
    # Build a network node, create two 30 Hz poisson generators, connect
    # them, simulate for sim_time ms and return the node so the caller
    # can inspect its recorded signals.
    g = MyNetworkNode(*args, **kwargs)

    # Receptor-type name -> port mapping of the aeif model
    # (keys used below: 'g_AMPA_1', 'g_GABAA_1').
    df = my_nest.GetDefaults('my_aeif_cond_exp')['receptor_types']

    inp_ex = my_nest.Create('poisson_generator', params={'rate': 30.})
    inp_ih = my_nest.Create('poisson_generator', params={'rate': 30.})

    # NOTE(review): both loops connect inp_ex -> inp_ih, i.e. generator
    # to generator, first via AMPA then via GABAA receptors, and the
    # group g receives no input at all. Presumably the generators were
    # meant to target the neurons of g — confirm against callers.
    for pre in inp_ex:
        for post in inp_ih:
            my_nest.Connect(pre, post, {'receptor_type': df['g_AMPA_1']})

    for pre in inp_ex:
        for post in inp_ih:
            my_nest.Connect(pre, post, {'receptor_type': df['g_GABAA_1']})

    my_nest.Simulate(sim_time)
    return g
def set_spike_times(self, rates=[], times=[], t_stop=None,
                    ids=None, seed=None, idx=None):
    '''
    Create poisson generators realising a piecewise-constant rate
    profile: rates[i] is active from times[i] until times[i+1] (the
    last segment ends at t_stop). The created generators replace
    self.ids. The ids, seed and idx arguments are accepted for
    signature compatibility but unused here.
    '''
    starts = times
    stops = list(times[1:]) + [t_stop]

    segment_params = []
    for rate, start, stop in zip(rates, starts, stops):
        segment_params.append({'rate': rate, 'start': start, 'stop': stop})

    # NEST accepts a single dict or a list of dicts (one per node).
    status = segment_params[0] if len(segment_params) == 1 else segment_params
    created = my_nest.Create('poisson_generator', len(segment_params), status)

    self.ids = list(created)
    self.local_ids = list(created)  # need to put on locals also
def create_sd(self, name, d_add):
    '''
    Build (and optionally connect) a named spike detector model.

    Arguments:
        name   prefix for the copied spike_detector model
        d_add  dict merged over the defaults below

    Returns the merged settings dict; when active, 'id' and 'model' are
    filled in with the created device.
    '''
    model = name + '_spike_detector'
    if model not in my_nest.Models():
        my_nest.CopyModel('spike_detector', model)

    settings = {
        'active': False,
        'params': {
            'withgid': True,
            'to_file': False,
            'start': 0.0,
            'stop': numpy.inf,
            'to_memory': True,
        },
    }
    settings = misc.dict_update(settings, d_add)

    if settings['active']:
        device = my_nest.Create(model, params=settings['params'])
        my_nest.ConvergentConnect(self.ids, device)
        settings.update({'id': device, 'model': model})

    return settings
def set_spike_times(self, rates=[], times=[], t_stop=None, ids=None,
                    seed=None, idx=None):
    '''
    Program the input nodes with a piecewise-constant rate profile:
    rates[i] is active from times[i] until times[i+1] (the last segment
    ends at t_stop). Dispatches on the NEST model of self.model:
    spike_generator, mip_generator or poisson_generator.

    Arguments:
        rates   list of rates (Hz), one per time segment
        times   segment start times (ms)
        t_stop  end of the last segment (ms)
        ids     explicit target ids (overrides idx)
        seed    unused; overwritten with a fresh random seed per node
        idx     index array selecting a subset of self.ids
    '''
    df = my_nest.GetDefaults(self.model)

    # Resolve targets: explicit ids win, then idx into self.ids,
    # otherwise all ids.
    if ids is None and (not idx is None):
        tmp_ids = numpy.array(self.ids)
        ids = list(tmp_ids[idx])
    if ids is None:
        ids = self.ids

    # Spike generator: draw one inhomogeneous-poisson realisation per node.
    if 'spike_generator' == df['model']:
        for id in ids:
            seed = random_integers(0, 10**5)
            spikeTimes = misc.inh_poisson_spikes(rates, times,
                                                 t_stop=t_stop,
                                                 n_rep=1, seed=seed)
            if any(spikeTimes):
                # Round spike times to the kernel resolution, otherwise
                # NEST rejects off-grid times.
                rs = my_nest.GetKernelStatus('resolution')
                n_dec = int(-numpy.log10(rs))
                spikeTimes = numpy.round(spikeTimes, n_dec)
                my_nest.SetStatus([id], params={'spike_times': spikeTimes})

    # MIP: mother process at rate r/c, copied with probability c.
    elif 'mip_generator' == df['model']:
        c = df['p_copy']
        seed = random_integers(0, 10**6)
        new_ids = []
        t_starts = times
        t_stops = times[1:] + [t_stop]
        for id in ids:
            # BUG FIX: the original iterated over the tuple
            # (rates, t_starts, t_stops) instead of zip(...), and never
            # advanced i, so extra-segment generators were never created.
            for i, (r, start, stop) in enumerate(zip(rates, t_starts,
                                                     t_stops)):
                r_mother = r / c
                params = {'rate': r_mother, 'start': start, 'stop': stop,
                          'p_copy': c, 'mother_seed': seed}
                if i == 0:
                    # BUG FIX: original called nest.SetStatus(id, ...) —
                    # wrong module and a bare gid; my_nest with a list
                    # matches every other call in this method.
                    my_nest.SetStatus([id], params)
                else:
                    new_id = my_nest.Create('mip_generator', 1, params)
                    new_ids.append(new_id)
        self.ids.append(new_ids)

    # Poisson generator: reuse node for segment 0, create one extra
    # generator per further segment, all feeding a parrot neuron.
    elif 'poisson_generator' == df['model']:
        t_starts = times
        t_stops = list(times[1:]) + list([t_stop])

        for i in range(len(ids)):
            id = ids[i]
            model = my_nest.GetStatus([id], 'model')[0]
            if model == 'parrot_neuron':
                j = 1
            else:
                # Interpose a parrot neuron that merges all generators.
                ids[i] = my_nest.Create('parrot_neuron')[0]
                my_nest.Connect([id], [ids[i]])
                j = 0
            for r, start, stop in zip(rates, t_starts, t_stops):
                params = {'rate': r, 'start': start, 'stop': stop}
                if j == 0:
                    my_nest.SetStatus([id], params)
                else:
                    if params['rate'] != 0:
                        new_id = my_nest.Create('poisson_generator', 1,
                                                params)
                        my_nest.Connect(new_id, [ids[i]])
                j += 1

    # Write the (possibly replaced) ids back.
    if not idx is None:
        tmp_ids = numpy.array(self.ids)
        tmp_ids[idx] = list(ids)
        self.ids = list(tmp_ids)
    else:
        self.ids = list(ids)
    self.local_ids = list(self.ids)  # Need to put on locals also
def __init__(self, model='iaf_neuron', n=1, params=None, mm_dt=1.0,
             sname='', spath='', sname_nb=0, sd=False, sd_params=None,
             mm=False, record_from=None, ids=None):
    '''
    Constructor

    Arguments:
        ids         provide ids if nodes already created
        model       my_nest model type, can be a list
        n           number of models to create, can be a list
        params      common parameters for the model to be set
        mm_dt       multimeter recording precision
        sname       file basename (default: derived from model)
        spath       path to save files at (default: cwd + /output_tmp)
        sname_nb    number appended to the file basename
        sd          boolean, True if spikes should be recorded
        sd_params   extra spike-detector parameters
        mm          boolean, True if multimeter should record
        record_from recordables to record (default: all recordables)
    '''
    # BUG FIX: the original signature used mutable defaults ({} / []).
    # sd_params in particular was mutated below via .update(), so the
    # shared default dict leaked state across instances.
    if params is None:
        params = {}
    if sd_params is None:
        sd_params = {}
    if record_from is None:
        record_from = []
    if ids is None:
        ids = []

    self.connections = {}  # Set after network has been built with FindConnections
    self.ids = []
    self.local_ids = []
    self.mm = []  # Id of multimeter
    self.mm_dt = 0  # Recording interval multimeter
    self.model = model
    self.params = []
    self.record_from = []
    self.receptor_types = {}
    self.recordables = {}
    self.sd = []  # Id of spike detector
    self.sd_params = dict(sd_params)  # copy: updated in place below
    self.sname_nb = sname_nb  # number for sname string
    self.sname = ''  # Specific file basename
    self.spath = ''  # Path to save file at
    self.signals = {}  # signals for current, conductance, voltage or spikes

    # BUG FIX: the original tested self.sname (always '' at this point)
    # and its else branch referenced a non-existent attribute self.snam;
    # the intent was to honour a caller-supplied sname.
    if not sname:
        self.sname = model + '-' + str(sname_nb) + '-'
    else:
        self.sname = sname + '-' + str(sname_nb) + '-'

    # If no spath is provided, current path plus output_tmp is used.
    # BUG FIX: original compared with 'is', an identity check.
    if spath == '':
        self.spath = os.getcwd() + '/output_tmp'
    else:
        self.spath = spath

    # Create save dir if it does not exist. Best effort, as before
    # (the original shelled out to mkdir with stderr discarded).
    try:
        if not os.path.isdir(self.spath):
            os.makedirs(self.spath)
    except OSError:
        pass

    if ids:
        self.ids = ids
    else:
        self.ids = my_nest.Create(model, n, params)  # Create models

    # Get local ids on this processor. Necessary to have for mpi run.
    for id in self.ids:
        nodetype = my_nest.GetStatus([id])[0]['model']
        if nodetype != 'proxynode':
            self.local_ids.append(id)

    self.params = my_nest.GetStatus(self.ids)

    # Pick out recordables and receptor types using first model; not all
    # models define them, hence the best-effort lookups.
    try:
        self.recordables = my_nest.GetDefaults(model)['recordables']
    except Exception:
        pass
    try:
        self.receptor_types = my_nest.GetDefaults(model)['receptor_types']
    except Exception:
        pass

    if self.recordables:
        if any(record_from):
            self.record_from = record_from
        else:
            self.record_from = self.recordables

    # Add spike detector
    if sd:
        self.sd = my_nest.Create("spike_detector")
        self.sd_params.update({"withgid": True})
        my_nest.SetStatus(self.sd, self.sd_params)
        my_nest.ConvergentConnect(self.ids, self.sd)

    # Record with multimeter
    if mm:
        self.mm = my_nest.Create("multimeter")
        self.mm_dt = mm_dt  # Recording interval
        my_nest.SetStatus(self.mm, {'interval': self.mm_dt,
                                    'record_from': self.record_from})
        my_nest.DivergentConnect(self.mm, self.ids)
def IV_I_clamp(self, I_vec, id=None, tStim=2000):
    '''
    Assure no simulations have been run before this (reset kernel).
    Create an I-V relation by injecting step currents and measuring the
    steady-state membrane potential (current clamp).

    Inputs:
        I_vec   step currents to inject
        id      id of neuron to use for calculating the I-V relation
                (default: first id of the group)
        tStim   length of each step-current stimulation in ms

    Returns:
        I_vec           currents, truncated to the accepted traces
        vSteadyState    steady-state voltage per accepted current

    Examples:
        >> n  = my_nest.Create('izhik_cond_exp')
        >> sc = [ float( x ) for x in range( -300, 100, 50 ) ]
        >> tr_t, tr_v, v_ss = IV_I_clamp( id = n, tSim = 500, I_vec = sc )
    '''
    vSteadyState = []

    if not id:
        id = self.ids[0]
    if isinstance(id, int):
        id = [id]

    tAcum = 1  # accumulated simulation time; step_current_generator
               # requires it to start at t > 0

    scg = my_nest.Create('step_current_generator')
    rec = my_nest.GetStatus(id)[0]['receptor_types']
    my_nest.Connect(scg, id, params={'receptor_type': rec['CURR']})

    # One amplitude step per current, each lasting tStim ms.
    ampTimes = []
    ampValues = []
    for I_e in I_vec:
        ampTimes.extend([float(tAcum)])
        ampValues.extend([float(I_e)])
        tAcum += tStim

    my_nest.SetStatus(scg, params={'amplitude_times': ampTimes,
                                   'amplitude_values': ampValues})
    my_nest.Simulate(tAcum)

    self.get_signal('v', 'V_m', stop=tAcum)  # retrieve signal
    self.get_signal('s')

    # BUG FIX: removed a stray Python 2 debug statement (print 'hej')
    # that fired whenever the neuron spiked at all.

    # Only keep traces without spikes, so V_m is a true steady state;
    # the first 10 ms of each step are skipped as transient.
    tAcum = 1
    for I_e in I_vec:
        if 0 >= self.signals['spikes'].mean_rate(tAcum + 10, tAcum + tStim):
            signal = self.signals['V_m'].my_time_slice(tAcum + 10,
                                                       tAcum + tStim)
            vSteadyState.append(signal[1].signal[-1])
        tAcum += tStim

    # Truncate currents to match the accepted steady-state voltages.
    I_vec = I_vec[0:len(vSteadyState)]

    return I_vec, vSteadyState
def I_PSE(self, I_vec, synapse_model, id=0, receptor='I_GABAA_1'):
    '''
    Assure no simulations has been run before this (reset kernel).
    Function creates relation between maz size of postsynaptic event
    (current, conductance, etc). The type is set by receptor.

    Inputs:
        I_vec          - step currents to clamp at
        synapse_model  - synapse model used to connect the spike
                         generator to the neuron
        id             - id of neuron to use for calculating I-F relation
                         If not providet id=ids[0]
        receptor       - recordable to measure the PSE on

    Returns:
        v_vec    - voltage clamped at
        size_vec - PSE size at each voltage

    Examples:
        >> n  = my_nest.Create('izhik_cond_exp')
        >> sc = [ float( x ) for x in range( -300, 100, 50 ) ]
        >> tr_t, tr_v, v_ss = IV_I_clamp( id = n, tSim = 500, I_vec = sc ):
    '''
    vSteadyState = []

    if not id:
        id = self.ids[0]
    if isinstance(id, int):
        id = [id]

    # One 700 ms clamp window per current; a single presynaptic spike is
    # fired 500 ms into each window.
    simTime = 700.  # ms
    spikes_at = numpy.arange(500., len(I_vec) * simTime, simTime)  # ms
    voltage = []  # mV
    pse = []  # post synaptic event

    sg = my_nest.Create('spike_generator',
                        params={'spike_times': spikes_at})
    my_nest.Connect(sg, id, model=synapse_model)

    # Clamp at each current in turn by simulating consecutive windows.
    simTimeTot = 0
    for I_e in I_vec:
        my_nest.SetStatus(self[:], params={'I_e': float(I_e)})
        my_nest.MySimulate(simTime)
        simTimeTot += simTime

    # receptor[0].lower() selects the signal kind ('i', 'g', ...).
    self.get_signal(receptor[0].lower(), receptor,
                    stop=simTimeTot)  # retrieve signal

    # Measure the PSE in the 400-700 ms part of each window (after the
    # clamp has settled, around the spike at 500 ms).
    simTimeAcum = 0
    for I_e in I_vec:
        size = []
        signal = self.signals[receptor].my_time_slice(
            400 + simTimeAcum, 700 + simTimeAcum)
        simTimeAcum += simTime

        # First signal object at position 1
        # NOTE(review): index 999 presumably picks the last sample of a
        # 1000-sample slice, i.e. the clamped baseline just before the
        # response decays — depends on a 0.3 ms-ish recording interval;
        # confirm against the multimeter settings.
        clamped_at = signal[1].signal[999]
        minV = min(signal[1].signal)
        maxV = max(signal[1].signal)

        # Take the larger deflection from baseline, keeping its sign.
        if abs(minV - clamped_at) < abs(maxV - clamped_at):
            size.append(max(signal[1].signal) - clamped_at)
        else:
            size.append(min(signal[1].signal) - clamped_at)

        voltage.append(clamped_at)
        pse.append(size[0])

    return voltage, pse
def set_spike_times(self, rates=[], times=[], t_stop=None, ids=None,
                    seed=None, idx=None):
    '''
    Program the input nodes with a piecewise-constant rate profile:
    rates[i] is active from times[i] until times[i+1] (the last segment
    ends at t_stop). Dispatches on self.type_model: spike_generator,
    mip_generator, (my_)poisson_generator or poisson_generator_dynamic.

    Arguments:
        rates   list of rates (Hz), one per time segment
        times   segment start times (ms)
        t_stop  end of the last segment (ms)
        ids     explicit target ids (overrides idx)
        seed    unused; overwritten with a fresh random seed per node
        idx     index array selecting a subset of self.ids; also keys
                the ids_generator cache

    Raises:
        ValueError if self.type_model is not handled.
    '''
    # BUG FIX: the original read GetDefaults(...)['model'] (a string)
    # and later indexed it with 'p_copy', which cannot work; keep the
    # whole defaults dict instead.
    df = my_nest.GetDefaults(self.input_model)

    # Resolve targets: explicit ids win, then idx into self.ids,
    # otherwise all ids.
    if ids is None and (not idx is None):
        tmp_ids = numpy.array(self.ids)
        ids = list(tmp_ids[idx])
    if ids is None:
        ids = self.ids

    # Spike generator: one inhomogeneous-poisson realisation per node.
    if 'spike_generator' == self.type_model:
        for id in ids:
            seed = random_integers(0, 10**5)
            spikeTimes = misc.inh_poisson_spikes(rates, times,
                                                 t_stop=t_stop,
                                                 n_rep=1, seed=seed)
            if any(spikeTimes):
                my_nest.SetStatus([id], params={'spike_times': spikeTimes})

    # MIP: mother process at rate r/c, copied with probability c.
    elif 'mip_generator' == self.type_model:
        c = df['p_copy']
        seed = random_integers(0, 10**6)
        new_ids = []
        t_starts = times
        t_stops = times[1:] + [t_stop]
        for id in ids:
            # BUG FIX: the original iterated over the tuple
            # (rates, t_starts, t_stops) instead of zip(...), and never
            # advanced i, so extra-segment generators were never created.
            for i, (r, start, stop) in enumerate(zip(rates, t_starts,
                                                     t_stops)):
                r_mother = r / c
                params = {'rate': r_mother, 'start': start, 'stop': stop,
                          'p_copy': c, 'mother_seed': seed}
                if i == 0:
                    my_nest.SetStatus(id, params)
                else:
                    new_id = my_nest.Create('mip_generator', 1, params)
                    new_ids.append(new_id)
        self.ids.append(new_ids)

    # Poisson generators: one generator per segment, fanned out so each
    # target id is driven by every segment generator.
    elif self.type_model in ['my_poisson_generator', 'poisson_generator']:
        t_starts = times
        t_stops = list(times[1:]) + list([t_stop])

        params = [{'rate': v[0], 'start': v[1], 'stop': v[2]}
                  for v in zip(rates, t_starts, t_stops)]
        if len(params) == 1:
            source_nodes = my_nest.Create('poisson_generator',
                                          len(params),
                                          params[0]) * len(ids)
        else:
            source_nodes = my_nest.Create('poisson_generator',
                                          len(params),
                                          params) * len(ids)

        # Repeat each target id once per segment so sources and targets
        # pair up elementwise in Connect.
        target_nodes = numpy.array([[id_] * len(rates) for id_ in ids])
        target_nodes = list(numpy.reshape(target_nodes,
                                          len(rates) * len(ids),
                                          order='C'))
        my_nest.Connect(source_nodes, target_nodes)

        # BUG FIX: the original tested membership with hash(tuple(ids))
        # but read/stored under hash(tuple(idx)); use one key throughout
        # (the store key, to preserve existing cache contents).
        generators = []
        key = hash(tuple(idx))
        if key in self.ids_generator.keys():
            generators = self.ids_generator[key]
        generators = list(set(source_nodes).union(generators))
        self.ids_generator[key] = sorted(generators)

        self.local_ids = list(self.ids)  # Need to put on locals also

    # Dynamic poisson generator: a single device holds the whole
    # timings/rates profile and drives every target.
    elif 'poisson_generator_dynamic' == self.type_model:
        source_nodes = my_nest.Create(self.type_model, 1,
                                      {'timings': times,
                                       'rates': rates}) * len(ids)
        target_nodes = ids
        my_nest.Connect(source_nodes, target_nodes)

        # Same key fix as above.
        generators = []
        key = hash(tuple(idx))
        if key in self.ids_generator.keys():
            generators = self.ids_generator[key]
        generators = list(set(source_nodes).union(generators))
        self.ids_generator[key] = sorted(generators)

        # v=my_nest.GetStatus(ids, 'local')
        # self.local_ids=[_id for _id in zip(ids,v) if
        # Need to put on locals also

    else:
        msg = ('type_model ' + self.type_model +
               ' is not accounted for in set_spike_times')
        raise ValueError(msg)