def create_cache(configuration):
    """Build the full Brian2 network (input -> hidden -> output, plus optional
    artifact filters), snapshot its initial state, and bundle everything into
    a Cache object for repeated restore/run cycles.

    Parameters
    ----------
    configuration : object
        Run configuration; consulted for neuron counts and for whether the
        (advanced) artifact filter should be attached.

    Returns
    -------
    Cache
        Holds the model paths, neuron counts, monitors, the stored Network,
        the input layer, and (possibly None) the advanced-artifact-filter
        input layer.
    """
    model_paths = load_model_paths()
    neuron_counts = _read_neuron_counts(configuration)
    # Hidden layer with a fixed leakage current (Itau) applied to all neurons.
    hidden_layer = create_non_input_layer(model_paths, neuron_counts.hidden,
                                          'hidden')
    hidden_layer.Itau = get_current(15.3e-3) * amp
    number_of_output_neurons = 1
    # num_inputs must match how many layers project onto the output layer.
    layers_connected_to_output = _get_layers_connected_to_output_count(
        configuration)
    output_layer = create_non_input_layer(
        model_paths,
        number_of_output_neurons,
        'output',
        num_inputs=layers_connected_to_output)
    hidden_to_output_synapses = create_hidden_to_output_synapses(
        'main', hidden_layer, output_layer, model_paths, neuron_counts)
    input_layer = create_input_layer('main', neuron_counts.input)
    input_to_hidden_synapses = create_input_to_hidden_synapses(
        name='main',
        input_layer=input_layer,
        hidden_layer=hidden_layer,
        model_paths=model_paths,
        neuron_counts=neuron_counts)
    spike_monitors = SpikeMonitors(hidden=SpikeMonitor(hidden_layer),
                                   output=SpikeMonitor(output_layer))
    network = Network(input_layer, input_to_hidden_synapses, hidden_layer,
                      spike_monitors.hidden, spike_monitors.output,
                      output_layer, hidden_to_output_synapses)
    # Optional artifact-rejection subnetworks, controlled by configuration.
    if should_add_artifact_filter(configuration):
        add_artifact_filter_to_network(model_paths, input_layer, output_layer,
                                       network)
    advanced_artifact_filter_input_layer = add_advanced_artifact_filter_to_network(
        network, output_layer, model_paths,
        neuron_counts) if should_add_advanced_artifact_filter(
            configuration) else None
    # Snapshot the freshly built network so callers can restore() back to
    # this pristine state before every run.
    network.store()
    return Cache(
        model_paths=model_paths,
        neuron_counts=neuron_counts,
        spike_monitors=spike_monitors,
        network=network,
        input_layer=input_layer,
        advanced_artifact_filter_input_layer=
        advanced_artifact_filter_input_layer,
    )
def run_network(traj):
    """Runs a brian network consisting of 100 inhibitory IF neurons.

    (The previous docstring said 200 neurons; the group below is created
    with 100.)  Results (spikes, voltages, population rate) are stored on
    the pypet trajectory `traj`.
    """
    eqs = '''
    dv/dt=(v0-v)/(5*ms) : volt (unless refractory)
    v0 : volt
    '''
    group = NeuronGroup(100, model=eqs, threshold='v>10 * mV',
                        reset='v = 0*mV', refractory=5 * ms)
    # Resting-level parameter comes from the trajectory; initial voltages
    # are randomised in [0, 10) mV.
    group.v0 = traj.par.v0
    group.v = np.random.rand(100) * 10.0 * mV
    # Inhibitory all-to-all-ish coupling (20% connectivity, no self-loops).
    syn = Synapses(group, group, on_pre='v-=1*mV')
    syn.connect('i != j', p=0.2)
    spike_monitor = SpikeMonitor(group, variables=['v'])
    voltage_monitor = StateMonitor(group, 'v', record=True)
    # Monitor name includes the run index so parallel runs do not collide.
    pop_monitor = PopulationRateMonitor(group, name='pop' + str(traj.v_idx))
    net = Network(group, syn, spike_monitor, voltage_monitor, pop_monitor)
    net.run(0.25 * second, report='text')
    traj.f_add_result(Brian2MonitorResult, 'spikes', spike_monitor)
    traj.f_add_result(Brian2MonitorResult, 'v', voltage_monitor)
    traj.f_add_result(Brian2MonitorResult, 'pop', pop_monitor)
def create_net():
    """Build a small deterministic test network with heterogeneous delays.

    All objects carry explicit, fixed names so that a stored network state
    can be matched up again in a fresh process (name-based lookup), without
    relying on garbage collection of previous instances.
    """
    spike_times = [3, 4, 1, 2, 3, 7, 5, 4, 1, 0, 5, 9, 7, 8, 9] * ms
    gen = SpikeGeneratorGroup(5, np.arange(5).repeat(3), spike_times,
                              name='source')
    cells = NeuronGroup(10, 'v:1', name='target')
    conns = Synapses(gen, cells, model='w:1', on_pre='v+=w', name='synapses')
    conns.connect('j>=i')
    conns.w = 'i*1.0 + j*2.0'
    conns.delay = '(5-i)*ms'
    v_mon = StateMonitor(cells, 'v', record=True, name='statemonitor')
    in_mon = SpikeMonitor(gen, name='input_spikes')
    return Network(gen, cells, conns, v_mon, in_mon)
def test_store_restore_magic():
    """Store/restore through the magic (implicit) network must reproduce
    exactly the same voltages and spikes on a re-run, both from t=0 and
    from a mid-run snapshot."""
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', pre='v+=w',
                        connect='i==j')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    store()          # snapshot at t = 0 (default slot)
    run(10*ms)
    store('second')  # snapshot at t = 10 ms
    run(10*ms)
    # Reference data after the full 20 ms.
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_

    def check_matches_reference():
        assert_equal(v_values, state_mon.v[:, :])
        assert_equal(spike_indices, spike_mon.i[:])
        assert_equal(spike_times, spike_mon.t_[:])

    # Replay from the very beginning.
    restore()
    assert magic_network.t == 0*ms
    run(20*ms)
    assert defaultclock.t == 20*ms
    check_matches_reference()
    # Replay from the middle.
    restore('second')
    assert magic_network.t == 10*ms
    run(10*ms)
    assert defaultclock.t == 20*ms
    check_matches_reference()
def test_plot_monitors():
    """Smoke-test the plotting helpers on each monitor type: no exception,
    and each call returns a matplotlib Axes object as promised."""
    set_device('runtime')
    group = NeuronGroup(10, 'dv/dt = -v/(10*ms) : volt', threshold='False',
                        method='linear')
    group.v = np.linspace(0, 1, 10)*mV
    spike_mon = SpikeMonitor(group)
    rate_mon = PopulationRateMonitor(group)
    state_mon = StateMonitor(group, 'v', record=[3, 5])
    run(10*ms)

    def check_axes(ax, close=True):
        # Every plotting helper must hand back an Axes instance.
        assert isinstance(ax, matplotlib.axes.Axes)
        if close:
            plt.close()

    check_axes(brian_plot(spike_mon))
    check_axes(plot_raster(spike_mon.i, spike_mon.t))
    check_axes(brian_plot(rate_mon))
    check_axes(plot_rate(rate_mon.t, rate_mon.rate))
    check_axes(brian_plot(state_mon))
    # The final figure is deliberately left open, as in the original test.
    check_axes(plot_state(state_mon.t, state_mon.v.T), close=False)
def _add_monitors(self, traj, network, network_dict):
    """Adds monitors to the network"""
    neurons_e = network_dict['neurons_e']
    # All state monitors record the same subset of neurons.
    record_ids = list(traj.neuron_records)
    # Spike times of the excitatory population.
    self.spike_monitor = SpikeMonitor(neurons_e)
    # Membrane potential.
    self.V_monitor = StateMonitor(neurons_e, 'V', record=record_ids)
    # Excitatory synaptic current.
    self.I_syn_e_monitor = StateMonitor(neurons_e, 'I_syn_e',
                                        record=record_ids)
    # Inhibitory synaptic current.
    self.I_syn_i_monitor = StateMonitor(neurons_e, 'I_syn_i',
                                        record=record_ids)
    monitor_list = [self.spike_monitor, self.V_monitor,
                    self.I_syn_e_monitor, self.I_syn_i_monitor]
    # Register with the network and expose via the shared dictionary.
    network.add(*monitor_list)
    network_dict['monitors'] = monitor_list
def __init__(self, group=None):
    """Wrap a neuron group with voltage/spike monitors and an update
    operator inside a private Network.

    Parameters
    ----------
    group : optional
        Pre-built neuron wrapper; when omitted, a single Izhikevich-model
        neuron is created as the default.
    """
    if group is not None:
        self.group = group
    else:
        # Default: one Izhikevich neuron with one output.
        izhikevich_eqs = '''dv/dt = (0.04*active/ms/mV + 0.04/ms/mV)*v**2+(5/ms)*v+140*mV/ms-w + I : volt (unless refractory)
                            dw/dt = (0.02/ms)*((0.2/ms)*v-w) : volt/second
                            active : 1
                            I : volt/second'''
        self.group = Neuron(
            NeuronGroup(1, izhikevich_eqs, threshold='v > 50*mV',
                        reset='v = -50*mV', refractory=10 * ms,
                        method='rk2'))
    # Record membrane voltage and spikes of the wrapped group.
    self.state_monitor = StateMonitor(self.group, 'v', record=True)
    self.spike_monitor = SpikeMonitor(self.group)
    # Periodic callback driving the 'active' state.
    self.operator = NetworkOperation(self.update_active, dt=100 * ms)
    # Private network so this neuron runs independently of the magic network.
    self.network = Network()
    self.network.add(self.group, self.state_monitor, self.spike_monitor,
                     self.operator)
    self.input = 0
def setup_spikes(request):
    """Pytest fixture body: simulate one exponential-IF neuron driven by a
    TimedArray current for 60 ms and return (spike monitor, spike times).

    Device state is reset at teardown via the request finalizer.
    """
    request.addfinalizer(reinit_devices)
    # Membrane parameters (referenced by name inside the equations string).
    EL = -70 * mV
    VT = -50 * mV
    DeltaT = 2 * mV
    C = 1 * nF
    gL = 30 * nS
    I = TimedArray(input_current, dt=0.01 * ms)
    model = Equations('''
    dv/dt = (gL*(EL-v)+gL*DeltaT*exp((v-VT)/DeltaT) + I(t))/C : volt
    ''')
    group = NeuronGroup(1, model, threshold='v > -50*mV', reset='v = -70*mV',
                        method='exponential_euler')
    group.v = -70 * mV
    spike_mon = SpikeMonitor(group)
    run(60 * ms)
    return spike_mon, spike_mon.t_
def simulation1(flag_device=False, path="", rec_idx=idx_to_record):
    """Simulate 100 LIF neurons with linearly ramped resting potentials.

    When `flag_device` is set, output is exported through the neuroml2
    device and None is returned; otherwise the recorded voltage traces for
    `rec_idx` are returned as a numpy array.
    """
    if flag_device:
        set_device('neuroml2', filename=LEMS_OUTPUT_FILENAME)
    n = 100
    duration = 1 * second
    tau = 10 * ms  # referenced by name inside the equations string
    eqs = '''
    dv/dt = (v0 - v) / tau : volt (unless refractory)
    v0 : volt
    '''
    group = NeuronGroup(n, eqs, threshold='v > 10*mV', reset='v = 0*mV',
                        refractory=5 * ms, method='linear')
    group.v = 0 * mV
    # Resting potential ramps linearly across the population.
    group.v0 = '20*mV * i / (N-1)'
    statemonitor = StateMonitor(group, 'v', record=rec_idx)
    spikemonitor = SpikeMonitor(group, record=rec_idx)
    run(duration)
    if flag_device:
        return None
    return np.asarray([statemonitor[ri].v for ri in rec_idx])
def test_spikemonitor():
    """
    Test collector function for SpikeMonitor
    """
    # example 1
    grp = NeuronGroup(5, '''dv/dt = (v0 - v)/tau :volt''', method='exact',
                      threshold='v > v_th', reset='v = v0',
                      name="My_Neurons")
    tau = 10 * ms
    v0 = -70 * mV
    v_th = 800 * mV
    mon = SpikeMonitor(grp, 'v', record=[0, 4])
    mon_dict = collect_SpikeMonitor(mon)
    assert mon_dict['source'] == 'My_Neurons'
    # BUG FIX: the original assertion was
    #   mon_dict['variables'].sort() == ['i', 't', 'v'].sort()
    # list.sort() sorts in place and returns None, so it compared
    # None == None and passed unconditionally. Compare sorted copies.
    assert sorted(mon_dict['variables']) == ['i', 't', 'v']
    assert mon_dict['record'] == [0, 4]
    assert mon_dict['event'] == 'spike'
    assert mon_dict['when'] == 'thresholds'
    assert mon_dict['order'] == 1
    # example 2
    pos = PoissonGroup(5, rates=100 * Hz)
    smon = SpikeMonitor(pos, record=[0, 1, 2, 3, 4])
    smon_dict = collect_SpikeMonitor(smon)
    assert smon_dict['source'] == pos.name
    assert 'i' in smon_dict['variables']
    assert smon_dict['record'] == [0, 1, 2, 3, 4]
    assert smon_dict['when'] == 'thresholds'
    assert smon_dict['order'] == 1
    # example 3
    spk = SpikeGeneratorGroup(10, [2, 6, 8], [5 * ms, 10 * ms, 15 * ms])
    spkmon = SpikeMonitor(spk, ['t', 'i'], record=0)
    smon_dict = collect_SpikeMonitor(spkmon)
    assert smon_dict['record'] == np.array([0])
    assert 't' in smon_dict['variables']
    assert smon_dict['source'] == spk.name
    assert smon_dict['when'] == 'thresholds'
    assert smon_dict['order'] == 1
def setup_input_layer(components, connectivity, mismatch, Ninp, currents,
                      wGen):
    """
    Build the input layer: `Ninp` DPI neurons driven by the 4-neuron spike
    generator through excitatory/inhibitory synapses, plus spike and state
    monitors.

    Parameters
    ----------
    components : dict
        Previously defined network components (must hold 'generator').
    connectivity : dict
        Pre/post index arrays and weights under the 'gen_inp' key, used by
        the Brian2 synapse connect method.
    mismatch : dict
        Optional percentual standard deviation of device mismatch, keyed by
        group name ('gInp').
    Ninp : int
        Number of input neurons.
    currents : dict
        Optional current values to set on the input neurons, keyed by group
        name ('gInp').
    wGen : float
        Weight of the generator synapses.

    Returns
    -------
    dict
        The updated `components` dictionary.
    """
    gInp = Neurons(Ninp, equation_builder=DPI(num_inputs=2),
                   refractory=0.0 * ms, name='gInp')
    if 'gInp' in mismatch:
        gInp.add_mismatch(mismatch['gInp'], seed=42)
    # Apply any configured bias currents to the input neurons.
    for key, value in currents.get('gInp', {}).items():
        setattr(gInp, key, value)
    sGenInp = Connections(components['generator'], gInp,
                          equation_builder=DPISyn(), method='euler',
                          name='sGenInp')
    gen_inp = connectivity['gen_inp']
    sGenInp.connect(i=gen_inp['i'], j=gen_inp['j'])
    sGenInp.weight = wGen * gen_inp['w']
    components['layers']['gInp'] = gInp
    components['synapses']['sGenInp'] = sGenInp
    components['monitors']['mInp'] = SpikeMonitor(gInp, name='mInp')
    components['monitors']['smInp'] = StateMonitor(gInp, ['Imem'],
                                                   name='smInp', record=True)
    return components
def brian2_spike_monitors(populations: Union[MFPopulation, List[MFPopulation]],
                          n: int = 15) -> List[SpikeMonitor]:
    """Create one SpikeMonitor per population, each watching only the first
    `n` neurons of the underlying Brian2 group."""
    monitors = []
    for pop in listify(populations):
        monitors.append(SpikeMonitor(pop.brian2[:n], name=f'spike_{pop.ref}'))
    return monitors
def create_net():
    """Return a list of objects that all depend on one shared NeuronGroup
    (three monitors plus a recurrent synapse object)."""
    G = NeuronGroup(10, 'v: 1', threshold='False')
    dependents = [
        StateMonitor(G, 'v', record=True),
        SpikeMonitor(G),
        PopulationRateMonitor(G),
    ]
    dependents.append(Synapses(G, G, pre='v+=1', connect=True))
    return dependents
def echo_spike(p, tStim, tTot):
    """Run an 'echo' reservoir simulation: a Poisson input pattern drives a
    LIF population for one stimulus window, then input is silenced and the
    recurrent activity is observed.

    Parameters (from SOURCE usage):
      p     : dict of model parameters ('N', 'nu_DV', 'LIF_DV', 'LIF_T_0',
              'nu_p', 'nu_FREQ', 'p_conn', 'LIF_V_0', 'LIF_V_TAU').
      tStim : duration of one stimulus window (Brian2 time).
      tTot  : total simulation time (Brian2 time).

    Returns (spike indices, spike times) of the excitatory population as
    numpy arrays.

    NOTE: the on_pre strings below reference the local names dvInpSpike /
    dvExcSpike, and the PoissonGroup rate string references rateTimedArray;
    Brian2 resolves these from this function's frame, so these names must
    not be changed.
    """
    start_scope()
    prefs.codegen.target = "numpy"

    ################################
    # Compute properties
    ################################
    dvInpSpike = p['nu_DV']  # Voltage increase per noise spike
    dvExcSpike = p['LIF_DV']  # Voltage increase per lateral spike
    # dvExcSpike = p['LIF_T_0'] / (1.0 * p['N'] * p['p_conn'])  # Voltage increase per lateral spike
    print("typical spike threshold", p['LIF_T_0'])
    print("typical potential change per noise spike", dvInpSpike)
    print("typical potential change per lateral spike", dvExcSpike)

    ################################
    # Create input population
    ################################
    # After input switched off, 0-input will be repeated nTStimPost*tStim timesteps
    nTStimPost = int(tTot / tStim) - 1
    # Random binary stimulus pattern; each neuron fires with prob. nu_p.
    patternInp = (np.random.uniform(0, 1, p['N']) < p['nu_p']).astype(int)
    pattern0 = np.zeros(p['N'])
    rates_all = np.array([patternInp] + [pattern0]*nTStimPost) * p['nu_FREQ']
    rateTimedArray = TimedArray(rates_all, dt=tStim)
    gInp = PoissonGroup(p['N'], rates="rateTimedArray(t, i)")
    # gInp = PoissonGroup(p['N'], p['nu_FREQ'])

    ################################
    # Create reservoir population
    ################################
    gExc = brian2wrapper.NeuronGroupLIF(p['N'], p['LIF_V_0'], p['LIF_T_0'],
                                        p['LIF_V_TAU'])

    ################################
    # Create synapses
    ################################
    sInpExc = Synapses(gInp, gExc, on_pre='v_post += dvInpSpike',
                       method='exact')
    sExcExc = Synapses(gExc, gExc, on_pre='v_post += dvExcSpike',
                       method='exact')

    ################################
    # Connect synapses
    ################################
    # * Input and LIF one-to-one
    # * LIF neurons to each other sparsely
    sInpExc.connect(j='i')
    sExcExc.connect(p=p['p_conn'])

    ################################
    # Init Monitors
    ################################
    #spikemonInp = SpikeMonitor(gInp)
    spikemonExc = SpikeMonitor(gExc)

    ################################
    # Run simulation
    ################################
    run(tTot)
    return np.array(spikemonExc.i), np.array(spikemonExc.t)
def test_store_restore_to_file_differing_nets():
    """Restoring a stored state into a *different* network must fail with a
    KeyError — both when an object name differs and when an object is
    missing entirely."""
    filename = tempfile.mktemp(suffix='state', prefix='brian_test')
    gen = SpikeGeneratorGroup(5, [0, 1, 2, 3, 4], [0, 1, 2, 3, 4] * ms,
                              name='source_1')
    monitor = SpikeMonitor(gen, name='monitor')
    net = Network(gen, monitor)
    net.store(filename=filename)
    # Same structure, but the generator carries a different name.
    other_gen = SpikeGeneratorGroup(5, [0, 1, 2, 3, 4], [0, 1, 2, 3, 4] * ms,
                                    name='source_2')
    monitor = SpikeMonitor(other_gen, name='monitor')
    net = Network(other_gen, monitor)
    assert_raises(KeyError, lambda: net.restore(filename=filename))
    # Original generator, but the monitor is missing.
    net = Network(gen)
    assert_raises(KeyError, lambda: net.restore(filename=filename))
def test_simple_syntax():
    """
    Simple example

    Builds a network exercising most NeuronGroup/Synapses features
    (custom events, stochastic resets, string connectivity), runs it under
    the 'markdown' device and checks the generated markdown text with
    _markdown_lint, including a set of strings that must be rejected.
    """
    set_device('markdown')
    N = 10
    tau = 10 * ms
    v_th = 0.9 * volt
    v_rest = -79 * mV
    eqn = 'dv/dt = (v_th - v)/tau :volt'
    refractory = 'randn() * tau / N'
    rates = 'rand() * 5 * Hz'
    group = NeuronGroup(N, eqn, method='euler', threshold='v > v_th',
                        reset='v = v_rest; v = rand() * v_rest',
                        refractory=refractory,
                        events={'custom': 'v > v_th + 10 * mV',
                                'custom_1': 'v > v_th - 10 * mV'})
    group.run_on_event('custom', 'v = v_rest')
    group.run_on_event('custom_1', 'v = v_rest - 0.001 * mV')
    spikegen = SpikeGeneratorGroup(N, [0, 1, 2], [1, 2, 3] * ms,
                                   period=5 * ms)
    po_grp = PoissonGroup(N - 1, rates=rates)
    syn = Synapses(spikegen, group, model='w :volt',
                   on_pre='v = rand() * w + v_th; v = rand() * w',
                   on_post='v = rand() * w + v_rest; v = rand() * w',
                   delay=tau, method='euler')
    # Mixed initialisation styles: full slice, string condition, index slice.
    group.v[:] = v_rest
    group.v['i%2 == 0'] = 'rand() * v_rest'
    group.v[0:5] = 'v_rest + 10 * mV'
    condition = 'abs(i-j)<=5'
    syn.connect(condition=condition, p=0.999, n=2)
    syn.w = '1 * mV'
    net = Network(group, spikegen, po_grp, syn)
    mon = StateMonitor(syn, 'w', record=True)
    mon2 = SpikeMonitor(po_grp)
    mon3 = EventMonitor(group, 'custom')
    net.add(mon, mon2, mon3)
    net.run(0.01 * ms)
    md_str = device.md_text
    assert _markdown_lint(md_str)
    # NOTE: '\t' in the strings below is a literal tab (not LaTeX \tau).
    check = 'randn({sin({$w$}|$v_rest$ - $v$|/{\tau}})})'
    assert _markdown_lint(check)
    # check invalid strings
    with pytest.raises(SyntaxError):
        check = '**Initializing values at starting:*'
        assert _markdown_lint(check)
        check = '- Variable v$ of with $-79. mV$ to all members'
        assert _markdown_lint(check)
        check = 'randn({sin(})})'
        assert _markdown_lint(check)
        check = 'randn({sin({$w$}|$v_rest$ - $v$|/{\tau})})'
        assert _markdown_lint(check)
    device.reinit()
def setup_simulator(self, network_name, n_neurons, output_var, param_init,
                    calc_gradient=False, optimize=True, online_error=False,
                    level=1):
    """Create and initialise a fitting simulator around a fresh neuron group.

    Parameters
    ----------
    network_name : str
        Name of the network/simulation slot ('generate' skips attaching the
        recorded output as a TimedArray).
    n_neurons : int
        Size of the neuron group to create.
    output_var : str
        Variable to record; 'spikes' selects a SpikeMonitor instead of a
        StateMonitor.
    param_init : dict
        Initial parameter values; extended with sensitivity-state inits
        when calc_gradient is set.
    calc_gradient, optimize, online_error : bool
        Passed through to setup_neuron_group; calc_gradient additionally
        records the sensitivity variables S_<output>_<param>.
    level : int
        Namespace stack level for get_full_namespace.

    Returns
    -------
    The initialised simulator object.
    """
    simulator = setup_fit()
    namespace = get_full_namespace(
        {
            'input_var': self.input_traces,
            'n_traces': self.n_traces
        },
        level=level + 1)
    if hasattr(self, 't_start'):  # OnlineTraceFitter
        namespace['t_start'] = self.t_start
    if network_name != 'generate' and self.output_var != 'spikes':
        # Target traces are made available to the model as a TimedArray.
        namespace['output_var'] = TimedArray(self.output.transpose(),
                                             dt=self.dt)
    neurons = self.setup_neuron_group(n_neurons, namespace,
                                      calc_gradient=calc_gradient,
                                      optimize=optimize,
                                      online_error=online_error)
    if output_var == 'spikes':
        monitor = SpikeMonitor(neurons, name='monitor')
    else:
        record_vars = [output_var]
        if calc_gradient:
            # Also record the parameter-sensitivity state variables.
            record_vars.extend(
                [f'S_{output_var}_{p}' for p in self.parameter_names])
        monitor = StateMonitor(neurons, record_vars, record=True,
                               name='monitor', dt=self.dt)
    network = Network(neurons, monitor)
    if calc_gradient:
        # Copy before mutating: do not modify the caller's dict.
        param_init = dict(param_init)
        param_init.update(
            get_sensitivity_init(neurons, self.parameter_names, param_init))
    simulator.initialize(network, param_init, name=network_name)
    return simulator
def test_magic_collect():
    """All six objects created in this frame must be picked up by the magic
    collect() call."""
    # Keep every object bound to a local so the magic network can see it.
    P = PoissonGroup(10, rates=100*Hz)
    G = NeuronGroup(10, 'v:1', threshold='False')
    S = Synapses(G, G, '')
    state_mon = StateMonitor(G, 'v', record=True)
    spike_mon = SpikeMonitor(G)
    rate_mon = PopulationRateMonitor(G)
    found = collect()
    assert len(found) == 6, ('expected %d objects, got %d'
                             % (6, len(found)))
def add_to_network(self, traj, network, current_subrun, subrun_list,
                   network_dict):
    """Attach monitors depending on the subrun order: a population-rate
    monitor for order 0, spike and state monitors for order 1."""
    order = current_subrun.v_annotations.order
    if order == 0:
        rate_monitor = PopulationRateMonitor(network_dict['group'])
        self.monitors['prm'] = rate_monitor
        network.add(rate_monitor)
    elif order == 1:
        group = network_dict['group']
        spike_monitor = SpikeMonitor(group, variables='v')
        state_monitor = StateMonitor(group, variables='v', record=True)
        self.monitors['spm'] = spike_monitor
        self.monitors['sm'] = state_monitor
        network.add(spike_monitor)
        network.add(state_monitor)
def test_store_restore_to_file():
    """Store/restore of an explicit Network to a file must reproduce the
    exact same voltages and spikes on a re-run, both from t=0 and from a
    mid-run snapshot.

    NOTE(review): uses the legacy Synapses(pre=..., connect=...) keyword
    API from older Brian versions — presumably intentional for this test
    suite's Brian version; confirm before modernising.
    """
    filename = tempfile.mktemp(suffix='state', prefix='brian_test')
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', pre='v+=w',
                        connect='i==j')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store(filename=filename)  # default time slot (t = 0)
    net.run(10 * ms)
    net.store('second', filename=filename)  # snapshot at t = 10 ms
    net.run(10 * ms)
    # Reference data after the full 20 ms.
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_
    # Go back to beginning
    net.restore(filename=filename)
    assert defaultclock.t == 0 * ms
    assert net.t == 0 * ms
    net.run(20 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    # Go back to middle
    net.restore('second', filename=filename)
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
    net.run(10 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    # Best-effort cleanup of the temporary state file.
    try:
        os.remove(filename)
    except OSError:
        pass
def monitor_spikes(self, obj, record=True, name=''):
    """
    Adds a `SpikeMonitor` to the network.

    Parameters
    ----------
    obj : str
        Name of the object whose spikes to monitor.
    record : bool, optional
        IDs to monitor. Record everything if True.
    name : str, optional
        Name of monitor. Takes on '``obj`` _spike_monitor' if not
        specified.
    """
    if name == '':
        name = '{}_spike_monitor'.format(obj)
    self.add(SpikeMonitor(self[obj], record=record, name=name))
    self._event_monitors[name] = 'spike'
def run_network():
    """Simulate 10 AdEx neurons whose reset voltage Vr is swept as a
    bifurcation parameter; return a dict of the attached monitors."""
    defaultclock.dt = 0.01 * ms
    # Model parameters — referenced by name from the equation/reset strings,
    # so their names must stay as-is.
    C = 281 * pF
    gL = 30 * nS
    EL = -70.6 * mV
    VT = -50.4 * mV
    DeltaT = 2 * mV
    tauw = 40 * ms
    a = 4 * nS
    b = 0.08 * nA
    I = 8 * nA
    Vcut = "vm>2*mV"  # practical threshold condition
    N = 10
    reset = 'vm=Vr;w+=b'
    eqs = """
    dvm/dt=(gL*(EL-vm)+gL*DeltaT*exp((vm-VT)/DeltaT)+I-w)/C : volt
    dw/dt=(a*(vm-EL)-w)/tauw : amp
    Vr:volt
    """
    neuron = NeuronGroup(N, model=eqs, threshold=Vcut, reset=reset)
    neuron.vm = EL
    neuron.w = a * (neuron.vm - EL)
    neuron.Vr = linspace(-48.3 * mV, -47.7 * mV, N)  # bifurcation parameter
    # Monitors: spike times with vm, population rate, and w/vm traces for
    # a few selected neurons.
    spike_monitor = SpikeMonitor(neuron, variables=['vm'])
    rate_monitor = PopulationRateMonitor(neuron)
    multi_state_monitor = StateMonitor(neuron, ['w', 'vm'],
                                       record=[6, 7, 8, 9])
    run(10 * msecond, report='text')
    return {
        'SpikeMonitor': spike_monitor,
        'MultiState': multi_state_monitor,
        'PopulationRateMonitor': rate_monitor,
    }
def test_magic_collect():
    """All eight objects created in this frame (groups, synapses, custom
    operations and monitors) must be picked up by collect()."""
    # Every object stays bound to a local so the magic network can see it.
    poisson = PoissonGroup(10, rates=100 * Hz)
    neurons = NeuronGroup(10, 'v:1')
    syn = Synapses(neurons, neurons, '')
    neuron_op = neurons.custom_operation('')
    syn_op = syn.custom_operation('')
    v_monitor = StateMonitor(neurons, 'v', record=True)
    spikes = SpikeMonitor(neurons)
    rates = PopulationRateMonitor(neurons)
    found = collect()
    assert len(found) == 8, ('expected %d objects, got %d'
                             % (8, len(found)))
def test_store_restore():
    """In-memory store/restore of an explicit Network must reproduce the
    exact same voltages and spikes on a re-run — from t=0, from a mid-run
    snapshot, and when restoring the same snapshot twice (regression test
    for github issue #681)."""
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store()  # default time slot (t = 0)
    net.run(10 * ms)
    net.store('second')  # snapshot at t = 10 ms
    net.run(10 * ms)
    # Reference data after the full 20 ms.
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_
    # Go back to beginning
    net.restore()
    assert defaultclock.t == 0 * ms
    assert net.t == 0 * ms
    net.run(20 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    # Go back to middle
    net.restore('second')
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
    net.run(10 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    # Go back again (see github issue #681)
    net.restore('second')
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
def setup_generator(components):
    """
    Register an (initially empty) 5-neuron spike generator plus its spike
    monitor in `components`.

    Neurons 0-3 represent the four input components, in the order
    [I.up, I.dn, Q.up, Q.dn]; neuron 4 fires a stop signal at the end of
    each input stimulus. Actual spike times are injected later.

    Parameters
    ----------
    components : dict
        Previously defined network components.

    Returns
    -------
    dict
        The updated `components` dictionary.
    """
    generator = SpikeGeneratorGroup(5, np.array([]), np.array([]) * ms,
                                    name='gGen')
    components['generator'] = generator
    components['monitors']['mGen'] = SpikeMonitor(generator, name='mGen')
    return components
def run_net(traj):
    """Creates and runs BRIAN network based on the parameters in `traj`.

    Runs 100 ms without monitors (discarding initial transients), then
    attaches spike and voltage monitors and measures for 500 ms; results
    are stored on the trajectory as Brian2MonitorResult entries.
    """
    eqs = traj.eqs
    # Create a namespace dictionary for the equation strings.
    namespace = traj.Net.f_to_dict(short_names=True, fast_access=True)
    # Create the Neuron Group
    neuron = NeuronGroup(traj.N, model=eqs, threshold=traj.Vcut,
                         reset=traj.reset, namespace=namespace)
    neuron.vm = traj.EL
    neuron.w = traj.a * (neuron.vm - traj.EL)
    neuron.Vr = linspace(-48.3 * mV, -47.7 * mV, traj.N)  # bifurcation parameter
    # Run the network initially for 100 milliseconds
    print('Initial Run')
    net = Network(neuron)
    net.run(100 * ms, report='text')  # we discard the first spikes
    # Create a Spike Monitor
    MSpike = SpikeMonitor(neuron)
    net.add(MSpike)
    # Create a State Monitor for the membrane voltage, record from neurons 1-3
    MStateV = StateMonitor(neuron, variables=['vm'], record=[1, 2, 3])
    net.add(MStateV)
    # Now record for 500 milliseconds
    print('Measurement run')
    net.run(500 * ms, report='text')
    # Add the BRIAN monitors
    traj.v_standard_result = Brian2MonitorResult
    traj.f_add_result('SpikeMonitor', MSpike)
    traj.f_add_result('StateMonitorV', MStateV)
def run_net(tr):
    """Full standalone (cpp_standalone) simulation of a plastic E/I network
    driven by Poisson input, run in three phases (T1 with full recording,
    T2 with most recorders off, T3 with recorders re-enabled), then built
    and its monitor contents pickled into a per-run raw/ directory.

    Parameters are read from the pypet trajectory `tr`; results are written
    to ../builds/<run-idx>/raw/.
    """
    # prefs.codegen.target = 'numpy'
    # prefs.codegen.target = 'cython'
    set_device('cpp_standalone', directory='./builds/%.4d' % (tr.v_idx),
               build_on_run=False)
    print("Started process with id ", str(tr.v_idx))
    # Total simulated time over all three phases.
    T = tr.T1 + tr.T2 + tr.T3
    namespace = tr.netw.f_to_dict(short_names=True, fast_access=True)
    namespace['idx'] = tr.v_idx
    defaultclock.dt = tr.netw.sim.dt
    # Excitatory and inhibitory populations share the same model equations.
    GExc = NeuronGroup(
        N=tr.N_e, model=tr.condlif_sig,
        threshold=tr.nrnEE_thrshld,
        reset=tr.nrnEE_reset,
        #method=tr.neuron_method,
        namespace=namespace)
    GInh = NeuronGroup(
        N=tr.N_i, model=tr.condlif_sig,
        threshold='V > Vt',
        reset='V=Vr_i',
        #method=tr.neuron_method,
        namespace=namespace)
    # set initial thresholds fixed, init. potentials uniformly distrib.
    GExc.sigma, GInh.sigma = tr.sigma_e, tr.sigma_i
    GExc.Vt, GInh.Vt = tr.Vt_e, tr.Vt_i
    GExc.V , GInh.V = np.random.uniform(tr.Vr_e/mV, tr.Vt_e/mV,
                                        size=tr.N_e)*mV, \
                      np.random.uniform(tr.Vr_i/mV, tr.Vt_i/mV,
                                        size=tr.N_i)*mV
    print("need to fix?")
    synEE_pre_mod = mod.synEE_pre
    synEE_post_mod = mod.synEE_post
    # Poisson input: either one shared pool or one independent source per
    # target neuron.
    if tr.PInp_mode == 'pool':
        PInp = PoissonGroup(tr.NPInp, rates=tr.PInp_rate,
                            namespace=namespace)
        sPN = Synapses(target=GExc, source=PInp, model=tr.poisson_mod,
                       on_pre='ge_post += a_EPoi',
                       namespace=namespace)
        sPN_src, sPN_tar = generate_connections(N_tar=tr.N_e,
                                                N_src=tr.NPInp,
                                                p=tr.p_EPoi)
    elif tr.PInp_mode == 'indep':
        PInp = PoissonGroup(tr.N_e, rates=tr.PInp_rate,
                            namespace=namespace)
        sPN = Synapses(target=GExc, source=PInp, model=tr.poisson_mod,
                       on_pre='ge_post += a_EPoi',
                       namespace=namespace)
        sPN_src, sPN_tar = range(tr.N_e), range(tr.N_e)
    sPN.connect(i=sPN_src, j=sPN_tar)
    if tr.PInp_mode == 'pool':
        sPNInh = Synapses(target=GInh, source=PInp, model=tr.poisson_mod,
                          on_pre='ge_post += a_EPoi',
                          namespace=namespace)
        sPNInh_src, sPNInh_tar = generate_connections(N_tar=tr.N_i,
                                                      N_src=tr.NPInp,
                                                      p=tr.p_EPoi)
    elif tr.PInp_mode == 'indep':
        PInp_inh = PoissonGroup(tr.N_i, rates=tr.PInp_rate,
                                namespace=namespace)
        sPNInh = Synapses(target=GInh, source=PInp_inh,
                          model=tr.poisson_mod,
                          on_pre='ge_post += a_EPoi',
                          namespace=namespace)
        sPNInh_src, sPNInh_tar = range(tr.N_i), range(tr.N_i)
    sPNInh.connect(i=sPNInh_src, j=sPNInh_tar)
    # Optionally extend the E->E synapse model with STDP and/or recording
    # statements.
    if tr.stdp_active:
        synEE_pre_mod = '''%s %s''' % (synEE_pre_mod, mod.synEE_pre_STDP)
        synEE_post_mod = '''%s %s''' % (synEE_post_mod, mod.synEE_post_STDP)
    if tr.synEE_rec:
        synEE_pre_mod = '''%s %s''' % (synEE_pre_mod, mod.synEE_pre_rec)
        synEE_post_mod = '''%s %s''' % (synEE_post_mod, mod.synEE_post_rec)
    # E<-E advanced synapse model, rest simple
    SynEE = Synapses(
        target=GExc, source=GExc, model=tr.synEE_mod,
        on_pre=synEE_pre_mod, on_post=synEE_post_mod,
        #method=tr.synEE_method,
        namespace=namespace)
    SynIE = Synapses(target=GInh, source=GExc, on_pre='ge_post += a_ie',
                     namespace=namespace)
    SynEI = Synapses(target=GExc, source=GInh, on_pre='gi_post += a_ei',
                     namespace=namespace)
    SynII = Synapses(target=GInh, source=GInh, on_pre='gi_post += a_ii',
                     namespace=namespace)
    # E->E is fully connected with all synapses initially inactive; the
    # structural-plasticity machinery activates a subset later.
    if tr.strct_active:
        sEE_src, sEE_tar = generate_full_connectivity(tr.N_e, same=True)
        SynEE.connect(i=sEE_src, j=sEE_tar)
        SynEE.syn_active = 0
    else:
        srcs_full, tars_full = generate_full_connectivity(tr.N_e, same=True)
        SynEE.connect(i=srcs_full, j=tars_full)
        SynEE.syn_active = 0
    sIE_src, sIE_tar = generate_connections(tr.N_i, tr.N_e, tr.p_ie)
    sEI_src, sEI_tar = generate_connections(tr.N_e, tr.N_i, tr.p_ei)
    sII_src, sII_tar = generate_connections(tr.N_i, tr.N_i, tr.p_ii,
                                            same=True)
    SynIE.connect(i=sIE_src, j=sIE_tar)
    SynEI.connect(i=sEI_src, j=sEI_tar)
    SynII.connect(i=sII_src, j=sII_tar)
    # Store the realised connectivity on the trajectory.
    tr.f_add_result('sIE_src', sIE_src)
    tr.f_add_result('sIE_tar', sIE_tar)
    tr.f_add_result('sEI_src', sEI_src)
    tr.f_add_result('sEI_tar', sEI_tar)
    tr.f_add_result('sII_src', sII_src)
    tr.f_add_result('sII_tar', sII_tar)
    SynEE.a = tr.a_ee
    SynEE.insert_P = tr.insert_P
    SynEE.p_inactivate = tr.p_inactivate
    # make synapse active at beginning (runs once: dt equals total time T)
    SynEE.run_regularly(tr.synEE_p_activate, dt=T, when='start', order=-100)
    # synaptic scaling
    if tr.netw.config.scl_active:
        SynEE.summed_updaters['Asum_post']._clock = Clock(
            dt=tr.dt_synEE_scaling)
        SynEE.run_regularly(tr.synEE_scaling, dt=tr.dt_synEE_scaling,
                            when='end')
    # intrinsic plasticity
    if tr.netw.config.it_active:
        GExc.h_ip = tr.h_ip
        GExc.run_regularly(tr.intrinsic_mod, dt=tr.it_dt, when='end')
    # structural plasticity
    if tr.netw.config.strct_active:
        if tr.strct_mode == 'zero':
            if tr.turnover_rec:
                strct_mod = '''%s %s''' % (tr.strct_mod,
                                           tr.turnover_rec_mod)
            else:
                strct_mod = tr.strct_mod
            SynEE.run_regularly(strct_mod, dt=tr.strct_dt, when='end')
        elif tr.strct_mode == 'thrs':
            if tr.turnover_rec:
                strct_mod_thrs = '''%s %s''' % (tr.strct_mod_thrs,
                                                tr.turnover_rec_mod)
            else:
                strct_mod_thrs = tr.strct_mod_thrs
            SynEE.run_regularly(strct_mod_thrs, dt=tr.strct_dt, when='end')
    # -------------- recording ------------------
    #run(tr.sim.preT)
    # Which state variables to trace is driven by the trajectory flags.
    GExc_recvars = []
    if tr.memtraces_rec:
        GExc_recvars.append('V')
    if tr.vttraces_rec:
        GExc_recvars.append('Vt')
    if tr.getraces_rec:
        GExc_recvars.append('ge')
    if tr.gitraces_rec:
        GExc_recvars.append('gi')
    GInh_recvars = GExc_recvars
    GExc_stat = StateMonitor(GExc, GExc_recvars, record=[0, 1, 2],
                             dt=tr.GExc_stat_dt)
    GInh_stat = StateMonitor(GInh, GInh_recvars, record=[0, 1, 2],
                             dt=tr.GInh_stat_dt)
    SynEE_recvars = []
    if tr.synee_atraces_rec:
        SynEE_recvars.append('a')
    if tr.synee_Apretraces_rec:
        SynEE_recvars.append('Apre')
    if tr.synee_Aposttraces_rec:
        SynEE_recvars.append('Apost')
    SynEE_stat = StateMonitor(SynEE, SynEE_recvars,
                              record=range(tr.n_synee_traces_rec),
                              when='end', dt=tr.synEE_stat_dt)
    GExc_spks = SpikeMonitor(GExc)
    GInh_spks = SpikeMonitor(GInh)
    PInp_spks = SpikeMonitor(PInp)
    GExc_rate = PopulationRateMonitor(GExc)
    GInh_rate = PopulationRateMonitor(GInh)
    PInp_rate = PopulationRateMonitor(PInp)
    # Sparse snapshots of all E->E weights and activity flags.
    SynEE_a = StateMonitor(SynEE, ['a', 'syn_active'],
                           record=range(tr.N_e * (tr.N_e - 1)),
                           dt=T / tr.synee_a_nrecpoints, when='end',
                           order=100)
    # 'indep' mode has a second Poisson group that must be in the network.
    if tr.PInp_mode == 'indep':
        net = Network(GExc, GInh, PInp, sPN, sPNInh, SynEE, SynEI, SynIE,
                      SynII, GExc_stat, GInh_stat, SynEE_stat, SynEE_a,
                      GExc_spks, GInh_spks, PInp_spks, GExc_rate, GInh_rate,
                      PInp_rate, PInp_inh)
    else:
        net = Network(GExc, GInh, PInp, sPN, sPNInh, SynEE, SynEI, SynIE,
                      SynII, GExc_stat, GInh_stat, SynEE_stat, SynEE_a,
                      GExc_spks, GInh_spks, PInp_spks, GExc_rate, GInh_rate,
                      PInp_rate)
    # Phase 1: full recording.
    net.run(tr.sim.T1, report='text')
    # SynEE_a.record_single_timestep()
    # Phase 2: switch most recorders off.
    recorders = [
        GExc_spks, GInh_spks, PInp_spks, SynEE_stat, GExc_stat, GInh_stat
    ]
    rate_recorders = [GExc_rate, GInh_rate, PInp_rate]
    for rcc in recorders:
        rcc.active = False
    net.run(tr.sim.T2, report='text')
    # Phase 3: re-enable the trace/rate recorders (spike monitors only if
    # requested).
    recorders = [
        SynEE_stat, GExc_stat, GInh_stat, GExc_rate, GInh_rate, PInp_rate
    ]
    for rcc in recorders:
        rcc.active = True
    if tr.spks_rec:
        GExc_spks.active = True
        GInh_spks.active = True
        # PInp_spks.active=True
    net.run(tr.sim.T3, report='text')
    device.build(directory='../builds/%.4d' % (tr.v_idx), clean=True)
    # save monitors as raws in build directory
    raw_dir = '../builds/%.4d/raw/' % (tr.v_idx)
    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)
    with open(raw_dir + 'namespace.p', 'wb') as pfile:
        pickle.dump(namespace, pfile)
    with open(raw_dir + 'gexc_stat.p', 'wb') as pfile:
        pickle.dump(GExc_stat.get_states(), pfile)
    with open(raw_dir + 'ginh_stat.p', 'wb') as pfile:
        pickle.dump(GInh_stat.get_states(), pfile)
    with open(raw_dir + 'synee_stat.p', 'wb') as pfile:
        pickle.dump(SynEE_stat.get_states(), pfile)
    with open(raw_dir + 'synee_a.p', 'wb') as pfile:
        pickle.dump(SynEE_a.get_states(), pfile)
    with open(raw_dir + 'gexc_spks.p', 'wb') as pfile:
        pickle.dump(GExc_spks.get_states(), pfile)
    with open(raw_dir + 'ginh_spks.p', 'wb') as pfile:
        pickle.dump(GInh_spks.get_states(), pfile)
    with open(raw_dir + 'pinp_spks.p', 'wb') as pfile:
        pickle.dump(PInp_spks.get_states(), pfile)
    # Rate files hold two pickled objects each: states, then smoothed rate.
    with open(raw_dir + 'gexc_rate.p', 'wb') as pfile:
        pickle.dump(GExc_rate.get_states(), pfile)
        pickle.dump(GExc_rate.smooth_rate(width=25 * ms), pfile)
    with open(raw_dir + 'ginh_rate.p', 'wb') as pfile:
        pickle.dump(GInh_rate.get_states(), pfile)
        pickle.dump(GInh_rate.smooth_rate(width=25 * ms), pfile)
    with open(raw_dir + 'pinp_rate.p', 'wb') as pfile:
        pickle.dump(PInp_rate.get_states(), pfile)
        pickle.dump(PInp_rate.smooth_rate(width=25 * ms), pfile)
    # ----------------- add raw data ------------------------
    # touch() guarantees the files exist even if the standalone run never
    # wrote them, so genfromtxt sees (possibly empty) input.
    fpath = '../builds/%.4d/' % (tr.v_idx)
    from pathlib import Path
    Path(fpath + 'turnover').touch()
    turnover_data = np.genfromtxt(fpath + 'turnover', delimiter=',')
    os.remove(fpath + 'turnover')
    with open(raw_dir + 'turnover.p', 'wb') as pfile:
        pickle.dump(turnover_data, pfile)
    Path(fpath + 'spk_register').touch()
    spk_register_data = np.genfromtxt(fpath + 'spk_register', delimiter=',')
    os.remove(fpath + 'spk_register')
    with open(raw_dir + 'spk_register.p', 'wb') as pfile:
        pickle.dump(spk_register_data, pfile)
def lson(lsoInputSpkFileTuple, temp_degC=37):
    """Simulate a single LSO (lateral superior olive) model neuron.

    Builds a Rothman & Manis (2003)-style conductance-based neuron and
    drives it from two auditory-nerve spike files: tuple entry [0]
    (contralateral pathway) feeds inhibitory synapses, entry [1]
    (ipsilateral pathway) feeds excitatory synapses.  The network is run
    for 300 ms and the LSO spike times are written to a text file whose
    name encodes stimulus info, synaptic parameters and the envelope IPD.

    Parameters:
        lsoInputSpkFileTuple: tuple of two file paths; each file has one
            row per spike: column 0 = fiber index, column 1 = spike time
            in SECONDS (Zilany AN model output).
        temp_degC: simulation temperature in degrees Celsius (default 37).

    Returns:
        (lsonGrp, lsonSpks, lsonState): the NeuronGroup, its SpikeMonitor
        and its StateMonitor (recording 'v' and 'gs_e').

    NOTE(review): output-file naming relies on fixed character positions
    in the input file names (slices like [27:31], [38:40]); verify these
    against the naming convention of the AN spike files actually used.
    """
    defaultclock.dt = 0.02 * ms  # fine time step for better precision

    neuron_type = 'type2'  # medium-fast membrane, f0 180-260Hz, CF 4kHz
    Vrest = -63.6 * mV  # resting potential for type1c from RM2003

    nLsons = 1  # number of LSO neurons

    # Circuit bookkeeping constants (kept for reference; only the last
    # three are used below).
    nGbcsCo = 4
    nGbcsIp = 4
    nSbcsCo = 4
    nSbcsIp = 4
    nSbcs = 4
    nAnfsPerSbc = 3
    nGbcs = 4
    nAnfsPerGbc = 30
    nAnfsPerInputFile = 40
    nGbcsCoPerLson = 8   # Gjoni et al. 2018
    nSbcsIpPerLson = 40  # Gjoni et al. 2018

    # Input spike files: [0] contralateral (-> inhibition),
    # [1] ipsilateral (-> excitation).
    anCoSpkFile = lsoInputSpkFileTuple[0]
    anIpSpkFile = lsoInputSpkFileTuple[1]

    # Contralateral AN fibers -> spike generator group.
    gbCoIdxSpktimeArray = np.loadtxt(anCoSpkFile)
    gbCoCellIndices = gbCoIdxSpktimeArray[:, 0].astype(int)
    # Spike times from the Zilany AN model are in SECONDS, so * 1000*ms.
    gbCoSpkTimes = gbCoIdxSpktimeArray[:, 1] * 1000. * ms
    gbCoSpkGenGrp = SpikeGeneratorGroup(nAnfsPerInputFile, gbCoCellIndices,
                                        gbCoSpkTimes)

    # Ipsilateral AN fibers -> spike generator group.
    sbIpIdxSpktimeArray = np.loadtxt(anIpSpkFile)
    sbIpCellIndices = sbIpIdxSpktimeArray[:, 0].astype(int)
    # Again seconds -> ms conversion.
    sbIpSpkTimes = sbIpIdxSpktimeArray[:, 1] * 1000. * ms
    sbIpSpkGenGrp = SpikeGeneratorGroup(nAnfsPerInputFile, sbIpCellIndices,
                                        sbIpSpkTimes)

    # Membrane and ion-channel parameters (RM2003).
    C = 12 * pF
    EH = -43 * mV
    EK = -70 * mV  # -77*mV in mod file
    EL = -65 * mV
    ENa = 55 * mV  # 55*mV in RM2003; 50*mv by Brette
    nf = 0.85  # proportion of n vs p kinetics
    zss = 0.5  # steady state inactivation of glt
    # q10 for ion-channel time-constants (RM2003, p.3106):
    q10 = 3.**((temp_degC - 22.) / 10.)
    # q10 for ion-channel gbar parameters (RM2003, p.3106);
    # currently not applied to the conductances below:
    q10gbar = 2.**((temp_degC - 22.) / 10.)
    # hcno current (octopus cell):
    frac = 0.0
    qt = 4.5**((temp_degC - 33.) / 10.)

    # Synaptic parameters:
    Es_e = 0. * mV
    tausE = 0.5 * ms
    Es_i = -90 * mV
    tausI = 1.0 * ms
    # Synaptic weights are unitless in Brian2; the effective unit is
    # siemens, applied explicitly in the on_pre code ('w_x*siemens').
    # A name other than the standard 'w' is used deliberately: 'w' is
    # already a neuron state variable (KLT activation), and a synapse
    # variable named 'w' would alias that same state variable.
    w_elson = 5e-9
    w_ilson = 50e-9  # 6e-9 @ 200Hz; 12e-9 @ 600 Hz

    # Maximal conductances per cell type, in nS:
    # (gnabar, gkhtbar, gkltbar, gkabar, ghbar, gbarno, gl)
    maximal_conductances = dict(
        type1c=(1000, 150, 0, 0, 0.5, 0, 2),
        type1t=(1000, 80, 0, 65, 0.5, 0, 2),
        type12=(1000, 150, 20, 0, 2, 0, 2),
        type21=(1000, 150, 35, 0, 3.5, 0, 2),
        type2=(1000, 150, 200, 0, 20, 0, 2),
        type2g1p5x=(1000, 150, 300, 0, 30, 0, 2),
        type2g0p5x=(1000, 150, 100, 0, 10, 0, 2),
        type2o=(1000, 150, 600, 0, 0, 40, 2)  # octopus cell
    )
    gnabar, gkhtbar, gkltbar, gkabar, ghbar, gbarno, gl = [
        x * nS for x in maximal_conductances[neuron_type]
    ]

    # Classical Na channel
    eqs_na = """
ina = gnabar*m**3*h*(ENa-v) : amp
dm/dt=q10*(minf-m)/mtau : 1
dh/dt=q10*(hinf-h)/htau : 1
minf = 1./(1+exp(-(vu + 38.) / 7.)) : 1
hinf = 1./(1+exp((vu + 65.) / 6.)) : 1
mtau = ((10. / (5*exp((vu+60.) / 18.) + 36.*exp(-(vu+60.) / 25.))) + 0.04)*ms : second
htau = ((100. / (7*exp((vu+60.) / 11.) + 10.*exp(-(vu+60.) / 25.))) + 0.6)*ms : second
"""

    # KHT channel (delayed-rectifier K+)
    eqs_kht = """
ikht = gkhtbar*(nf*n**2 + (1-nf)*p)*(EK-v) : amp
dn/dt=q10*(ninf-n)/ntau : 1
dp/dt=q10*(pinf-p)/ptau : 1
ninf = (1 + exp(-(vu + 15) / 5.))**-0.5 : 1
pinf = 1. / (1 + exp(-(vu + 23) / 6.)) : 1
ntau = ((100. / (11*exp((vu+60) / 24.) + 21*exp(-(vu+60) / 23.))) + 0.7)*ms : second
ptau = ((100. / (4*exp((vu+60) / 32.) + 5*exp(-(vu+60) / 22.))) + 5)*ms : second
"""

    # Ih channel (subthreshold adaptive, non-inactivating)
    eqs_ih = """
ih = ghbar*r*(EH-v) : amp
dr/dt=q10*(rinf-r)/rtau : 1
rinf = 1. / (1+exp((vu + 76.) / 7.)) : 1
rtau = ((100000. / (237.*exp((vu+60.) / 12.) + 17.*exp(-(vu+60.) / 14.))) + 25.)*ms : second
"""

    # KLT channel (low threshold K+)
    eqs_klt = """
iklt = gkltbar*w**4*z*(EK-v) : amp
dw/dt=q10*(winf-w)/wtau : 1
dz/dt=q10*(zinf-z)/ztau : 1
winf = (1. / (1 + exp(-(vu + 48.) / 6.)))**0.25 : 1
zinf = zss + ((1.-zss) / (1 + exp((vu + 71.) / 10.))) : 1
wtau = ((100. / (6.*exp((vu+60.) / 6.) + 16.*exp(-(vu+60.) / 45.))) + 1.5)*ms : second
ztau = ((1000. / (exp((vu+60.) / 20.) + exp(-(vu+60.) / 8.))) + 50)*ms : second
"""

    # Ka channel (transient K+)
    eqs_ka = """
ika = gkabar*a**4*b*c*(EK-v): amp
da/dt=q10*(ainf-a)/atau : 1
db/dt=q10*(binf-b)/btau : 1
dc/dt=q10*(cinf-c)/ctau : 1
ainf = (1. / (1 + exp(-(vu + 31) / 6.)))**0.25 : 1
binf = 1. / (1 + exp((vu + 66) / 7.))**0.5 : 1
cinf = 1. / (1 + exp((vu + 66) / 7.))**0.5 : 1
atau = ((100. / (7*exp((vu+60) / 14.) + 29*exp(-(vu+60) / 24.))) + 0.1)*ms : second
btau = ((1000. / (14*exp((vu+60) / 27.) + 29*exp(-(vu+60) / 24.))) + 1)*ms : second
ctau = ((90. / (1 + exp((-66-vu) / 17.))) + 10)*ms : second
"""

    # Leak
    eqs_leak = """
ileak = gl*(EL-v) : amp
"""

    # h current for octopus cells
    eqs_hcno = """
ihcno = gbarno*(h1*frac + h2*(1-frac))*(EH-v) : amp
dh1/dt=(hinfno-h1)/tau1 : 1
dh2/dt=(hinfno-h2)/tau2 : 1
hinfno = 1./(1+exp((vu+66.)/7.)) : 1
tau1 = bet1/(qt*0.008*(1+alp1))*ms : second
tau2 = bet2/(qt*0.0029*(1+alp2))*ms : second
alp1 = exp(1e-3*3*(vu+50)*9.648e4/(8.315*(273.16+temp_degC))) : 1
bet1 = exp(1e-3*3*0.3*(vu+50)*9.648e4/(8.315*(273.16+temp_degC))) : 1
alp2 = exp(1e-3*3*(vu+84)*9.648e4/(8.315*(273.16+temp_degC))) : 1
bet2 = exp(1e-3*3*0.6*(vu+84)*9.648e4/(8.315*(273.16+temp_degC))) : 1
"""

    # Membrane equation; Is_i added to the original RM2003 formulation.
    eqs = """
dv/dt = (ileak + ina + ikht + iklt + ika + ih + ihcno + I + Is_e + Is_i)/C : volt
Is_e = gs_e * (Es_e - v) : amp
gs_e : siemens
Is_i = gs_i * (Es_i - v) : amp
gs_i : siemens
vu = v/mV : 1 # unitless v
I : amp
"""
    eqs += eqs_leak + eqs_ka + eqs_na + eqs_ih + eqs_klt + eqs_kht + eqs_hcno

    lsonGrp = NeuronGroup(nLsons, eqs, method='exponential_euler',
                          threshold='v > -30*mV', refractory='v > -45*mV')

    lsonGrp.I = 0.0 * pA  # initialize model near v_rest with no inputs
    lsonGrp.v = Vrest
    vu = lsonGrp.v / mV  # unitless membrane potential for the formulas below

    # Start every gating variable at its steady-state value at vu.
    lsonGrp.m = 1. / (1 + exp(-(vu + 38.) / 7.))
    lsonGrp.h = 1. / (1 + exp((vu + 65.) / 6.))
    lsonGrp.n = (1 + exp(-(vu + 15) / 5.))**-0.5
    lsonGrp.p = 1. / (1 + exp(-(vu + 23) / 6.))
    lsonGrp.r = 1. / (1 + exp((vu + 76.) / 7.))
    lsonGrp.w = (1. / (1 + exp(-(vu + 48.) / 6.)))**0.25
    lsonGrp.z = zss + ((1. - zss) / (1 + exp((vu + 71.) / 10.)))
    lsonGrp.a = (1. / (1 + exp(-(vu + 31) / 6.)))**0.25
    lsonGrp.b = 1. / (1 + exp((vu + 66) / 7.))**0.5
    lsonGrp.c = 1. / (1 + exp((vu + 66) / 7.))**0.5
    lsonGrp.h1 = 1. / (1 + exp((vu + 66.) / 7.))
    lsonGrp.h2 = 1. / (1 + exp((vu + 66.) / 7.))

    # Inhibitory synapses from the contralateral generator: exponentially
    # decaying conductance g_i summed into the neuron's gs_i; only the
    # first nGbcsCoPerLson fibers converge on the single LSO neuron.
    lsonSynI = Synapses(gbCoSpkGenGrp, lsonGrp,
                        model='''dg_i/dt = -g_i/tausI : siemens (clock-driven)
                                 gs_i_post = g_i : siemens (summed)''',
                        on_pre='g_i += w_ilson*siemens',
                        method='exact')
    lsonSynI.connect(i=np.arange(nGbcsCoPerLson), j=0)

    # Excitatory synapses from the ipsilateral generator, summed into gs_e.
    lsonSynE = Synapses(sbIpSpkGenGrp, lsonGrp,
                        model='''dg_e/dt = -g_e/tausE : siemens (clock-driven)
                                 gs_e_post = g_e : siemens (summed)''',
                        on_pre='g_e += w_elson*siemens',
                        method='exact')
    lsonSynE.connect(i=np.arange(nSbcsIpPerLson), j=0)

    lsonSpks = SpikeMonitor(lsonGrp)
    lsonState = StateMonitor(lsonGrp, ['v', 'gs_e'], record=True)

    run(300 * ms, report='text')

    # --- Output file: EIPD and synaptic parameters in the file name ------
    # Envelope phase is parsed from fixed positions in the input file
    # names; a leading 'N' marks a negative phase.
    EPhsStrCo = anCoSpkFile[27:31]
    EPhsStrIp = anIpSpkFile[27:31]
    if EPhsStrCo[0] == 'N':
        EPhsIntCo = -1 * int(EPhsStrCo[1:4])
    else:
        EPhsIntCo = int(EPhsStrCo[0:3])
    if EPhsStrIp[0] == 'N':
        EPhsIntIp = -1 * int(EPhsStrIp[1:4])
    else:
        EPhsIntIp = int(EPhsStrIp[0:3])
    # EIPD = (EPhsIntCo - EPhsIntIp) % 360
    EIPDint = EPhsIntCo - EPhsIntIp  # unwrapped Envelope IPD, degrees

    # Build a tag such as 'EIPDP015' / 'EIPDN090'.  This replaces the
    # original 12-branch if/elif table with equivalent logic: magnitudes
    # that are multiples of 15 up to 90 (incl. 0) are zero-padded to
    # three digits, any other magnitude is left unpadded.
    magnitude = abs(EIPDint)
    sign_tag = 'N' if EIPDint < 0 else 'P'
    if magnitude <= 90 and magnitude % 15 == 0:
        EIPDstr = 'EIPD' + sign_tag + str(magnitude).zfill(3)
    else:
        EIPDstr = 'EIPD' + sign_tag + str(magnitude)

    def _param_tag(value):
        # Format a unitless parameter for the file name: use the rounded
        # integer when it is within 0.1 of the value, and spell the
        # decimal point as 'p' (e.g. 0.5 -> '0p5').
        if abs(round(value) - value) < 0.1:
            tag = str(round(value))
        else:
            tag = str(value)
        return tag.replace('.', 'p')

    Te = _param_tag(tausE / ms)      # excitatory tau, ms
    We = _param_tag(w_elson / 1e-9)  # excitatory weight, nS
    Ti = _param_tag(tausI / ms)      # inhibitory tau, ms
    Wi = _param_tag(w_ilson / 1e-9)  # inhibitory weight, nS

    # Assemble the output name from input-file fragments (stimulus info),
    # the synaptic parameter tags and the EIPD tag.
    lsonSpkFile = ('Lso2SpTms' + anCoSpkFile[6:13] + anCoSpkFile[16:23] +
                   'Te' + Te + 'We' + We + 'Ti' + Ti + 'Wi' + Wi +
                   EIPDstr + 'Co' + anCoSpkFile[38:40] +
                   anCoSpkFile[23:31] + anCoSpkFile[45:])

    # One "index time_ms" line per spike; 'with' guarantees the file is
    # closed even on error (the original open()/close() pair leaked the
    # handle if writing raised).
    with open(lsonSpkFile, 'w') as spk_file:
        for index in range(len(lsonSpks.t)):
            spk_file.write(str(lsonSpks.i[index]) + " " +
                           str(lsonSpks.t[index] / ms) + '\n')

    return (lsonGrp, lsonSpks, lsonState)
def run_net(tr):
    """Build and run one standalone Brian2 simulation for trajectory `tr`.

    Sets up an excitatory (GExc) and an inhibitory (GInh) population whose
    model/threshold/reset strings come from the trajectory, plastic E<-E
    synapses (optional STDP, synaptic scaling, intrinsic and structural
    plasticity hooks) plus static I<-E, E<-I and I<-I synapses, attaches
    spike/state monitors, runs for tr.sim.T in C++ standalone mode, and
    stores all monitors and raw data on the trajectory.

    `tr` is accessed like a pypet trajectory (f_to_dict, f_add_result,
    v_idx) — presumably one; confirm against the caller.

    NOTE(review): set_device() targets './builds/%.4d' while device.build()
    and the raw-data path below use '../builds/%.4d' — as written these are
    different directories; confirm which one is intended.
    """
    # Alternative codegen targets, kept for reference:
    # prefs.codegen.target = 'numpy'
    # prefs.codegen.target = 'cython'

    # Standalone mode; the actual build is deferred to device.build() below.
    set_device('cpp_standalone', directory='./builds/%.4d' % (tr.v_idx),
               build_on_run=False)

    print("Started process with id ", str(tr.v_idx))

    # Flat parameter dict used as the namespace for all model code strings.
    namespace = tr.netw.f_to_dict(short_names=True, fast_access=True)
    namespace['idx'] = tr.v_idx

    defaultclock.dt = tr.netw.sim.dt

    GExc = NeuronGroup(
        N=tr.N_e,
        model=tr.condlif_sig,
        threshold=tr.nrnEE_thrshld,
        reset=tr.nrnEE_reset,
        #method=tr.neuron_method,
        namespace=namespace)
    GInh = NeuronGroup(
        N=tr.N_i,
        model=tr.condlif_sig,
        threshold='V > Vt',
        reset='V=Vr_i',
        #method=tr.neuron_method,
        namespace=namespace)

    # set initial thresholds fixed, init. potentials uniformly distrib.
    GExc.sigma, GInh.sigma = tr.sigma_e, tr.sigma_i
    GExc.Vt, GInh.Vt = tr.Vt_e, tr.Vt_i
    GExc.V, GInh.V = np.random.uniform(tr.Vr_e/mV, tr.Vt_e/mV,
                                       size=tr.N_e)*mV, \
                     np.random.uniform(tr.Vr_i/mV, tr.Vt_i/mV,
                                       size=tr.N_i)*mV

    # Base E<-E pre/post code; STDP and recording snippets are appended
    # when the corresponding trajectory flags are set.
    synEE_pre_mod = mod.synEE_pre
    synEE_post_mod = mod.synEE_post

    if tr.stdp_active:
        synEE_pre_mod = '''%s %s''' % (synEE_pre_mod, mod.synEE_pre_STDP)
        synEE_post_mod = '''%s %s''' % (synEE_post_mod, mod.synEE_post_STDP)

    if tr.synEE_rec:
        synEE_pre_mod = '''%s %s''' % (synEE_pre_mod, mod.synEE_pre_rec)
        synEE_post_mod = '''%s %s''' % (synEE_post_mod, mod.synEE_post_rec)

    # E<-E advanced synapse model, rest simple
    SynEE = Synapses(
        target=GExc,
        source=GExc,
        model=tr.synEE_mod,
        on_pre=synEE_pre_mod,
        on_post=synEE_post_mod,
        #method=tr.synEE_method,
        namespace=namespace)
    SynIE = Synapses(target=GInh, source=GExc, on_pre='ge_post += a_ie',
                     namespace=namespace)
    SynEI = Synapses(target=GExc, source=GInh, on_pre='gi_post += a_ei',
                     namespace=namespace)
    SynII = Synapses(target=GInh, source=GInh, on_pre='gi_post += a_ii',
                     namespace=namespace)

    # E<-E is wired all-to-all (no autapses) in either case, with all
    # synapses initially inactive; activation happens via run_regularly
    # below.  Both branches are currently identical apart from naming.
    if tr.strct_active:
        sEE_src, sEE_tar = generate_full_connectivity(tr.N_e, same=True)
        SynEE.connect(i=sEE_src, j=sEE_tar)
        SynEE.syn_active = 0
    else:
        srcs_full, tars_full = generate_full_connectivity(tr.N_e, same=True)
        SynEE.connect(i=srcs_full, j=tars_full)
        SynEE.syn_active = 0

    # Random sparse connectivity for the static synapse classes.
    sIE_src, sIE_tar = generate_connections(tr.N_i, tr.N_e, tr.p_ie)
    sEI_src, sEI_tar = generate_connections(tr.N_e, tr.N_i, tr.p_ei)
    sII_src, sII_tar = generate_connections(tr.N_i, tr.N_i, tr.p_ii,
                                            same=True)

    SynIE.connect(i=sIE_src, j=sIE_tar)
    SynEI.connect(i=sEI_src, j=sEI_tar)
    SynII.connect(i=sII_src, j=sII_tar)

    # Record the drawn connectivity on the trajectory.
    tr.f_add_result('sIE_src', sIE_src)
    tr.f_add_result('sIE_tar', sIE_tar)
    tr.f_add_result('sEI_src', sEI_src)
    tr.f_add_result('sEI_tar', sEI_tar)
    tr.f_add_result('sII_src', sII_src)
    tr.f_add_result('sII_tar', sII_tar)

    # if tr.strct_active:
    #     SynEE.a = 0
    # else:
    SynEE.a = tr.a_ee
    SynEE.insert_P = tr.insert_P

    # make synapse active at beginning
    #if not tr.strct_active:
    SynEE.run_regularly(tr.synEE_p_activate, dt=tr.T, when='start',
                        order=-100)

    # synaptic scaling
    if tr.netw.config.scl_active:
        # NOTE(review): reaches into the private _clock of the summed
        # updater so Asum is recomputed only at the scaling interval;
        # this relies on Brian2 internals.
        SynEE.summed_updaters['Asum_post']._clock = Clock(
            dt=tr.dt_synEE_scaling)
        SynEE.run_regularly(tr.synEE_scaling, dt=tr.dt_synEE_scaling,
                            when='end')

    # intrinsic plasticity
    if tr.netw.config.it_active:
        GExc.h_ip = tr.h_ip
        GExc.run_regularly(tr.intrinsic_mod, dt=tr.it_dt, when='end')

    # structural plasticity
    if tr.netw.config.strct_active:
        SynEE.run_regularly(tr.strct_mod, dt=tr.strct_dt, when='end')

    # -------------- recording ------------------

    #run(tr.sim.preT)

    # Which state variables to trace is controlled by trajectory flags.
    GExc_recvars = []
    if tr.memtraces_rec:
        GExc_recvars.append('V')
    if tr.vttraces_rec:
        GExc_recvars.append('Vt')
    if tr.getraces_rec:
        GExc_recvars.append('ge')
    if tr.gitraces_rec:
        GExc_recvars.append('gi')

    GInh_recvars = GExc_recvars  # same variable list for both populations

    # Trace only the first three neurons of each population.
    GExc_stat = StateMonitor(GExc, GExc_recvars, record=[0, 1, 2],
                             dt=tr.GExc_stat_dt)
    GInh_stat = StateMonitor(GInh, GInh_recvars, record=[0, 1, 2],
                             dt=tr.GInh_stat_dt)

    SynEE_recvars = []
    if tr.synee_atraces_rec:
        SynEE_recvars.append('a')
    if tr.synee_Apretraces_rec:
        SynEE_recvars.append('Apre')
    if tr.synee_Aposttraces_rec:
        SynEE_recvars.append('Apost')

    SynEE_stat = StateMonitor(SynEE, SynEE_recvars,
                              record=range(tr.n_synee_traces_rec),
                              when='end', dt=tr.synEE_stat_dt)

    GExc_spks = SpikeMonitor(GExc)
    GInh_spks = SpikeMonitor(GInh)

    # Snapshots of ALL E<-E weights and activity flags, ~10 per run
    # (dt = T/10), late in the scheduling order.
    SynEE_a = StateMonitor(SynEE, ['a', 'syn_active'],
                           record=range(tr.N_e * (tr.N_e - 1)),
                           dt=tr.sim.T / 10., when='end', order=100)

    run(tr.sim.T, report='text')
    SynEE_a.record_single_timestep()  # also capture the final state
    device.build(directory='../builds/%.4d' % (tr.v_idx))

    tr.v_standard_result = Brian2MonitorResult

    tr.f_add_result('GExc_stat', GExc_stat)
    tr.f_add_result('SynEE_stat', SynEE_stat)
    print("Saving exc spikes... ", GExc_spks.get_states()['N'])
    tr.f_add_result('GExc_spks', GExc_spks)
    tr.f_add_result('GInh_stat', GInh_stat)
    print("Saving inh spikes... ", GInh_spks.get_states()['N'])
    tr.f_add_result('GInh_spks', GInh_spks)
    tr.f_add_result('SynEE_a', SynEE_a)

    # ----------------- add raw data ------------------------
    fpath = '../builds/%.4d/' % (tr.v_idx)

    from pathlib import Path

    # touch() guarantees the file exists so genfromtxt succeeds even when
    # the standalone run produced no turnover/spk_register records.
    Path(fpath + 'turnover').touch()
    turnover_data = np.genfromtxt(fpath + 'turnover', delimiter=',')
    tr.f_add_result('turnover', turnover_data)
    os.remove(fpath + 'turnover')

    Path(fpath + 'spk_register').touch()
    spk_register_data = np.genfromtxt(fpath + 'spk_register', delimiter=',')
    tr.f_add_result('spk_register', spk_register_data)
    os.remove(fpath + 'spk_register')
# NOTE(review): this fragment duplicates the inhibitory-synapse setup from
# lson() above and looks like a standalone debugging/plotting variant with
# the excitatory pathway disabled.  The names it uses (gbCoSpkGenGrp,
# lsonGrp, tausI, w_ilson, nGbcsCoPerLson, ...) must be defined by
# surrounding code not visible here — confirm its enclosing scope.

# Inhibitory synapses: exponentially decaying conductance g_i, summed into
# the neuron's gs_i; the unitless weight is multiplied by siemens on spike.
lsonSynI = Synapses(gbCoSpkGenGrp, lsonGrp,
                    model='''dg_i/dt = -g_i/tausI : siemens (clock-driven)
                             gs_i_post = g_i : siemens (summed)''',
                    on_pre='g_i += w_ilson*siemens',
                    method='exact')
# Converge the first nGbcsCoPerLson source fibers onto the single neuron 0.
lsonSynI.connect(i=np.arange(nGbcsCoPerLson), j=0)

# Excitatory counterpart, currently disabled:
#lsonSynE = Synapses(sbIpSpkGenGrp, lsonGrp,
#                    model='''dg_e/dt = -g_e/tausE : siemens (clock-driven)
#                    gs_e_post = g_e : siemens (summed)''',
#                    on_pre='g_e += w_elson*siemens',
#                    method = 'exact')
#lsonSynE.connect(i=np.arange(nSbcsIpPerLson), j=0)

# Record spikes plus synaptic conductances/currents and membrane potential.
lsonSpks = SpikeMonitor(lsonGrp)
lsonState = StateMonitor(lsonGrp, ['gs_e', 'Is_e', 'gs_i', 'Is_i', 'v'],
                         record=True)

run(50 * ms, report='text')

# Quick-look plots: excitatory conductance and current of neuron 0.
plt.plot(lsonState.t / ms, lsonState[0].gs_e / nS)
plt.xlabel('t (ms)')
plt.ylabel('gs_e (nS)')
plt.show()
plt.plot(lsonState.t / ms, lsonState[0].Is_e / pA)
plt.xlabel('t (ms)')
plt.ylabel('Is_e (pA)')
plt.show()