Example #1
def run_simulation():
    G = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1',
                    reset='v=0', threshold='v>1')
    G.v = np.linspace(0, 1, 10)
    run(1*ms)
    # We return potentially problematic references to a VariableView
    return G.v
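Note on the comment above: `G.v` is a `VariableView` bound to the group, so the caller receives a reference into simulation memory. A minimal sketch of the safer pattern, copying the values into an independent NumPy array before returning (the function name is purely illustrative):

def run_simulation_copy():
    G = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1',
                    reset='v=0', threshold='v>1')
    G.v = np.linspace(0, 1, 10)
    run(1*ms)
    # Indexing with [:] materialises the values; np.array makes an independent copy
    return np.array(G.v[:])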
def run_network(traj):
    """Runs brian network consisting of
        200 inhibitory IF neurons"""

    eqs = '''
    dv/dt=(v0-v)/(5*ms) : volt (unless refractory)
    v0 : volt
    '''
    group = NeuronGroup(100, model=eqs, threshold='v>10 * mV',
                        reset='v = 0*mV', refractory=5*ms)
    group.v0 = traj.par.v0
    group.v = np.random.rand(100) * 10.0 * mV

    syn = Synapses(group, group, on_pre='v-=1*mV')
    syn.connect('i != j', p=0.2)

    spike_monitor = SpikeMonitor(group, variables=['v'])
    voltage_monitor = StateMonitor(group, 'v', record=True)
    pop_monitor = PopulationRateMonitor(group, name='pop' + str(traj.v_idx))

    net = Network(group, syn, spike_monitor, voltage_monitor, pop_monitor)
    net.run(0.25*second, report='text')

    traj.f_add_result(Brian2MonitorResult, 'spikes',
                      spike_monitor)
    traj.f_add_result(Brian2MonitorResult, 'v',
                      voltage_monitor)
    traj.f_add_result(Brian2MonitorResult, 'pop',
                      pop_monitor)
Example #3
def test_store_restore_magic():
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    store()  # default time slot
    run(10*ms)
    store('second')
    run(10*ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_

    restore() # Go back to beginning
    assert magic_network.t == 0*ms
    run(20*ms)
    assert defaultclock.t == 20*ms
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    restore('second')
    assert magic_network.t == 10*ms
    run(10*ms)
    assert defaultclock.t == 20*ms
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
Example #4
def test_profile_ipython_html():
    G = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1', threshold='v>1',
                    reset='v=0', name='profile_test')
    G.v = 1.1
    net = Network(G)
    net.run(1*ms, profile=True)
    summary = profiling_summary(net)
    assert len(summary._repr_html_())
Example #5
def test_get_set_states():
    G = NeuronGroup(10, 'v:1', name='a_neurongroup')
    G.v = 'i'
    net = Network(G)
    states1 = net.get_states()
    states2 = magic_network.get_states()
    states3 = net.get_states(read_only_variables=False)
    assert set(states1.keys()) == set(states2.keys()) == set(states3.keys()) == {'a_neurongroup'}
    assert set(states1['a_neurongroup'].keys()) == set(states2['a_neurongroup'].keys()) == {'i', 'dt', 'N', 't', 'v'}
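    # With read_only_variables=False, the read-only variables i, dt, N and t are dropped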
    assert set(states3['a_neurongroup']) == {'v'}

    # Try re-setting the state
    G.v = 0
    net.set_states(states3)
    assert_equal(G.v, np.arange(10))
Example #6
def test_profile():
    G = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1', threshold='v>1',
                    reset='v=0', name='profile_test')
    G.v = 1.1
    net = Network(G)
    net.run(1*ms, profile=True)
    # There should be four simulated CodeObjects, one for the group and one each
    # for state update, threshold and reset
    info = net.profiling_info
    info_dict = dict(info)
    assert len(info) == 4
    assert 'profile_test' in info_dict
    assert 'profile_test_stateupdater' in info_dict
    assert 'profile_test_thresholder' in info_dict
    assert 'profile_test_resetter' in info_dict
    assert all([t>=0*second for _, t in info])
def run_network():

    monitor_dict = {}
    defaultclock.dt = 0.01*ms

    C = 281*pF
    gL = 30*nS
    EL = -70.6*mV
    VT = -50.4*mV
    DeltaT = 2*mV
    tauw = 40*ms
    a = 4*nS
    b = 0.08*nA
    I = 8*nA
    Vcut = "vm>2*mV"  # practical threshold condition
    N = 10

    reset = 'vm=Vr;w+=b'

    eqs = """
    dvm/dt=(gL*(EL-vm)+gL*DeltaT*exp((vm-VT)/DeltaT)+I-w)/C : volt
    dw/dt=(a*(vm-EL)-w)/tauw : amp
    Vr:volt
    """

    neuron = NeuronGroup(N, model=eqs, threshold=Vcut, reset=reset)
    neuron.vm = EL
    neuron.w = a*(neuron.vm - EL)
    neuron.Vr = linspace(-48.3*mV, -47.7*mV, N)  # bifurcation parameter

    #run(25*msecond,report='text') # we discard the first spikes

    MSpike = SpikeMonitor(neuron, variables=['vm'])  # record vm at spike times
    MPopRate = PopulationRateMonitor(neuron)

    MMultiState = StateMonitor(neuron, ['w','vm'], record=[6,7,8,9])


    run(10*msecond,report='text')


    monitor_dict['SpikeMonitor'] = MSpike
    monitor_dict['MultiState'] = MMultiState
    monitor_dict['PopulationRateMonitor'] = MPopRate

    return monitor_dict
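A minimal usage sketch (assuming the Brian2 names used above are already imported) that calls the helper and reads the recorded data back out of the returned dictionary:

monitors = run_network()
spike_mon = monitors['SpikeMonitor']
print('spikes recorded:', spike_mon.num_spikes)
print('vm traces shape:', monitors['MultiState'].vm.shape)
print('mean population rate:', monitors['PopulationRateMonitor'].rate.mean())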
Example #8
def test_magic_collect():
    '''
    Make sure all expected objects are collected in a magic network
    '''
    P = PoissonGroup(10, rates=100*Hz)
    G = NeuronGroup(10, 'v:1')
    S = Synapses(G, G, '')
    G_runner = G.custom_operation('')
    S_runner = S.custom_operation('')

    state_mon = StateMonitor(G, 'v', record=True)
    spike_mon = SpikeMonitor(G)
    rate_mon = PopulationRateMonitor(G)

    objects = collect()

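    # 1 PoissonGroup + 1 NeuronGroup + 1 Synapses + 2 custom operations + 3 monitors = 8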
    assert len(objects) == 8, ('expected %d objects, got %d' % (8, len(objects)))
Example #9
def test_continuation():
    defaultclock.dt = 1*ms
    G = NeuronGroup(1, 'dv/dt = -v / (10*ms) : 1')
    G.v = 1
    mon = StateMonitor(G, 'v', record=True)
    net = Network(G, mon)
    net.run(2*ms)

    # Run the same simulation but with two runs that use sub-dt run times
    G2 = NeuronGroup(1, 'dv/dt = -v / (10*ms) : 1')
    G2.v = 1
    mon2 = StateMonitor(G2, 'v', record=True)
    net2 = Network(G2, mon2)
    net2.run(0.5*ms)
    net2.run(1.5*ms)

    assert_equal(mon.t[:], mon2.t[:])
    assert_equal(mon.v[:], mon2.v[:])
Example #10
def test_profile():
    G = NeuronGroup(10,
                    'dv/dt = -v / (10*ms) : 1',
                    threshold='v>1',
                    reset='v=0',
                    name='profile_test')
    G.v = 1.1
    net = Network(G)
    net.run(1 * ms, profile=True)
    # There should be four simulated CodeObjects, one for the group and one each
    # for state update, threshold and reset
    info = net.profiling_info
    info_dict = dict(info)
    assert len(info) == 4
    assert 'profile_test' in info_dict
    assert 'profile_test_stateupdater' in info_dict
    assert 'profile_test_thresholder' in info_dict
    assert 'profile_test_resetter' in info_dict
    assert all([t >= 0 * second for _, t in info])
Example #11
def test_multiple_runs_defaultclock_incorrect():
    defaultclock.dt = 0.1 * ms
    G = NeuronGroup(1, 'dv/dt = -v / (10*ms) : 1')
    net = Network(G)
    net.run(0.5 * ms)

    # The new dt is not compatible with the previous time since we cannot
    # continue at 0.5ms with a dt of 1ms
    defaultclock.dt = 1 * ms
    assert_raises(ValueError, lambda: net.run(1 * ms))
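For contrast, a dt change is accepted when the already-elapsed time is an exact multiple of the new dt; a minimal sketch mirroring the test above (same imports assumed):

def multiple_runs_defaultclock_correct_sketch():
    defaultclock.dt = 0.1 * ms
    G = NeuronGroup(1, 'dv/dt = -v / (10*ms) : 1')
    net = Network(G)
    net.run(0.5 * ms)
    # 0.5 ms is an exact multiple of the new dt, so the simulation can continue
    defaultclock.dt = 0.5 * ms
    net.run(1 * ms)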
Example #12
def test_store_restore_to_file():
    filename = tempfile.mktemp(suffix='state', prefix='brian_test')
    source = NeuronGroup(10,
                         '''dv/dt = rates : 1
                                rates : Hz''',
                         threshold='v>1',
                         reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store(filename=filename)  # default time slot
    net.run(10 * ms)
    net.store('second', filename=filename)
    net.run(10 * ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_

    net.restore(filename=filename)  # Go back to beginning
    assert defaultclock.t == 0 * ms
    assert net.t == 0 * ms
    net.run(20 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    net.restore('second', filename=filename)
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
    net.run(10 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    try:
        os.remove(filename)
    except OSError:
        pass
Example #13
def test_store_restore():
    source = NeuronGroup(10,
                         '''dv/dt = rates : 1
                                rates : Hz''',
                         threshold='v>1',
                         reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store()  # default time slot
    net.run(10 * ms)
    net.store('second')
    net.run(10 * ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_
    net.restore()  # Go back to beginning
    assert defaultclock.t == 0 * ms
    assert net.t == 0 * ms
    net.run(20 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    net.restore('second')
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
    net.run(10 * ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back again (see github issue #681)
    net.restore('second')
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
Example #14
def test_timedarray_customfunc():
    """
    Test TimedArray and Custom Functions
    """
    # simple timedarray test
    ta = TimedArray([1, 2, 3, 4] * mV, dt=0.1 * ms)
    eqn = 'v = ta(t) :volt'
    G = NeuronGroup(1, eqn, method='euler')
    neuro_dict = collect_NeuronGroup(G, get_local_namespace(0))
    ta_dict = neuro_dict['identifiers']['ta']
    assert ta_dict['name'] == ta.name
    assert (ta_dict['values'] == [1, 2, 3, 4] * mV).all()
    assert float(ta_dict['dt']) == float(ta.dt)
    assert ta_dict['ndim'] == 1
    assert ta_dict['type'] == 'timedarray'

    # test 2
    ta2d = TimedArray([[1, 2], [3, 4], [5, 6]] * mV, dt=1 * ms)
    G2 = NeuronGroup(4, 'v = ta2d(t, i%2) : volt')
    neuro_dict = collect_NeuronGroup(G2, get_local_namespace(0))
    ta_dict = neuro_dict['identifiers']['ta2d']
    assert ta_dict['name'] == ta2d.name
    assert (ta_dict['values'] == [[1, 2], [3, 4], [5, 6]] * mV).all()
    assert float(ta_dict['dt']) == float(ta2d.dt)
    assert ta_dict['ndim'] == 2
    assert ta_dict['type'] == 'timedarray'

    # test 3
    def da(x1, x2):
        return (x1 - x2)

    a = 1 * mV
    b = 1 * mV
    da = Function(da, arg_units=[volt, volt], return_unit=volt)
    grp = NeuronGroup(1, 'v = da(a, b) :volt', method='euler')
    neuro_dict = collect_NeuronGroup(grp, get_local_namespace(0))
    identi = neuro_dict['identifiers']['da']
    assert identi['type'] == 'custom_func'
    assert identi['arg_units'] == da._arg_units
    assert identi['arg_types'] == da._arg_types
    assert identi['return_unit'] == da._return_unit
    assert identi['return_type'] == da._return_type
Example #15
def test_statemonitor():
    """
    Test collect_StateMonitor dictionary representation
    """

    # example 1
    grp = NeuronGroup(10, model='dv/dt = (1 - v) / tau :1', method='euler')
    mon = StateMonitor(grp, 'v', record=True)
    statemon_dict = collect_StateMonitor(mon)

    assert statemon_dict['source'] == grp.name
    assert statemon_dict['record']
    assert statemon_dict['n_indices'] == 10
    assert statemon_dict['variables'] == ['v']
    assert statemon_dict['when'] == 'start'
    assert statemon_dict['order'] == 0

    # example 2
    eqn = '''dvar1/dt = (var1 + 1) / tau :1
    var2 = var1 + 3 :1
    var3 = 2 + var1 :1
    '''
    grp2 = NeuronGroup(10, eqn, method='euler')
    mon2 = StateMonitor(grp2, ['var1', 'var3'],
                        record=[2, 4, 6, 8],
                        dt=1 * second)
    statemon_dict2 = collect_StateMonitor(mon2)

    assert statemon_dict2['source'] == grp2.name
    assert sorted(statemon_dict2['variables']) == sorted(['var1', 'var3'])
    assert 'var2' not in statemon_dict2['variables']
    assert statemon_dict2['record'] is not True
    assert (statemon_dict2['record'] == [2, 4, 6, 8]).all()
    assert statemon_dict2['dt'] == 1 * second

    # example 3
    mon3 = StateMonitor(grp, 'v', record=False)
    statemon_dict3 = collect_StateMonitor(mon3)

    assert not statemon_dict3['record'].size
    assert statemon_dict3['when'] == 'start'
    assert statemon_dict3['order'] == 0
Example #16
def example_run(debug=False, **build_options):
    '''
    Run a simple example simulation that tests whether the Brian2/Brian2GeNN/GeNN
    pipeline is working correctly.

    Parameters
    ----------
    debug : bool
        Whether to display debug information (e.g. compilation output) during
        the run. Defaults to ``False``.
    build_options : dict
        Additional options that will be forwarded to the ``set_device`` call,
        e.g. ``use_GPU=False``.
    '''
    from brian2.devices.device import set_device, reset_device
    from brian2 import ms, NeuronGroup, run
    from brian2.utils.logger import std_silent
    import numpy as np
    from numpy.testing import assert_allclose
    from tempfile import mkdtemp
    import shutil
    with std_silent(debug):
        test_dir = mkdtemp(prefix='brian2genn_test')
        set_device('genn', directory=test_dir, debug=debug, **build_options)
        N = 100
        tau = 10 * ms
        eqs = '''
        dV/dt = -V/tau: 1
        '''
        G = NeuronGroup(N,
                        eqs,
                        threshold='V>1',
                        reset='V=0',
                        refractory=5 * ms,
                        method='linear')
        G.V = 'i/100.'
        run(1 * ms)
        assert_allclose(G.V, np.arange(100) / 100. * np.exp(-1 * ms / tau))
        shutil.rmtree(test_dir, ignore_errors=True)
        reset_device()
    print('Example run was successful.')
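A minimal usage sketch; `use_GPU=False` is the build option mentioned in the docstring above for forcing the CPU backend:

if __name__ == '__main__':
    # Run the self-test with verbose output on the CPU backend
    example_run(debug=True, use_GPU=False)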
Example #17
def example_run(device_name="cuda_standalone",
                directory=None,
                **build_options):
    """
    Run a simple example simulation to test whether Brian2CUDA is correctly set up.

    Parameters
    ----------
    device_name : str
        What device to use (default: "cuda_standalone").
    directory : str, optional
        The output directory to write the project to, any existing files will be
        overwritten. If the given directory name is ``None`` (default for this example
        run), then a temporary directory will be used.
    build_options : dict, optional
        Additional options that will be forwarded to the ``device.build`` call.
    """
    from brian2.devices.device import device, set_device
    from brian2 import ms, NeuronGroup, run
    import brian2cuda
    import numpy as np
    from numpy.testing import assert_allclose

    set_device(device_name, build_on_run=False)
    N = 100
    tau = 10 * ms
    G = NeuronGroup(
        N,
        "dv/dt = -v / tau: 1",
        threshold="v > 1",
        reset="v = 0",
        refractory=5 * ms,
        method="linear",
    )
    G.v = "i / 100."
    run(1 * ms)
    device.build(direct_call=False, directory=directory, **build_options)
    assert_allclose(G.v, np.arange(N) / N * np.exp(-1 * ms / tau))
    device.reinit()
    device.activate()
    print("\nExample run was successful.")
Example #18
def test_dt_changes_between_runs():
    defaultclock.dt = 0.1 * ms
    G = NeuronGroup(1, 'v:1')
    mon = StateMonitor(G, 'v', record=True)
    run(.5 * ms)
    defaultclock.dt = .5 * ms
    run(.5 * ms)
    defaultclock.dt = 0.1 * ms
    run(.5 * ms)
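    # 5 samples at dt=0.1 ms, 1 sample at dt=0.5 ms, then 5 more at dt=0.1 ms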
    assert len(mon.t[:]) == 5 + 1 + 5
    assert_allclose(
        mon.t[:], [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 1., 1.1, 1.2, 1.3, 1.4] * ms)
def test_initialize_simulation_runtime(setup):
    net, dt, duration = setup
    start_scope()
    rts = RuntimeSimulator()
    assert_raises(TypeError, rts.initialize)

    rts.initialize(net, var_init=None)
    assert (isinstance(rts.networks['fit'], Network))
    assert_raises(KeyError, rts.initialize, empty_net, None)
    wrong_net = Network(NeuronGroup(1, model, name='neurons2'))
    assert_raises(Exception, rts.initialize, wrong_net, None)
    assert_raises(TypeError, rts.initialize, Network)
Example #20
def test_initialize_simulation_standalone(setup):
    start_scope()
    net, _, _ = setup
    sas = CPPStandaloneSimulator()
    assert_raises(TypeError, sas.initialize)
    assert_raises(TypeError, sas.initialize, net)
    assert_raises(KeyError, sas.initialize, empty_net, None)
    wrong_net = Network(NeuronGroup(1, model, name='neurons2'))
    assert_raises(Exception, sas.initialize, wrong_net, None)

    sas.initialize(net, var_init=None, name='test')
    assert (isinstance(sas.networks['test'], Network))
Example #21
def test_progress_report():
    '''
    Very basic test of progress reporting
    '''
    G = NeuronGroup(1, '')
    net = Network(G)

    # No output
    with captured_output() as (out, err):
        net.run(1 * ms, report=None)
    # There should be no output at all
    out, err = out.getvalue(), err.getvalue()
    assert len(out) == 0 and len(err) == 0

    with captured_output() as (out, err):
        net.run(1 * ms)
    # The default is to not report progress, so again no output
    out, err = out.getvalue(), err.getvalue()
    assert len(out) == 0 and len(err) == 0

    # Progress should go to stdout
    with captured_output() as (out, err):
        net.run(1 * ms, report='text')
    # There should be at least two lines of output
    out, err = out.getvalue(), err.getvalue()
    assert len(out.split('\n')) >= 2 and len(err) == 0

    with captured_output() as (out, err):
        net.run(1 * ms, report='stdout')
    # There should be at least two lines of output
    out, err = out.getvalue(), err.getvalue()
    assert len(out.split('\n')) >= 2 and len(err) == 0

    # Progress should go to stderr
    with captured_output() as (out, err):
        net.run(1 * ms, report='stderr')
    # There should be at least two lines of output
    out, err = out.getvalue(), err.getvalue()
    assert len(err.split('\n')) >= 2 and len(out) == 0

    # Custom function
    calls = []

    def capture_progress(elapsed, complete, duration):
        calls.append((elapsed, complete, duration))

    with captured_output() as (out, err):
        net.run(1 * ms, report=capture_progress)
    out, err = out.getvalue(), err.getvalue()

    assert len(err) == 0 and len(out) == 0
    # There should be at least a call for the start and the end
    assert len(calls) >= 2 and calls[0][1] == 0.0 and calls[-1][1] == 1.0
Example #22
def test_store_restore_to_file():
    filename = tempfile.mktemp(suffix='state', prefix='brian_test')
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store(filename=filename)  # default time slot
    net.run(10*ms)
    net.store('second', filename=filename)
    net.run(10*ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_

    net.restore(filename=filename) # Go back to beginning
    assert defaultclock.t == 0*ms
    assert net.t == 0*ms
    net.run(20*ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    net.restore('second', filename=filename)
    assert defaultclock.t == 10*ms
    assert net.t == 10*ms
    net.run(10*ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
    try:
        os.remove(filename)
    except OSError:
        pass
Example #23
    def __init__(self, source, n_per_channel=1, params=None):
        params = ZhangSynapse._get_parameters(params)
        c_0, c_1 = params['c_0'], params['c_1']
        s_0, s_1 = params['s_0'], params['s_1']
        R_A = params['R_A']
        ns = dict(s_0=s_0, s_1=s_1, c_0=c_0, c_1=c_1)
        eqs =  '''
        # time-varying discharge rate, input into this model
        s : Hz
        
        # discharge-history effect (Equation 20 in differential equation form)        
        H = c_0*e_0 + c_1*e_1 : 1
        de_0/dt = -e_0/s_0    : 1 (unless refractory)
        de_1/dt = -e_1/s_1    : 1 (unless refractory)

        # final time-varying discharge rate for the Poisson process, equation 19
        R = s * (1 - H) : Hz
        '''
        
        # make sure that the s value is first updated in
        # ZhangSynapseRate, then this NeuronGroup is
        # updated by setting order+1
        @network_operation(dt=source.dt[:], when='start', order=source.order+1)
        def distribute_input():
            self.s[:] = source.s[:].repeat(n_per_channel)
        
        NeuronGroup.__init__(self, len(source) * n_per_channel,
                             model=eqs,
                             threshold='rand()<R*dt',
                             reset='''
                             e_0 = 1
                             e_1 = 1
                             ''',
                             refractory=R_A,
                             dt=source.dt[:], order=source.order+1,
                             namespace=ns,
                             method='euler',
                             )
        
        self.contained_objects.append(distribute_input)
Example #24
def run_simulation(parameters):
    """Run the simulation.

    parameters -- dictionary with parameters
    """

    equations = []
    for gating_variable in ["m", "n", "h"]:
        equations.append(
            construct_gating_variable_inf_equation(gating_variable))
        equations.append(
            construct_gating_variable_tau_equation(gating_variable))
        equations.append(construct_gating_variable_ode(gating_variable))
    equations += construct_neuron_ode()

    eqs_HH = reduce(operator.add, equations)
    group = NeuronGroup(1, eqs_HH, method='euler', namespace=parameters)

    group.v = parameters["v_initial"]

    group.m = parameters["m_initial"]
    group.n = parameters["n_initial"]
    group.h = parameters["h_initial"]

    statemon = StateMonitor(group, [
        'v', 'I_ext', 'm', 'n', 'h', 'g_K', 'g_Na', 'I_K', 'I_Na', 'I_L',
        'tau_m', 'tau_n', 'tau_h'
    ],
                            record=True)

    defaultclock.dt = parameters["defaultclock_dt"]
    run(parameters["duration"])

    return statemon
Example #25
    def setup_neuron_group(self,
                           n_neurons,
                           namespace,
                           calc_gradient=False,
                           optimize=True,
                           online_error=False,
                           name='neurons'):
        """
        Set up the neuron group: initialize the required number of neurons,
        create the namespace and initialize the parameters.

        Parameters
        ----------
        n_neurons: int
            number of required neurons
        namespace: dict
            arguments to be added to the NeuronGroup namespace

        Returns
        -------
        neurons : ~brian2.groups.neurongroup.NeuronGroup
            group of neurons

        """
        # We only want to specify the method argument if it is not None –
        # otherwise it should use NeuronGroup's default value
        kwds = {}
        if self.method is not None:
            kwds['method'] = self.method
        neurons = NeuronGroup(n_neurons,
                              self.model,
                              threshold=self.threshold,
                              reset=self.reset,
                              refractory=self.refractory,
                              name=name,
                              namespace=namespace,
                              dt=self.dt,
                              **kwds)
        if calc_gradient:
            sensitivity_eqs = get_sensitivity_equations(
                neurons,
                parameters=self.parameter_names,
                optimize=optimize,
                namespace=namespace)
            neurons = NeuronGroup(n_neurons,
                                  self.model + sensitivity_eqs,
                                  threshold=self.threshold,
                                  reset=self.reset,
                                  refractory=self.refractory,
                                  name=name,
                                  namespace=namespace,
                                  dt=self.dt,
                                  **kwds)
        if online_error:
            neurons.run_regularly(
                'total_error += (' + self.output_var +
                '-output_var(t,i % n_traces))**2 * int(t>=t_start)',
                when='end')

        return neurons
Example #26
def test_store_restore_magic():
    source = NeuronGroup(10,
                         '''dv/dt = rates : 1
                                rates : Hz''',
                         threshold='v>1',
                         reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source,
                        target,
                        model='w:1',
                        on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    store()  # default time slot
    run(10 * ms)
    store('second')
    run(10 * ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_

    restore()  # Go back to beginning
    assert magic_network.t == 0 * ms
    run(20 * ms)
    assert defaultclock.t == 20 * ms
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    restore('second')
    assert magic_network.t == 10 * ms
    run(10 * ms)
    assert defaultclock.t == 20 * ms
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])
Example #27
def test_small_runs():
    # One long run and multiple small runs should give the same results
    group_1 = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1')
    group_1.v = '(i + 1) / N'
    mon_1 = StateMonitor(group_1, 'v', record=True)
    net_1 = Network(group_1, mon_1)
    net_1.run(1*second)

    group_2 = NeuronGroup(10, 'dv/dt = -v / (10*ms) : 1')
    group_2.v = '(i + 1) / N'
    mon_2 = StateMonitor(group_2, 'v', record=True)
    net_2 = Network(group_2, mon_2)
    runtime = 1*ms
    while True:
        runtime *= 3
        runtime = min([runtime, 1*second - net_2.t])
        net_2.run(runtime)
        if net_2.t >= 1*second:
            break

    assert_equal(mon_1.t_[:], mon_2.t_[:])
    assert_equal(mon_1.v_[:], mon_2.v_[:])
Example #28
def test_store_restore():
    source = NeuronGroup(10, '''dv/dt = rates : 1
                                rates : Hz''', threshold='v>1', reset='v=0')
    source.rates = 'i*100*Hz'
    target = NeuronGroup(10, 'v:1')
    synapses = Synapses(source, target, model='w:1', on_pre='v+=w')
    synapses.connect(j='i')
    synapses.w = 'i*1.0'
    synapses.delay = 'i*ms'
    state_mon = StateMonitor(target, 'v', record=True)
    spike_mon = SpikeMonitor(source)
    net = Network(source, target, synapses, state_mon, spike_mon)
    net.store()  # default time slot
    net.run(10*ms)
    net.store('second')
    net.run(10*ms)
    v_values = state_mon.v[:, :]
    spike_indices, spike_times = spike_mon.it_
    net.restore() # Go back to beginning
    assert defaultclock.t == 0*ms
    assert net.t == 0*ms
    net.run(20*ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back to middle
    net.restore('second')
    assert defaultclock.t == 10*ms
    assert net.t == 10*ms
    net.run(10*ms)
    assert_equal(v_values, state_mon.v[:, :])
    assert_equal(spike_indices, spike_mon.i[:])
    assert_equal(spike_times, spike_mon.t_[:])

    # Go back again (see github issue #681)
    net.restore('second')
    assert defaultclock.t == 10 * ms
    assert net.t == 10 * ms
Example #29
def test_incorrect_network_use():
    '''Test some wrong uses of `Network` and `MagicNetwork`'''
    assert_raises(TypeError, lambda: Network(name='mynet',
                                             anotherkwd='does not exist'))
    assert_raises(TypeError, lambda: Network('not a BrianObject'))
    net = Network()
    assert_raises(TypeError, lambda: net.add('not a BrianObject'))
    assert_raises(ValueError, lambda: MagicNetwork())
    G = NeuronGroup(10, 'v:1')
    net.add(G)
    assert_raises(TypeError, lambda: net.remove(object()))
    assert_raises(MagicError, lambda: magic_network.add(G))
    assert_raises(MagicError, lambda: magic_network.remove(G))
def run_net(traj):
    """Creates and runs BRIAN network based on the parameters in `traj`."""

    eqs = traj.eqs

    # Create a namespace dictionary
    namespace = traj.Net.f_to_dict(short_names=True, fast_access=True)
    # Create the Neuron Group
    neuron = NeuronGroup(traj.N,
                         model=eqs,
                         threshold=traj.Vcut,
                         reset=traj.reset,
                         namespace=namespace)
    neuron.vm = traj.EL
    neuron.w = traj.a * (neuron.vm - traj.EL)
    neuron.Vr = linspace(-48.3 * mV, -47.7 * mV,
                         traj.N)  # bifurcation parameter

    # Run the network initially for 100 milliseconds
    print('Initial Run')
    net = Network(neuron)
    net.run(100 * ms, report='text')  # we discard the first spikes

    # Create a Spike Monitor
    MSpike = SpikeMonitor(neuron)
    net.add(MSpike)
    # Create a State Monitor for the membrane voltage, record from neurons 1-3
    MStateV = StateMonitor(neuron, variables=['vm'], record=[1, 2, 3])
    net.add(MStateV)

    # Now record for 500 milliseconds
    print('Measurement run')
    net.run(500 * ms, report='text')

    # Add the BRIAN monitors
    traj.v_standard_result = Brian2MonitorResult
    traj.f_add_result('SpikeMonitor', MSpike)
    traj.f_add_result('StateMonitorV', MStateV)
def test_run_simulation_runtime_var_init(setup):
    _, dt, duration = setup
    start_scope()

    neurons = NeuronGroup(1, model2, name='neurons')
    monitor = StateMonitor(neurons, 'v', record=True, name='statemonitor')
    net = Network(neurons, monitor)

    rts = RuntimeSimulator()
    rts.initialize(net, var_init={'v': -60 * mV})

    rts.run(duration, {'gL': 100, 'C': 10}, ['gL', 'C'], iteration=0)
    v = getattr(rts.statemonitor, 'v')
    assert_equal(np.shape(v), (1, duration / dt))
Example #32
def test_synapse_connect_generator():
    # connector test 3
    start_scope()
    set_device('exporter', build_on_run=False)
    tau = 1 * ms
    eqn = 'dv/dt = (1 - v)/tau :1'
    Source = NeuronGroup(10, eqn, method='exact', threshold='v>0.9')
    S1 = Synapses(Source, Source)
    nett2 = Network(Source, S1)
    S1.connect(j='k for k in range(0, i+1)')
    nett2.run(1 * ms)
    connect3 = device.runs[0]['initializers_connectors'][0]
    assert connect3['j'] == 'k for k in range(0, i+1)'
    device.reinit()
def setup_spikes(request):
    def fin():
        reinit_devices()
    request.addfinalizer(fin)
    EL = -70*mV
    VT = -50*mV
    DeltaT = 2*mV
    C = 1*nF
    gL = 30*nS
    I = TimedArray(input_current, dt=0.01 * ms)
    model = Equations('''
                      dv/dt = (gL*(EL-v)+gL*DeltaT*exp((v-VT)/DeltaT) + I(t))/C : volt
                      ''')
    group = NeuronGroup(1, model,
                        threshold='v > -50*mV',
                        reset='v = -70*mV',
                        method='exponential_euler')
    group.v = -70 * mV
    spike_mon = SpikeMonitor(group)
    run(60*ms)
    spikes = getattr(spike_mon, 't_')

    return spike_mon, spikes
Example #34
def example_run(debug=False, **build_options):
    '''
    Run a simple example simulation that tests whether the Brian2/Brian2GeNN/GeNN
    pipeline is working correctly.

    Parameters
    ----------
    debug : bool
        Whether to display debug information (e.g. compilation output) during
        the run. Defaults to ``False``.
    build_options : dict
        Additional options that will be forwarded to the ``set_device`` call,
        e.g. ``use_GPU=False``.
    '''
    from brian2.devices.device import set_device, reset_device
    from brian2 import ms, NeuronGroup, run
    from brian2.utils.logger import std_silent
    import numpy as np
    from numpy.testing import assert_allclose
    from tempfile import mkdtemp
    import shutil
    with std_silent(debug):
        test_dir = mkdtemp(prefix='brian2genn_test')
        set_device('genn', directory=test_dir, debug=debug, **build_options)
        N = 100
        tau = 10*ms
        eqs = '''
        dV/dt = -V/tau: 1
        '''
        G = NeuronGroup(N, eqs, threshold='V>1', reset='V=0', refractory=5 * ms,
                        method='linear')
        G.V = 'i/100.'
        run(1*ms)
        assert_allclose(G.V, np.arange(100)/100.*np.exp(-1*ms/tau))
        shutil.rmtree(test_dir, ignore_errors=True)
        reset_device()
    print('Example run was successful.')
Example #35
def test_scheduling_summary():
    basename = 'name' + str(uuid.uuid4()).replace('-', '_')
    group = NeuronGroup(10,
                        'dv/dt = -v/(10*ms) : 1',
                        threshold='v>1',
                        reset='v=1',
                        name=basename)
    group.run_regularly('v = rand()', dt=defaultclock.dt * 10, when='end')
    state_mon = StateMonitor(group, 'v', record=True, name=basename + '_sm')
    inactive_state_mon = StateMonitor(group,
                                      'v',
                                      record=True,
                                      name=basename + '_sm_ia',
                                      when='after_end')
    inactive_state_mon.active = False
    net = Network(group, state_mon, inactive_state_mon)
    summary_before = scheduling_summary(net)
    assert [entry.name for entry in summary_before.entries] == [
        basename + '_sm', basename + '_stateupdater',
        basename + '_thresholder', basename + '_resetter',
        basename + '_run_regularly', basename + '_sm_ia'
    ]
    assert [entry.when for entry in summary_before.entries] == [
        'start', 'groups', 'thresholds', 'resets', 'end', 'after_end'
    ]
    assert [entry.dt for entry in summary_before.entries] == [
        defaultclock.dt, defaultclock.dt, defaultclock.dt, defaultclock.dt,
        defaultclock.dt * 10, defaultclock.dt
    ]
    assert [entry.active for entry in summary_before.entries
            ] == [True, True, True, True, True, False]
    assert len(str(summary_before))
    assert len(summary_before._repr_html_())
    run(defaultclock.dt)
    summary_after = scheduling_summary(net)
    assert str(summary_after) == str(summary_before)
    assert summary_after._repr_html_() == summary_before._repr_html_()
Example #36
    def __init__(self, filterbank, targetvar, *args, **kwds):
        # Make sure we're not in standalone mode (which won't work)
        if not isinstance(get_device(), RuntimeDevice):
            raise RuntimeError("Cannot use standalone mode with brian2hears")

        self.targetvar = targetvar
        self.filterbank = filterbank
        filterbank.buffer_init()

        # Sanitize the clock - does it have the right dt value?
        if 'clock' in kwds:
            if int(1/kwds['clock'].dt)!=int(filterbank.samplerate):
                raise ValueError('Clock should have 1/dt=samplerate')
        elif 'dt' in kwds:
            if int(1 / kwds['dt']) != int(filterbank.samplerate):
                raise ValueError('Require 1/dt=samplerate')
        else:
            kwds['dt'] = 1/filterbank.samplerate
        
        buffersize = kwds.pop('buffersize', 32)
        if not isinstance(buffersize, int):
            if not have_same_dimensions(buffersize, second):
                raise DimensionMismatchError("buffersize argument should be an integer or in seconds")
            buffersize = int(buffersize*filterbank.samplerate)

        self.buffersize = buffersize

        self.apply_filterbank = ApplyFilterbank(self, targetvar, filterbank, buffersize)

        NeuronGroup.__init__(self, filterbank.nchannels, *args, **kwds)

        if self.variables[targetvar].dim is not DIMENSIONLESS:
            raise DimensionMismatchError("Target variable must be dimensionless")

        apply_filterbank_output = NetworkOperation(self.apply_filterbank.__call__, when='start', clock=self.clock)
        self.contained_objects.append(apply_filterbank_output)
Example #37
def test_magic_collect():
    '''
    Make sure all expected objects are collected in a magic network
    '''
    P = PoissonGroup(10, rates=100*Hz)
    G = NeuronGroup(10, 'v:1', threshold='False')
    S = Synapses(G, G, '')

    state_mon = StateMonitor(G, 'v', record=True)
    spike_mon = SpikeMonitor(G)
    rate_mon = PopulationRateMonitor(G)

    objects = collect()

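    # 1 PoissonGroup + 1 NeuronGroup + 1 Synapses + 3 monitors = 6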
    assert len(objects) == 6, ('expected %d objects, got %d' % (6, len(objects)))
Example #38
def setup(request):
    dt = 0.1 * ms
    duration = 10 * ms

    neurons = NeuronGroup(1, model, name='neurons')
    monitor = StateMonitor(neurons, 'I', record=True, name='statemonitor')

    net = Network(neurons, monitor)

    def fin():
        reinit_devices()

    request.addfinalizer(fin)

    return net, dt, duration
Example #39
def test_synapse_connect_cond():
    # check connectors
    start_scope()
    set_device('exporter')
    eqn = 'dv/dt = (1 - v)/tau :1'
    tau = 1 * ms
    P = NeuronGroup(5, eqn, method='euler', threshold='v>0.8')
    Q = NeuronGroup(10, eqn, method='euler', threshold='v>0.9')
    w = 1
    tata = 2
    bye = 2
    my_prob = -1
    S = Synapses(P, Q, on_pre='v += w')
    S.connect('tata > bye', p='my_prob', n=5)
    run(1 * ms)
    connect = device.runs[0]['initializers_connectors'][0]
    assert connect['probability'] == 'my_prob'
    assert connect['n_connections'] == 5
    assert connect['type'] == 'connect'
    assert connect['identifiers']['tata'] == bye
    with pytest.raises(KeyError):
        connect['i']
    with pytest.raises(KeyError):
        connect['j']
    device.reinit()
Example #40
def test_ExportDevice_unsupported():
    """
    Test whether unsupported objects for standard-format export
    raise an error
    """
    start_scope()
    set_device('exporter')
    eqn = '''
    v = 1 :1
    g :1
    '''
    G = NeuronGroup(1, eqn)
    _ = PoissonInput(G, 'g', 1, 1 * Hz, 1)
    # with pytest.raises(NotImplementedError):
    run(10 * ms)
Example #41
def test_poissoninput():
    """
    Test collect_PoissonInput()
    """
    # test 1
    start_scope()
    v_th = 1 * volt
    grp = NeuronGroup(10,
                      'dv/dt = (v_th - v)/(10*ms) :volt',
                      method='euler',
                      threshold='v>100*mV',
                      reset='v=0*mV')
    poi = PoissonInput(grp, 'v', 10, 1 * Hz, 'v_th * rand() + 1*mV')
    poi_dict = collect_PoissonInput(poi, get_local_namespace(0))
    assert poi_dict['target'] == grp.name
    assert poi_dict['rate'] == 1 * Hz
    assert poi_dict['N'] == 10
    assert poi_dict['target_var'] == 'v'
    assert poi_dict['when'] == poi.when
    assert poi_dict['order'] == poi.order
    assert poi_dict['dt'] == poi.clock.dt
    assert poi_dict['identifiers']['v_th'] == v_th
    # test 2
    grp2 = NeuronGroup(10,
                       'dv_1_2_3/dt = (v_th - v_1_2_3)/(10*ms) :volt',
                       method='euler',
                       threshold='v_1_2_3>v_th',
                       reset='v_1_2_3=-v_th')
    poi2 = PoissonInput(grp2, 'v_1_2_3', 0, 0 * Hz, v_th)
    poi_dict = collect_PoissonInput(poi2, get_local_namespace(0))
    assert poi_dict['target'] == grp2.name
    assert poi_dict['rate'] == 0 * Hz
    assert poi_dict['N'] == 0
    assert poi_dict['target_var'] == 'v_1_2_3'
    with pytest.raises(KeyError):
        poi_dict['identifiers']
Example #42
    def _build_model(self, traj, brian_list, network_dict):
        """Builds the neuron groups from `traj`.

        Adds the neuron groups to `brian_list` and `network_dict`.

        """

        model = traj.parameters.model

        # Create the equations for both models
        eqs_dict = self._build_model_eqs(traj)

        # Create inhibitory neurons
        eqs_i = eqs_dict['i']
        neurons_i = NeuronGroup(N=model.N_i,
                                model=eqs_i,
                                threshold=model.V_th,
                                reset=model.reset_func,
                                refractory=model.refractory,
                                method='Euler')

        # Create excitatory neurons
        eqs_e = eqs_dict['e']
        neurons_e = NeuronGroup(N=model.N_e,
                                model=eqs_e,
                                threshold=model.V_th,
                                reset=model.reset_func,
                                refractory=model.refractory,
                                method='Euler')

        # Set the bias terms
        neurons_e.mu = rand(
            model.N_e) * (model.mu_e_max - model.mu_e_min) + model.mu_e_min
        neurons_i.mu = rand(
            model.N_i) * (model.mu_i_max - model.mu_i_min) + model.mu_i_min

        # Set initial membrane potentials
        neurons_e.V = rand(model.N_e)
        neurons_i.V = rand(model.N_i)

        # Add both groups to the `brian_list` and the `network_dict`
        brian_list.append(neurons_i)
        brian_list.append(neurons_e)
        network_dict['neurons_e'] = neurons_e
        network_dict['neurons_i'] = neurons_i
def NeuronGroupLIF_HP(N, V0, TAU_V, RATE_AVG, T_MIN, T_MAX, ETA_T):
    nsp = {'V0': V0, 'TAU_V': TAU_V, 'RATE_AVG': RATE_AVG, 'T_MIN': T_MIN, 'T_MAX': T_MAX, 'ETA_T': ETA_T}
    # Membrane potential v leaks back towards V0 over time
    # Between spikes the threshold T decays towards T_MIN at a rate scaled by RATE_AVG
    eqs_dyn = '''
    dv/dt = (V0 - v) / TAU_V : volt
    dT/dt = -ETA_T * (T - T_MIN) * (T_MAX - T) * RATE_AVG : volt
    '''

    # On spike, the threshold is increased
    eqs_reset = '''
    v = V0
    T += ETA_T * (T - T_MIN) * (T_MAX - T)
    '''

    return NeuronGroup(N, eqs_dyn, threshold='v > T', reset=eqs_reset, namespace=nsp)
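A minimal usage sketch with made-up parameter values; every number below is an assumption chosen only so the dimensions work out (ETA_T must carry units of 1/volt for both the ODE and the reset to be dimensionally consistent):

# hypothetical parameters, for illustration only
group = NeuronGroupLIF_HP(N=100, V0=-65*mV, TAU_V=10*ms, RATE_AVG=5*Hz,
                          T_MIN=-55*mV, T_MAX=-40*mV, ETA_T=0.1/mV)
group.v = -65*mV
group.T = -50*mV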
Example #44
def test_spikemonitor():
    """
    Test collector function for SpikeMonitor
    """

    # example 1
    grp = NeuronGroup(5,
                      '''dv/dt = (v0 - v)/tau :volt''',
                      method='exact',
                      threshold='v > v_th',
                      reset='v = v0',
                      name="My_Neurons")
    tau = 10 * ms
    v0 = -70 * mV
    v_th = 800 * mV
    mon = SpikeMonitor(grp, 'v', record=[0, 4])
    mon_dict = collect_SpikeMonitor(mon)

    assert mon_dict['source'] == 'My_Neurons'
    assert sorted(mon_dict['variables']) == sorted(['i', 't', 'v'])
    assert mon_dict['record'] == [0, 4]
    assert mon_dict['event'] == 'spike'
    assert mon_dict['when'] == 'thresholds'
    assert mon_dict['order'] == 1

    # example 2
    pos = PoissonGroup(5, rates=100 * Hz)
    smon = SpikeMonitor(pos, record=[0, 1, 2, 3, 4])
    smon_dict = collect_SpikeMonitor(smon)

    assert smon_dict['source'] == pos.name
    assert 'i' in smon_dict['variables']

    assert smon_dict['record'] == [0, 1, 2, 3, 4]
    assert smon_dict['when'] == 'thresholds'
    assert smon_dict['order'] == 1

    # example 3
    spk = SpikeGeneratorGroup(10, [2, 6, 8], [5 * ms, 10 * ms, 15 * ms])
    spkmon = SpikeMonitor(spk, ['t', 'i'], record=0)
    smon_dict = collect_SpikeMonitor(spkmon)

    assert smon_dict['record'] == np.array([0])
    assert 't' in smon_dict['variables']
    assert smon_dict['source'] == spk.name
    assert smon_dict['when'] == 'thresholds'
    assert smon_dict['order'] == 1
Example #45
def test_dt_restore():
    defaultclock.dt = 0.5*ms
    G = NeuronGroup(1, 'dv/dt = -v/(10*ms) : 1')
    mon = StateMonitor(G, 'v', record=True)
    net = Network(G, mon)
    net.store()

    net.run(1*ms)
    assert_equal(mon.t[:], [0, 0.5]*ms)
    defaultclock.dt = 1*ms
    net.run(2*ms)
    assert_equal(mon.t[:], [0, 0.5, 1, 2]*ms)
    net.restore()
    assert_equal(mon.t[:], [])
    net.run(1*ms)
    assert defaultclock.dt == 0.5*ms
    assert_equal(mon.t[:], [0, 0.5]*ms)
Example #46
    def _build_model(self, traj, brian_list, network_dict):
        """Builds the neuron groups from `traj`.

        Adds the neuron groups to `brian_list` and `network_dict`.

        """

        model = traj.parameters.model

        # Create the equations for both models
        eqs_dict = self._build_model_eqs(traj)

        # Create inhibitory neurons
        eqs_i = eqs_dict['i']
        neurons_i = NeuronGroup(N=model.N_i,
                                model=eqs_i,
                                threshold=model.V_th,
                                reset=model.reset_func,
                                refractory=model.refractory,
                                method='Euler')

        # Create excitatory neurons
        eqs_e = eqs_dict['e']
        neurons_e = NeuronGroup(N=model.N_e,
                                model=eqs_e,
                                threshold=model.V_th,
                                reset=model.reset_func,
                                refractory=model.refractory,
                                method='Euler')

        # Set the bias terms
        neurons_e.mu = rand(model.N_e) * (model.mu_e_max - model.mu_e_min) + model.mu_e_min
        neurons_i.mu = rand(model.N_i) * (model.mu_i_max - model.mu_i_min) + model.mu_i_min

        # Set initial membrane potentials
        neurons_e.V = rand(model.N_e)
        neurons_i.V = rand(model.N_i)

        # Add both groups to the `brian_list` and the `network_dict`
        brian_list.append(neurons_i)
        brian_list.append(neurons_e)
        network_dict['neurons_e'] = neurons_e
        network_dict['neurons_i'] = neurons_i
Example #47
def run_cpp_standalone(params, network_objs):
    import os
    from numpy.fft import rfft, irfft
    from brian2.devices.device import CurrentDeviceProxy
    from brian2.units import Unit
    from brian2 import check_units, implementation, device, prefs, NeuronGroup, Network

    tempdir = os.path.join(params["program_dir"], "cpp_standalone")
    tempdir = os.path.join(tempdir, "c_" + str(params["sigma_c"]) + \
                           "_s_" + str(params["sigma_s"]))
    if not os.path.exists(tempdir):
        os.makedirs(tempdir)


    prefs.codegen.cpp.libraries += ['mkl_gf_lp64', # -Wl,--start-group
                                    'mkl_gnu_thread',
                                    'mkl_core', #  -Wl,--end-group
                                    'iomp5']


    # give extra arguments and path information to the compiler
    extra_incs = ['-I'+os.path.expanduser(s) for s in [ tempdir, "~/intel/mkl/include"]]
    prefs.codegen.cpp.extra_compile_args_gcc = ['-w', '-Ofast', '-march=native'] + extra_incs

    # give extra arguments and path information to the linker
    prefs.codegen.cpp.extra_link_args += ['-L{0}/intel/mkl/lib/intel64'.format(os.path.expanduser('~')),
                                          '-L{0}/intel/lib/intel64'.format(os.path.expanduser('~')),
                                          '-m64', '-Wl,--no-as-needed']

    # Path that the compiled and linked code needs at runtime
    os.environ["LD_LIBRARY_PATH"] = os.path.expanduser('~/intel/mkl/lib/intel64:')
    os.environ["LD_LIBRARY_PATH"] += os.path.expanduser('~/intel/lib/intel64:')

    # Variable definitions
    N = params["NI"] # this is the amount of neurons with variable synaptic strength
    Noffset = params["NE"]
    neurons = network_objs["neurons"]
    params["rho0_dt"] = params["rho_0"]/second * params["rate_interval"]
    mkl_threads = 1


    # Includes the header files in all generated files
    prefs.codegen.cpp.headers += ['<sense.h>',]
    prefs.codegen.cpp.define_macros += [('N_REAL', int(N)),
                                        ('N_CMPLX', int(N/2+1))]
    path_to_sense_hpp = os.path.join(tempdir, 'sense.h')
    path_to_sense_cpp = os.path.join(tempdir, 'sense.cpp')
    with open(path_to_sense_hpp, "w") as f:
        header_code = '''
        #ifndef SENSE_H
        #define SENSE_H
        #include <mkl_service.h>
        #include <mkl_vml.h>
        #include <mkl_dfti.h>
        #include <cstring>
        extern DFTI_DESCRIPTOR_HANDLE hand;
        extern MKL_Complex16 in_cmplx[N_CMPLX], out_cmplx[N_CMPLX], k_cmplx[N_CMPLX];
        DFTI_DESCRIPTOR_HANDLE init_dfti();
        #endif'''
        f.write(header_code)
        #MKL_Complex16 is a type (probably struct)
    with open(path_to_sense_cpp, "w") as f:
        sense_code = '''
        #include <sense.h>
        DFTI_DESCRIPTOR_HANDLE hand;
        MKL_Complex16 in_cmplx[N_CMPLX], out_cmplx[N_CMPLX], k_cmplx[N_CMPLX];
        DFTI_DESCRIPTOR_HANDLE init_dfti()
        {{
            DFTI_DESCRIPTOR_HANDLE hand = 0;
            mkl_set_num_threads({mkl_threads});
            DftiCreateDescriptor(&hand, DFTI_DOUBLE, DFTI_REAL, 1, (MKL_LONG)N_REAL); //MKL_LONG status
            DftiSetValue(hand, DFTI_PLACEMENT, DFTI_NOT_INPLACE);
            DftiSetValue(hand, DFTI_CONJUGATE_EVEN_STORAGE, DFTI_COMPLEX_COMPLEX);
            DftiSetValue(hand, DFTI_BACKWARD_SCALE, 1. / N_REAL);
            //if (0 == status) status = DftiSetValue(hand, DFTI_THREAD_LIMIT, {mkl_threads});
            DftiCommitDescriptor(hand); //if (0 != status) cout << "ERROR, status = " << status << "\\n";
            return hand;
        }} '''.format(mkl_threads=mkl_threads, )
        f.write(sense_code)

    # device_get_array_name is the device's get_array_name() function; it returns
    # the string names of Brian arrays
    device_get_array_name = CurrentDeviceProxy.__getattr__(device, 'get_array_name')
    # insert_code is a function used to insert code into the main() function
    insert_code = CurrentDeviceProxy.__getattr__(device, 'insert_code')

    ### Computing the kernel (Owen changed it to a gaussian kernel now)
    # Owen uses a trick here: he creates a NeuronGroup that doesn't really do
    # anything in the simulation. It's just a dummy NeuronGroup holding an
    # array that he wants to access during runtime.
    if params["sigma_s"] == np.infty:
        k = np.ones(N)/N
    elif params["sigma_s"] < 1e-3:
        k = np.zeros(N)
        k[0] = 1
    else:
        intercell = params["x_NI"]
        length = intercell*N
        d = np.linspace(intercell-length/2, length/2, N)
        d = np.roll(d, int(N/2+1))
        k = np.exp(-np.abs(d)/params["sigma_s"])
        k /= k.sum()
    rate_vars =  '''k : 1
                    r_hat : 1
                    r_hat_single : 1'''
    kg = NeuronGroup(N, model=rate_vars, name='kernel_rates')
    kg.active = False
    kg.k = k #kernel in the spatial domain
    network_objs["dummygroup"] = kg



    main_code = '''
    hand = init_dfti();
    DftiComputeForward(hand, brian::{k}, k_cmplx);
    '''.format(k=device_get_array_name(kg.variables['k']))
    insert_code('main', main_code) # DftiComp.. writes its result into k_cmplx
    K = rfft(k)

    # Variable A is a spike counter.
    # memset resets the array to zero (memset takes void pointers); dereferencing
    # with *brian::... makes sizeof return the element size rather than the
    # pointer size. For every array, Brian also creates an integer of the same
    # name prefixed with _num_ that holds the array's length.
    custom_code = '''
    double spatial_filter(int)
    {{
        DftiComputeForward(hand, brian::{A}+{Noffset}, in_cmplx);
        vzMul(N_CMPLX, in_cmplx, k_cmplx, out_cmplx);
        DftiComputeBackward(hand, out_cmplx, brian::{r_hat});
        memset(brian::{A}, 0, brian::_num_{A}*sizeof(*brian::{A}));
        return 0;
    }}
    '''.format(A=device_get_array_name(neurons.variables['A']),
               r_hat=device_get_array_name(kg.variables['r_hat']),
               Noffset=Noffset)
    @implementation('cpp', custom_code)
    @check_units(_=Unit(1), result=Unit(1), discard_units=True)
    def spatial_filter(_):
        kg.r_hat = irfft(K * rfft(neurons.A), N).real
        neurons.A = 0
        return 0
    network_objs["neurons"].run_regularly('dummy = spatial_filter()',
                          dt=params["rate_interval"], order=1,
                          name='filterspatial')
    params["spatial_filter"] = spatial_filter

    custom_code = '''
    double update_weights(double w, int32_t i_pre)
    {{
        w += {eta}*(brian::{r_hat}[i_pre] - {rho0_dt});
        return std::max({wmin}, std::min(w, {wmax}));
    }}
    '''.format(r_hat=device_get_array_name(kg.variables['r_hat']),
               eta=params["eta"], rho0_dt = params["rho0_dt"],
               wmin=params["wmin"], wmax=params["wmax"])


    @implementation('cpp', custom_code)
    @check_units(w=Unit(1), i_pre=Unit(1), result=Unit(1), discard_units=True)
    def update_weights(w, i_pre):
        del_W = params["eta"]*(kg.r_hat - params["rho0_dt"])
        w += del_W[i_pre]
        np.clip(w, params["wmin"], params["wmax"], out=w)
        return w
    network_objs["con_ei"].run_regularly('w = update_weights(w, i)',
                                         dt=params["rate_interval"],
                                         when='end', name='weightupdate')
    # i is the presynaptic index (brian
    # knows this automatically, j would be postsynaptic)
    params["update_weights"] = update_weights



    # Delete the monitors from the network objects because we don't want to
    # save these values (for a long preparation time they would take up too
    # much space in memory).
    monitors = network_objs["monitors"]
    network_objs.pop("monitors")
    net = Network(list(set(network_objs.values())))

    if not params["do_run"]:
        print("Running the network was not desired")
        return

    if params["prep_time"]/second > 0:
        print("Prep time run was desired, adding prep time simulation for " \
              + str(params["prep_time"]/second) + " seconds.")
        net.run(params["prep_time"], report='text', namespace = params)

    # Add the Monitors only now so we don't record unnecessarily much.
    network_objs.update(monitors)
    net.add(list(set(monitors.values())))

    print("Adding recorded simulation time " + str(params["simtime"]/second)
          + " seconds")
    net.run(params["simtime"], report='text', namespace = params)
    additional_source_files = [path_to_sense_cpp,]
    build = CurrentDeviceProxy.__getattr__(device, 'build')
    build(directory=tempdir, compile=True, run=True, debug=False,
          additional_source_files=additional_source_files)
def main(): # pragma: no cover
  from brian2 import start_scope,mvolt,ms,NeuronGroup,StateMonitor,run
  import matplotlib.pyplot as plt
  import neo
  import quantities as pq

  start_scope()
  
  # Izhikevich neuron parameters.  
  a = 0.02/ms
  b = 0.2/ms
  c = -65*mvolt
  d = 6*mvolt/ms
  I = 4*mvolt/ms
  
  # Standard Izhikevich neuron equations.  
  eqs = '''
  dv/dt = 0.04*v**2/(ms*mvolt) + (5/ms)*v + 140*mvolt/ms - u + I : volt
  du/dt = a*((b*v) - u) : volt/second
  '''
  
  reset = '''
  v = c
  u += d
  '''
  
  # Setup and run simulation.  
  G = NeuronGroup(1, eqs, threshold='v>30*mvolt', reset='v = -70*mvolt')
  G.v = -65*mvolt
  G.u = b*G.v
  M = StateMonitor(G, 'v', record=True)
  run(300*ms)
  
  # Store results in neo format.  
  vm = neo.core.AnalogSignal(M.v[0], units=pq.V, sampling_period=0.1*pq.ms)
  
  # Plot results.  
  plt.figure()
  plt.plot(vm.times*1000,vm*1000) # Plot mV and ms instead of V and s.  
  plt.xlabel('Time (ms)')
  plt.ylabel('mV')
  
  # Save results.  
  iom = neo.io.PyNNNumpyIO('spike_extraction_test_data')
  block = neo.core.Block()
  segment = neo.core.Segment()
  segment.analogsignals.append(vm)
  block.segments.append(segment)
  iom.write(block)
  
  # Load results.  
  iom2 = neo.io.PyNNNumpyIO('spike_extraction_test_data.npz')
  data = iom2.read()
  vm = data[0].segments[0].analogsignals[0]
  
  # Plot results. 
  # The two figures should match.   
  plt.figure()
  plt.plot(vm.times*1000,vm*1000) # Plot mV and ms instead of V and s.  
  plt.xlabel('Time (ms)')
  plt.ylabel('mV')
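The two plots above are meant to match; a small numerical check of the save/load round trip (a sketch that assumes M and the reloaded vm from the code above are in scope, e.g. when the body of main() is run interactively) could be:

import numpy as np

original = np.asarray(M.v[0])                 # recorded trace, in volts
reloaded = np.asarray(vm.magnitude).ravel()   # reloaded neo AnalogSignal, also in volts
print("round trip ok:", np.allclose(original, reloaded))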
def simulate_brunel_network(
        N_Excit=5000,
        N_Inhib=None,
        N_extern=N_POISSON_INPUT,
        connection_probability=CONNECTION_PROBABILITY_EPSILON,
        w0=SYNAPTIC_WEIGHT_W0,
        g=RELATIVE_INHIBITORY_STRENGTH_G,
        synaptic_delay=SYNAPTIC_DELAY,
        poisson_input_rate=POISSON_INPUT_RATE,
        w_external=None,
        v_rest=V_REST,
        v_reset=V_RESET,
        firing_threshold=FIRING_THRESHOLD,
        membrane_time_scale=MEMBRANE_TIME_SCALE,
        abs_refractory_period=ABSOLUTE_REFRACTORY_PERIOD,
        monitored_subset_size=100,
        random_vm_init=False,
        sim_time=100.*b2.ms):
    """
    Fully parametrized implementation of a sparsely connected network of LIF neurons (Brunel 2000)

    Args:
        N_Excit (int): Size of the excitatory population
        N_Inhib (int): optional. Size of the inhibitory population.
            If not set (=None), N_Inhib is set to N_Excit/4.
        N_extern (int): optional. Number of presynaptic excitatory Poisson neurons. Note: if set to a value,
            this number depends neither on N_Excit nor on connection_probability (this differs from the
            book and the paper). Only if N_extern is set to None is it computed as
            N_Excit*connection_probability.
        connection_probability (float): probability to connect to any of the (N_Excit+N_Inhib) neurons
            CE = connection_probability*N_Excit
            CI = connection_probability*N_Inhib
            Cexternal = N_extern
        w0 (float): Synaptic strength J
        g (float): relative importance of inhibition. J_exc = w0. J_inhib = -g*w0
        synaptic_delay (Quantity): Delay between presynaptic spike and postsynaptic increase of v_m
        poisson_input_rate (Quantity): Poisson rate of the external population
        w_external (float): optional. Synaptic weight of the excitatory external poisson neurons onto all
            neurons in the network. Default is None, in that case w_external is set to w0, which is the
            standard value in the book and in the paper Brunel2000.
            The purpose of this parameter is to see the effect of external input in the
            absence of network feedback (by setting w0 to 0mV and w_external > 0).
        v_rest (Quantity): Resting potential
        v_reset (Quantity): Reset potential
        firing_threshold (Quantity): Spike threshold
        membrane_time_scale (Quantity): tau_m
        abs_refractory_period (Quantity): absolute refractory period, tau_ref
        monitored_subset_size (int): nr of neurons for which a StateMonitor records Vm
        random_vm_init (bool): if true, the membrane voltage of each neuron is initialized with a
            random value drawn from Uniform(v_rest, firing_threshold)
        sim_time (Quantity): Simulation time

    Returns:
        (rate_monitor, spike_monitor, voltage_monitor, idx_monitored_neurons)
        PopulationRateMonitor: Rate Monitor
        SpikeMonitor: SpikeMonitor for ALL (N_Excit+N_Inhib) neurons
        StateMonitor: membrane voltage for a selected subset of neurons
        list: index of monitored neurons. length = monitored_subset_size
    """
    if N_Inhib is None:
        N_Inhib = int(N_Excit/4)
    if N_extern is None:
        N_extern = int(N_Excit*connection_probability)
    if w_external is None:
        w_external = w0

    J_excit = w0
    J_inhib = -g*w0

    lif_dynamics = """
    dv/dt = -(v-v_rest) / membrane_time_scale : volt (unless refractory)"""

    network = NeuronGroup(
        N_Excit+N_Inhib, model=lif_dynamics,
        threshold="v>firing_threshold", reset="v=v_reset", refractory=abs_refractory_period,
        method="linear")
    if random_vm_init:
        network.v = random.uniform(v_rest/b2.mV, high=firing_threshold/b2.mV, size=(N_Excit+N_Inhib))*b2.mV
    else:
        network.v = v_rest
    excitatory_population = network[:N_Excit]
    inhibitory_population = network[N_Excit:]

    exc_synapses = Synapses(excitatory_population, target=network, on_pre="v += J_excit", delay=synaptic_delay)
    exc_synapses.connect(p=connection_probability)

    inhib_synapses = Synapses(inhibitory_population, target=network, on_pre="v += J_inhib", delay=synaptic_delay)
    inhib_synapses.connect(p=connection_probability)

    external_poisson_input = PoissonInput(target=network, target_var="v", N=N_extern,
                                          rate=poisson_input_rate, weight=w_external)

    # collect data of a subset of neurons:
    monitored_subset_size = min(monitored_subset_size, (N_Excit+N_Inhib))
    idx_monitored_neurons = sample(range(N_Excit+N_Inhib), monitored_subset_size)
    rate_monitor = PopulationRateMonitor(network)
    # record= some_list is not supported? :-(
    spike_monitor = SpikeMonitor(network, record=idx_monitored_neurons)
    voltage_monitor = StateMonitor(network, "v", record=idx_monitored_neurons)

    b2.run(sim_time)
    return rate_monitor, spike_monitor, voltage_monitor, idx_monitored_neurons
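To make the docstring's connectivity bookkeeping concrete: with N_Excit=5000, N_Inhib defaults to 5000/4 = 1250, and an assumed connection_probability of 0.1 would give CE = 0.1*5000 = 500 excitatory and CI = 0.1*1250 = 125 inhibitory presynaptic partners per neuron (0.1 is only an illustrative value; the actual default is the module constant CONNECTION_PROBABILITY_EPSILON). A minimal usage sketch with the module defaults:

import brian2 as b2

rate_mon, spike_mon, v_mon, monitored_idx = simulate_brunel_network(
    N_Excit=5000, sim_time=100. * b2.ms)
print("total spikes:", spike_mon.num_spikes)
print("monitored neurons:", len(monitored_idx))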
def simulate_wm(
        N_excitatory=1024, N_inhibitory=256,
        N_extern_poisson=1000, poisson_firing_rate=1.4 * b2.Hz, weight_scaling_factor=2.,
        sigma_weight_profile=20., Jpos_excit2excit=1.6,
        stimulus_center_deg=180, stimulus_width_deg=40, stimulus_strength=0.07 * b2.namp,
        t_stimulus_start=0 * b2.ms, t_stimulus_duration=0 * b2.ms,
        distractor_center_deg=90, distractor_width_deg=40, distractor_strength=0.0 * b2.namp,
        t_distractor_start=0 * b2.ms, t_distractor_duration=0 * b2.ms,
        G_inhib2inhib=.35 * 1.024 * b2.nS,
        G_inhib2excit=.35 * 1.336 * b2.nS,
        G_excit2excit=.35 * 0.381 * b2.nS,
        G_excit2inhib=.35 * 1.2 * 0.292 * b2.nS,
        monitored_subset_size=1024, sim_time=800. * b2.ms):
    """
    Args:
        N_excitatory (int): Size of the excitatory population
        N_inhibitory (int): Size of the inhibitory population
        weight_scaling_factor (float): weight prefactor. When increasing the size of the populations,
            the synaptic weights have to be decreased. Using the default values, we have
            N_excitatory*weight_scaling_factor = 2048 and N_inhibitory*weight_scaling_factor=512
        N_extern_poisson (int): Size of the external input population (Poisson input)
        poisson_firing_rate (Quantity): Firing rate of the external population
        sigma_weight_profile (float): standard deviation of the gaussian input profile in
            the excitatory population.
        Jpos_excit2excit (float): Strength of the recurrent input within the excitatory population.
            Jneg_excit2excit is computed from sigma_weight_profile, Jpos_excit2excit and the normalization
            condition.
        stimulus_center_deg (float): Center of the stimulus in [0, 360]
        stimulus_width_deg (float): width of the stimulus. All neurons in
            stimulus_center_deg +/- (stimulus_width_deg/2) receive the same input current
        stimulus_strength (Quantity): Input current to the neurons at stimulus_center_deg +/- (stimulus_width_deg/2)
        t_stimulus_start (Quantity): time when the input stimulus is turned on
        t_stimulus_duration (Quantity): duration of the stimulus.
        distractor_center_deg (float): Center of the distractor in [0, 360]
        distractor_width_deg (float): width of the distractor. All neurons in
            distractor_center_deg +/- (distractor_width_deg/2) receive the same input current
        distractor_strength (Quantity): Input current to the neurons at
            distractor_center_deg +/- (distractor_width_deg/2)
        t_distractor_start (Quantity): time when the distractor is turned on
        t_distractor_duration (Quantity): duration of the distractor.
        G_inhib2inhib (Quantity): projections from inhibitory to inhibitory population (later
            rescaled by weight_scaling_factor)
        G_inhib2excit (Quantity): projections from inhibitory to excitatory population (later
            rescaled by weight_scaling_factor)
        G_excit2excit (Quantity): projections from excitatory to excitatory population (later
            rescaled by weight_scaling_factor)
        G_excit2inhib (Quantity): projections from excitatory to inhibitory population (later
            rescaled by weight_scaling_factor)
        monitored_subset_size (int): nr of neurons for which a Spike- and Voltage monitor
            is registered.
        sim_time (Quantity): simulation time

    Returns:

        results (tuple):
            rate_monitor_excit (Brian2 PopulationRateMonitor for the excitatory population),
            spike_monitor_excit, voltage_monitor_excit, idx_monitored_neurons_excit,
            rate_monitor_inhib, spike_monitor_inhib, voltage_monitor_inhib, idx_monitored_neurons_inhib,
            weight_profile_45 (the weight profile for the neuron with preferred direction = 45deg).
    """
    # specify the excitatory pyramidal cells:
    Cm_excit = 0.5 * b2.nF  # membrane capacitance of excitatory neurons
    G_leak_excit = 25.0 * b2.nS  # leak conductance
    E_leak_excit = -70.0 * b2.mV  # reversal potential
    v_firing_threshold_excit = -50.0 * b2.mV  # spike condition
    v_reset_excit = -60.0 * b2.mV  # reset voltage after spike
    t_abs_refract_excit = 2.0 * b2.ms  # absolute refractory period

    # specify the weight profile in the recurrent population
    # std-dev of the gaussian weight profile around the preferred direction
    # sigma_weight_profile = 12.0  # std-dev of the gaussian weight profile around the preferred direction

    #
    # Jneg_excit2excit = 0

    # specify the inhibitory interneurons:
    Cm_inhib = 0.2 * b2.nF
    G_leak_inhib = 20.0 * b2.nS
    E_leak_inhib = -70.0 * b2.mV
    v_firing_threshold_inhib = -50.0 * b2.mV
    v_reset_inhib = -60.0 * b2.mV
    t_abs_refract_inhib = 1.0 * b2.ms

    # specify the AMPA synapses
    E_AMPA = 0.0 * b2.mV
    tau_AMPA = .9 * 2.0 * b2.ms

    # specify the GABA synapses
    E_GABA = -70.0 * b2.mV
    tau_GABA = 10.0 * b2.ms

    # specify the NMDA synapses
    E_NMDA = 0.0 * b2.mV
    tau_NMDA_s = .65 * 100.0 * b2.ms  # orig: 100
    tau_NMDA_x = .94 * 2.0 * b2.ms
    alpha_NMDA = 0.5 * b2.kHz

    # projections from the external population
    G_extern2inhib = 2.38 * b2.nS
    G_extern2excit = 3.1 * b2.nS

    # projections from the inhibitory populations
    G_inhib2inhib *= weight_scaling_factor
    G_inhib2excit *= weight_scaling_factor

    # projections from the excitatory population
    G_excit2excit *= weight_scaling_factor
    G_excit2inhib *= weight_scaling_factor  # todo: verify this scaling

    t_stimulus_end = t_stimulus_start + t_stimulus_duration
    t_distractor_end = t_distractor_start + t_distractor_duration
    # compute the stimulus index
    stim_center_idx = int(round(N_excitatory / 360. * stimulus_center_deg))
    stim_width_idx = int(round(N_excitatory / 360. * stimulus_width_deg / 2))
    stim_target_idx = [idx % N_excitatory
                       for idx in range(stim_center_idx - stim_width_idx, stim_center_idx + stim_width_idx + 1)]
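    # Worked example (sketch) with the defaults above: N_excitatory=1024 and
    # stimulus_center_deg=180 give stim_center_idx = round(1024/360*180) = 512, and
    # stimulus_width_deg=40 gives stim_width_idx = round(1024/360*20) = 57, so
    # stim_target_idx covers the 115 neurons with indices 455..569.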
    # compute the distractor index
    distr_center_idx = int(round(N_excitatory / 360. * distractor_center_deg))
    distr_width_idx = int(round(N_excitatory / 360. * distractor_width_deg / 2))
    distr_target_idx = [idx % N_excitatory for idx in range(distr_center_idx - distr_width_idx,
                                                            distr_center_idx + distr_width_idx + 1)]

    # precompute the weight profile for the recurrent population
    tmp = math.sqrt(2. * math.pi) * sigma_weight_profile * erf(180. / math.sqrt(2.) / sigma_weight_profile) / 360.
    Jneg_excit2excit = (1. - Jpos_excit2excit * tmp) / (1. - tmp)
    presyn_weight_kernel = \
        [(Jneg_excit2excit +
          (Jpos_excit2excit - Jneg_excit2excit) *
          math.exp(-.5 * (360. * min(j, N_excitatory - j) / N_excitatory) ** 2 / sigma_weight_profile ** 2))
         for j in range(N_excitatory)]
    # validate the normalization condition: (360./N_excitatory)*sum(presyn_weight_kernel)/360.
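    # Optional sanity check (sketch) of the normalization condition above: the mean
    # of the kernel should be approximately 1, e.g.
    # assert abs(sum(presyn_weight_kernel) / N_excitatory - 1.) < 1e-2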
    fft_presyn_weight_kernel = rfft(presyn_weight_kernel)
    weight_profile_45 = deque(presyn_weight_kernel)
    rot_dist = int(round(len(weight_profile_45) / 8))
    weight_profile_45.rotate(rot_dist)

    # define the inhibitory population
    inhib_lif_dynamics = """
        s_NMDA_total : 1  # the post synaptic sum of s. compare with s_NMDA_presyn
        dv/dt = (
        - G_leak_inhib * (v-E_leak_inhib)
        - G_extern2inhib * s_AMPA * (v-E_AMPA)
        - G_inhib2inhib * s_GABA * (v-E_GABA)
        - G_excit2inhib * s_NMDA_total * (v-E_NMDA)/(1.0+1.0*exp(-0.062*v/volt)/3.57)
        )/Cm_inhib : volt (unless refractory)
        ds_AMPA/dt = -s_AMPA/tau_AMPA : 1
        ds_GABA/dt = -s_GABA/tau_GABA : 1
    """

    inhib_pop = NeuronGroup(
        N_inhibitory, model=inhib_lif_dynamics,
        threshold="v>v_firing_threshold_inhib", reset="v=v_reset_inhib", refractory=t_abs_refract_inhib,
        method="rk2")
    # initialize with random voltages:
    inhib_pop.v = numpy.random.uniform(v_reset_inhib / b2.mV, high=v_firing_threshold_inhib / b2.mV,
                                       size=N_inhibitory) * b2.mV
    # set the connections: inhib2inhib
    syn_inhib2inhib = Synapses(inhib_pop, target=inhib_pop, on_pre="s_GABA += 1.0", delay=0.0 * b2.ms)
    syn_inhib2inhib.connect(condition="i!=j", p=1.0)
    # set the connections: extern2inhib
    input_ext2inhib = PoissonInput(target=inhib_pop, target_var="s_AMPA",
                                   N=N_extern_poisson, rate=poisson_firing_rate, weight=1.0)

    # specify the excitatory population:
    excit_lif_dynamics = """
        I_stim : amp
        s_NMDA_total : 1  # the post synaptic sum of s. compare with s_NMDA_presyn
        dv/dt = (
        - G_leak_excit * (v-E_leak_excit)
        - G_extern2excit * s_AMPA * (v-E_AMPA)
        - G_inhib2excit * s_GABA * (v-E_GABA)
        - G_excit2excit * s_NMDA_total * (v-E_NMDA)/(1.0+1.0*exp(-0.062*v/volt)/3.57)
        + I_stim
        )/Cm_excit : volt (unless refractory)
        ds_AMPA/dt = -s_AMPA/tau_AMPA : 1
        ds_GABA/dt = -s_GABA/tau_GABA : 1
        ds_NMDA/dt = -s_NMDA/tau_NMDA_s + alpha_NMDA * x * (1-s_NMDA) : 1
        dx/dt = -x/tau_NMDA_x : 1
    """

    excit_pop = NeuronGroup(N_excitatory, model=excit_lif_dynamics,
                            threshold="v>v_firing_threshold_excit", reset="v=v_reset_excit; x+=1.0",
                            refractory=t_abs_refract_excit, method="rk2")
    # initialize with random voltages:
    excit_pop.v = numpy.random.uniform(v_reset_excit / b2.mV, high=v_firing_threshold_excit / b2.mV,
                                       size=N_excitatory) * b2.mV
    excit_pop.I_stim = 0. * b2.namp
    # set the connections: extern2excit
    input_ext2excit = PoissonInput(target=excit_pop, target_var="s_AMPA",
                                   N=N_extern_poisson, rate=poisson_firing_rate, weight=1.0)

    # set the connections: inhibitory to excitatory
    syn_inhib2excit = Synapses(inhib_pop, target=excit_pop, on_pre="s_GABA += 1.0")
    syn_inhib2excit.connect(p=1.0)

    # set the connections: excitatory to inhibitory NMDA connections
    syn_excit2inhib = Synapses(excit_pop, inhib_pop,
                               model="s_NMDA_total_post = s_NMDA_pre : 1 (summed)", method="rk2")
    syn_excit2inhib.connect(p=1.0)

    # # set the connections: UNSTRUCTURED excitatory to excitatory
    # syn_excit2excit = Synapses(excit_pop, excit_pop,
    #        model= "s_NMDA_total_post = s_NMDA_pre : 1 (summed)", method="rk2")
    # syn_excit2excit.connect(condition="i!=j", p=1.)

    # set the STRUCTURED recurrent input. use a network_operation
    @network_operation()
    def update_nmda_sum():
        fft_s_NMDA = rfft(excit_pop.s_NMDA)
        fft_s_NMDA_total = numpy.multiply(fft_presyn_weight_kernel, fft_s_NMDA)
        s_NMDA_tot = irfft(fft_s_NMDA_total)
        excit_pop.s_NMDA_total_ = s_NMDA_tot

    @network_operation(dt=1 * b2.ms)
    def stimulate_network(t):
        if t >= t_stimulus_start and t < t_stimulus_end:
            # excit_pop[stim_start_i - 15:stim_start_i + 15].I_stim = 0.25 * b2.namp
            # Todo: review indexing
            # print("stim on")
            excit_pop.I_stim[stim_target_idx] = stimulus_strength
        else:
            # print("stim off")
            excit_pop.I_stim = 0. * b2.namp
        # add distractor
        if t >= t_distractor_start and t < t_distractor_end:
            excit_pop.I_stim[distr_target_idx] = distractor_strength

    def get_monitors(pop, nr_monitored, N):
        nr_monitored = min(nr_monitored, (N))
        idx_monitored_neurons = \
            [int(math.ceil(k))
             for k in numpy.linspace(0, N - 1, nr_monitored + 2)][1:-1]  # sample(range(N), nr_monitored)
        rate_monitor = PopulationRateMonitor(pop)
        # record= some_list is not supported? :-(
        spike_monitor = SpikeMonitor(pop, record=idx_monitored_neurons)
        voltage_monitor = StateMonitor(pop, "v", record=idx_monitored_neurons)
        return rate_monitor, spike_monitor, voltage_monitor, idx_monitored_neurons

    # collect data of a subset of neurons:
    rate_monitor_inhib, spike_monitor_inhib, voltage_monitor_inhib, idx_monitored_neurons_inhib = \
        get_monitors(inhib_pop, monitored_subset_size, N_inhibitory)

    rate_monitor_excit, spike_monitor_excit, voltage_monitor_excit, idx_monitored_neurons_excit = \
        get_monitors(excit_pop, monitored_subset_size, N_excitatory)

    b2.run(sim_time)
    return \
        rate_monitor_excit, spike_monitor_excit, voltage_monitor_excit, idx_monitored_neurons_excit,\
        rate_monitor_inhib, spike_monitor_inhib, voltage_monitor_inhib, idx_monitored_neurons_inhib,\
        weight_profile_45
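A minimal usage sketch for the long return tuple documented above (population sizes and sim_time reduced here only to keep the sketch quick; the defaults reproduce the original network):

import brian2 as b2

(rate_mon_excit, spike_mon_excit, v_mon_excit, idx_excit,
 rate_mon_inhib, spike_mon_inhib, v_mon_inhib, idx_inhib,
 weight_profile_45) = simulate_wm(N_excitatory=256, N_inhibitory=64,
                                  sim_time=100. * b2.ms)
print("excitatory spikes:", spike_mon_excit.num_spikes)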
def sim_decision_making_network(N_Excit=384, N_Inhib=96, weight_scaling_factor=5.33,
                                t_stimulus_start=100 * b2.ms, t_stimulus_duration=9999 * b2.ms, coherence_level=0.,
                                stimulus_update_interval=30 * b2.ms, mu0_mean_stimulus_Hz=160.,
                                stimulus_std_Hz=20.,
                                N_extern=1000, firing_rate_extern=9.8 * b2.Hz,
                                w_pos=1.90, f_Subpop_size=0.25,  # .15 in publication [1]
                                max_sim_time=1000. * b2.ms, stop_condition_rate=None,
                                monitored_subset_size=512):
    """

    Args:
        N_Excit (int): total number of neurons in the excitatory population
        N_Inhib (int): nr of neurons in the inhibitory populations
        weight_scaling_factor (float): when the number of neurons is scaled up by a factor of 2, the weights
            should be scaled down by a factor of 1/2
        t_stimulus_start (Quantity): time when the stimulation starts
        t_stimulus_duration (Quantity): duration of the stimulation
        coherence_level (float): coherence of the stimulus. Determines the difference in mean rate
            between the "Left" and "Right" stimulus PoissonGroups
        stimulus_update_interval (Quantity): the mean of the stimulating PoissonGroups is
            re-sampled at this interval
        mu0_mean_stimulus_Hz (float): maximum mean firing rate of the stimulus if c=+1 or c=-1. Each neuron
            in the populations "Left" and "Right" receives an independent poisson input.
        stimulus_std_Hz (float): std deviation of the stimulating PoissonGroups.
        N_extern (int): nr of neurons in the stimulus independent poisson background population
        firing_rate_extern (Quantity): firing rate of the stimulus-independent Poisson background population
        w_pos (float): Scaling (strengthening) of the recurrent weights within the
            subpopulations "Left" and "Right"
        f_Subpop_size (float): fraction of the neurons in the subpopulations "Left" and "Right".
            #left = #right = int(f_Subpop_size*N_Excit).
        max_sim_time (Quantity): simulated time.
        stop_condition_rate (Quantity): An optional stopping criteria: If not None, the simulation stops if the
            firing rate of either subpopulation "Left" or "Right" is above stop_condition_rate.
        monitored_subset_size (int): max nr of neurons for which a state monitor is registered.

    Returns:

        A dictionary with the following keys (strings):
        "rate_monitor_A", "spike_monitor_A", "voltage_monitor_A", "idx_monitored_neurons_A", "rate_monitor_B",
         "spike_monitor_B", "voltage_monitor_B", "idx_monitored_neurons_B", "rate_monitor_Z", "spike_monitor_Z",
         "voltage_monitor_Z", "idx_monitored_neurons_Z", "rate_monitor_inhib", "spike_monitor_inhib",
         "voltage_monitor_inhib", "idx_monitored_neurons_inhib"

    """

    print("simulating {} neurons. Start: {}".format(N_Excit + N_Inhib, time.ctime()))
    t_stimulus_end = t_stimulus_start + t_stimulus_duration

    N_Group_A = int(N_Excit * f_Subpop_size)  # size of the excitatory subpopulation sensitive to stimulus A
    N_Group_B = N_Group_A  # size of the excitatory subpopulation sensitive to stimulus B
    N_Group_Z = N_Excit - N_Group_A - N_Group_B  # (1-2f)Ne excitatory neurons do not respond to either stimulus.

    Cm_excit = 0.5 * b2.nF  # membrane capacitance of excitatory neurons
    G_leak_excit = 25.0 * b2.nS  # leak conductance
    E_leak_excit = -70.0 * b2.mV  # reversal potential
    v_spike_thr_excit = -50.0 * b2.mV  # spike condition
    v_reset_excit = -60.0 * b2.mV  # reset voltage after spike
    t_abs_refract_excit = 2. * b2.ms  # absolute refractory period

    # specify the inhibitory interneurons:
    # N_Inhib = 200
    Cm_inhib = 0.2 * b2.nF
    G_leak_inhib = 20.0 * b2.nS
    E_leak_inhib = -70.0 * b2.mV
    v_spike_thr_inhib = -50.0 * b2.mV
    v_reset_inhib = -60.0 * b2.mV
    t_abs_refract_inhib = 1.0 * b2.ms

    # specify the AMPA synapses
    E_AMPA = 0.0 * b2.mV
    tau_AMPA = 2.5 * b2.ms

    # specify the GABA synapses
    E_GABA = -70.0 * b2.mV
    tau_GABA = 5.0 * b2.ms

    # specify the NMDA synapses
    E_NMDA = 0.0 * b2.mV
    tau_NMDA_s = 100.0 * b2.ms
    tau_NMDA_x = 2. * b2.ms
    alpha_NMDA = 0.5 * b2.kHz

    # projections from the external population
    g_AMPA_extern2inhib = 1.62 * b2.nS
    g_AMPA_extern2excit = 2.1 * b2.nS

    # projections from the inhibitory populations
    g_GABA_inhib2inhib = weight_scaling_factor * 1.25 * b2.nS
    g_GABA_inhib2excit = weight_scaling_factor * 1.60 * b2.nS

    # projections from the excitatory population
    g_AMPA_excit2excit = weight_scaling_factor * 0.012 * b2.nS
    g_AMPA_excit2inhib = weight_scaling_factor * 0.015 * b2.nS
    g_NMDA_excit2excit = weight_scaling_factor * 0.040 * b2.nS
    g_NMDA_excit2inhib = weight_scaling_factor * 0.045 * b2.nS  # stronger projection to inhib.

    # weights and "adjusted" weights.
    w_neg = 1. - f_Subpop_size * (w_pos - 1.) / (1. - f_Subpop_size)
    # We use the same postsyn AMPA and NMDA conductances. Adjust the weights coming from different sources:
    w_ext2inhib = g_AMPA_extern2inhib / g_AMPA_excit2inhib
    w_ext2excit = g_AMPA_extern2excit / g_AMPA_excit2excit
    # other weights are 1
    # print("w_neg={}, w_ext2inhib={}, w_ext2excit={}".format(w_neg, w_ext2inhib, w_ext2excit))

    # Define the inhibitory population
    # dynamics:
    inhib_lif_dynamics = """
        s_NMDA_total : 1  # the post synaptic sum of s. compare with s_NMDA_presyn
        dv/dt = (
        - G_leak_inhib * (v-E_leak_inhib)
        - g_AMPA_excit2inhib * s_AMPA * (v-E_AMPA)
        - g_GABA_inhib2inhib * s_GABA * (v-E_GABA)
        - g_NMDA_excit2inhib * s_NMDA_total * (v-E_NMDA)/(1.0+1.0*exp(-0.062*v/volt)/3.57)
        )/Cm_inhib : volt (unless refractory)
        ds_AMPA/dt = -s_AMPA/tau_AMPA : 1
        ds_GABA/dt = -s_GABA/tau_GABA : 1
    """

    inhib_pop = NeuronGroup(
        N_Inhib, model=inhib_lif_dynamics,
        threshold="v>v_spike_thr_inhib", reset="v=v_reset_inhib", refractory=t_abs_refract_inhib,
        method="rk2")
    # initialize with random voltages:
    inhib_pop.v = rnd.uniform(v_spike_thr_inhib / b2.mV - 4., high=v_spike_thr_inhib / b2.mV - 1., size=N_Inhib) * b2.mV

    # Specify the excitatory population:
    # dynamics:
    excit_lif_dynamics = """
        s_NMDA_total : 1  # the post synaptic sum of s. compare with s_NMDA_presyn
        dv/dt = (
        - G_leak_excit * (v-E_leak_excit)
        - g_AMPA_excit2excit * s_AMPA * (v-E_AMPA)
        - g_GABA_inhib2excit * s_GABA * (v-E_GABA)
        - g_NMDA_excit2excit * s_NMDA_total * (v-E_NMDA)/(1.0+1.0*exp(-0.062*v/volt)/3.57)
        )/Cm_excit : volt (unless refractory)
        ds_AMPA/dt = -s_AMPA/tau_AMPA : 1
        ds_GABA/dt = -s_GABA/tau_GABA : 1
        ds_NMDA/dt = -s_NMDA/tau_NMDA_s + alpha_NMDA * x * (1-s_NMDA) : 1
        dx/dt = -x/tau_NMDA_x : 1
    """

    # define the three excitatory subpopulations.
    # A: subpop receiving stimulus A
    excit_pop_A = NeuronGroup(N_Group_A, model=excit_lif_dynamics,
                              threshold="v>v_spike_thr_excit", reset="v=v_reset_excit",
                              refractory=t_abs_refract_excit, method="rk2")
    excit_pop_A.v = rnd.uniform(E_leak_excit / b2.mV, high=E_leak_excit / b2.mV + 5., size=excit_pop_A.N) * b2.mV

    # B: subpop receiving stimulus B
    excit_pop_B = NeuronGroup(N_Group_B, model=excit_lif_dynamics, threshold="v>v_spike_thr_excit",
                              reset="v=v_reset_excit", refractory=t_abs_refract_excit, method="rk2")
    excit_pop_B.v = rnd.uniform(E_leak_excit / b2.mV, high=E_leak_excit / b2.mV + 5., size=excit_pop_B.N) * b2.mV
    # Z: non-sensitive
    excit_pop_Z = NeuronGroup(N_Group_Z, model=excit_lif_dynamics,
                              threshold="v>v_spike_thr_excit", reset="v=v_reset_excit",
                              refractory=t_abs_refract_excit, method="rk2")
    excit_pop_Z.v = rnd.uniform(v_reset_excit / b2.mV, high=v_spike_thr_excit / b2.mV - 1., size=excit_pop_Z.N) * b2.mV

    # now define the connections:
    # projections FROM EXTERNAL POISSON GROUP: ####################################################
    poisson2Inhib = PoissonInput(target=inhib_pop, target_var="s_AMPA",
                                 N=N_extern, rate=firing_rate_extern, weight=w_ext2inhib)
    poisson2A = PoissonInput(target=excit_pop_A, target_var="s_AMPA",
                             N=N_extern, rate=firing_rate_extern, weight=w_ext2excit)

    poisson2B = PoissonInput(target=excit_pop_B, target_var="s_AMPA",
                             N=N_extern, rate=firing_rate_extern, weight=w_ext2excit)
    poisson2Z = PoissonInput(target=excit_pop_Z, target_var="s_AMPA",
                             N=N_extern, rate=firing_rate_extern, weight=w_ext2excit)

    ###############################################################################################

    # GABA projections FROM INHIBITORY population: ################################################
    syn_inhib2inhib = Synapses(inhib_pop, target=inhib_pop, on_pre="s_GABA += 1.0", delay=0.5 * b2.ms)
    syn_inhib2inhib.connect(p=1.)
    syn_inhib2A = Synapses(inhib_pop, target=excit_pop_A, on_pre="s_GABA += 1.0", delay=0.5 * b2.ms)
    syn_inhib2A.connect(p=1.)
    syn_inhib2B = Synapses(inhib_pop, target=excit_pop_B, on_pre="s_GABA += 1.0", delay=0.5 * b2.ms)
    syn_inhib2B.connect(p=1.)
    syn_inhib2Z = Synapses(inhib_pop, target=excit_pop_Z, on_pre="s_GABA += 1.0", delay=0.5 * b2.ms)
    syn_inhib2Z.connect(p=1.)
    ###############################################################################################

    # AMPA projections FROM EXCITATORY A: #########################################################
    syn_AMPA_A2A = Synapses(excit_pop_A, target=excit_pop_A, on_pre="s_AMPA += w_pos", delay=0.5 * b2.ms)
    syn_AMPA_A2A.connect(p=1.)
    syn_AMPA_A2B = Synapses(excit_pop_A, target=excit_pop_B, on_pre="s_AMPA += w_neg", delay=0.5 * b2.ms)
    syn_AMPA_A2B.connect(p=1.)
    syn_AMPA_A2Z = Synapses(excit_pop_A, target=excit_pop_Z, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_A2Z.connect(p=1.)
    syn_AMPA_A2inhib = Synapses(excit_pop_A, target=inhib_pop, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_A2inhib.connect(p=1.)
    ###############################################################################################

    # AMPA projections FROM EXCITATORY B: #########################################################
    syn_AMPA_B2A = Synapses(excit_pop_B, target=excit_pop_A, on_pre="s_AMPA += w_neg", delay=0.5 * b2.ms)
    syn_AMPA_B2A.connect(p=1.)
    syn_AMPA_B2B = Synapses(excit_pop_B, target=excit_pop_B, on_pre="s_AMPA += w_pos", delay=0.5 * b2.ms)
    syn_AMPA_B2B.connect(p=1.)
    syn_AMPA_B2Z = Synapses(excit_pop_B, target=excit_pop_Z, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_B2Z.connect(p=1.)
    syn_AMPA_B2inhib = Synapses(excit_pop_B, target=inhib_pop, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_B2inhib.connect(p=1.)
    ###############################################################################################

    # AMPA projections FROM EXCITATORY Z: #########################################################
    syn_AMPA_Z2A = Synapses(excit_pop_Z, target=excit_pop_A, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_Z2A.connect(p=1.)
    syn_AMPA_Z2B = Synapses(excit_pop_Z, target=excit_pop_B, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_Z2B.connect(p=1.)
    syn_AMPA_Z2Z = Synapses(excit_pop_Z, target=excit_pop_Z, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_Z2Z.connect(p=1.)
    syn_AMPA_Z2inhib = Synapses(excit_pop_Z, target=inhib_pop, on_pre="s_AMPA += 1.0", delay=0.5 * b2.ms)
    syn_AMPA_Z2inhib.connect(p=1.)
    ###############################################################################################

    # NMDA projections FROM EXCITATORY to INHIB, A,B,Z
    @network_operation()
    def update_nmda_sum():
        sum_sNMDA_A = sum(excit_pop_A.s_NMDA)
        sum_sNMDA_B = sum(excit_pop_B.s_NMDA)
        sum_sNMDA_Z = sum(excit_pop_Z.s_NMDA)
        # note the _ at the end of s_NMDA_total_ disables unit checking
        inhib_pop.s_NMDA_total_ = (1.0 * sum_sNMDA_A + 1.0 * sum_sNMDA_B + 1.0 * sum_sNMDA_Z)
        excit_pop_A.s_NMDA_total_ = (w_pos * sum_sNMDA_A + w_neg * sum_sNMDA_B + w_neg * sum_sNMDA_Z)
        excit_pop_B.s_NMDA_total_ = (w_neg * sum_sNMDA_A + w_pos * sum_sNMDA_B + w_neg * sum_sNMDA_Z)
        excit_pop_Z.s_NMDA_total_ = (1.0 * sum_sNMDA_A + 1.0 * sum_sNMDA_B + 1.0 * sum_sNMDA_Z)

    # set a self-recurrent synapse to introduce a delay when updating the intermediate
    # gating variable x
    syn_x_A2A = Synapses(excit_pop_A, excit_pop_A, on_pre="x += 1.", delay=0.5 * b2.ms)
    syn_x_A2A.connect(j="i")
    syn_x_B2B = Synapses(excit_pop_B, excit_pop_B, on_pre="x += 1.", delay=0.5 * b2.ms)
    syn_x_B2B.connect(j="i")
    syn_x_Z2Z = Synapses(excit_pop_Z, excit_pop_Z, on_pre="x += 1.", delay=0.5 * b2.ms)
    syn_x_Z2Z.connect(j="i")
    ###############################################################################################

    # Define the stimulus: two PoissonGroups with a time-dependent mean rate.
    poissonStimulus2A = PoissonGroup(N_Group_A, 0. * b2.Hz)
    syn_Stim2A = Synapses(poissonStimulus2A, excit_pop_A, on_pre="s_AMPA+=w_ext2excit")
    syn_Stim2A.connect(j="i")
    poissonStimulus2B = PoissonGroup(N_Group_B, 0. * b2.Hz)
    syn_Stim2B = Synapses(poissonStimulus2B, excit_pop_B, on_pre="s_AMPA+=w_ext2excit")
    syn_Stim2B.connect(j="i")

    @network_operation(dt=stimulus_update_interval)
    def update_poisson_stimulus(t):
        if t >= t_stimulus_start and t < t_stimulus_end:
            offset_A = mu0_mean_stimulus_Hz * (0.5 + 0.5 * coherence_level)
            offset_B = mu0_mean_stimulus_Hz * (0.5 - 0.5 * coherence_level)
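            # Worked example (sketch): with mu0_mean_stimulus_Hz=160. and, say,
            # coherence_level=0.2, the means are offset_A = 160*(0.5+0.1) = 96 Hz
            # and offset_B = 160*(0.5-0.1) = 64 Hz; each is then jittered below
            # with std stimulus_std_Hz.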

            rate_A = numpy.random.normal(offset_A, stimulus_std_Hz)
            rate_A = (max(0, rate_A)) * b2.Hz  # avoid negative rate
            rate_B = numpy.random.normal(offset_B, stimulus_std_Hz)
            rate_B = (max(0, rate_B)) * b2.Hz

            poissonStimulus2A.rates = rate_A
            poissonStimulus2B.rates = rate_B
            # print("stim on. rate_A= {}, rate_B = {}".format(rate_A, rate_B))
        else:
            # print("stim off")
            poissonStimulus2A.rates = 0.
            poissonStimulus2B.rates = 0.

    ###############################################################################################

    def get_monitors(pop, monitored_subset_size):
        """
        Internal helper.
        Args:
            pop:
            monitored_subset_size:

        Returns:

        """
        monitored_subset_size = min(monitored_subset_size, pop.N)
        idx_monitored_neurons = sample(range(pop.N), monitored_subset_size)
        rate_monitor = PopulationRateMonitor(pop)
        # record parameter: record=idx_monitored_neurons is not supported???
        spike_monitor = SpikeMonitor(pop, record=idx_monitored_neurons)
        voltage_monitor = StateMonitor(pop, "v", record=idx_monitored_neurons)
        return rate_monitor, spike_monitor, voltage_monitor, idx_monitored_neurons

    # collect data of a subset of neurons:
    rate_monitor_inhib, spike_monitor_inhib, voltage_monitor_inhib, idx_monitored_neurons_inhib = \
        get_monitors(inhib_pop, monitored_subset_size)

    rate_monitor_A, spike_monitor_A, voltage_monitor_A, idx_monitored_neurons_A = \
        get_monitors(excit_pop_A, monitored_subset_size)

    rate_monitor_B, spike_monitor_B, voltage_monitor_B, idx_monitored_neurons_B = \
        get_monitors(excit_pop_B, monitored_subset_size)

    rate_monitor_Z, spike_monitor_Z, voltage_monitor_Z, idx_monitored_neurons_Z = \
        get_monitors(excit_pop_Z, monitored_subset_size)

    if stop_condition_rate is None:
        b2.run(max_sim_time)
    else:
        sim_sum = 0. * b2.ms
        sim_batch = 100. * b2.ms
        samples_in_batch = int(floor(sim_batch / b2.defaultclock.dt))
        avg_rate_in_batch = 0
        while (sim_sum < max_sim_time) and (avg_rate_in_batch < stop_condition_rate):
            b2.run(sim_batch)
            avg_A = numpy.mean(rate_monitor_A.rate[-samples_in_batch:])
            avg_B = numpy.mean(rate_monitor_B.rate[-samples_in_batch:])
            avg_rate_in_batch = max(avg_A, avg_B)
            sim_sum += sim_batch

    print("sim end: {}".format(time.ctime()))
    ret_vals = dict()

    ret_vals["rate_monitor_A"] = rate_monitor_A
    ret_vals["spike_monitor_A"] = spike_monitor_A
    ret_vals["voltage_monitor_A"] = voltage_monitor_A
    ret_vals["idx_monitored_neurons_A"] = idx_monitored_neurons_A

    ret_vals["rate_monitor_B"] = rate_monitor_B
    ret_vals["spike_monitor_B"] = spike_monitor_B
    ret_vals["voltage_monitor_B"] = voltage_monitor_B
    ret_vals["idx_monitored_neurons_B"] = idx_monitored_neurons_B

    ret_vals["rate_monitor_Z"] = rate_monitor_Z
    ret_vals["spike_monitor_Z"] = spike_monitor_Z
    ret_vals["voltage_monitor_Z"] = voltage_monitor_Z
    ret_vals["idx_monitored_neurons_Z"] = idx_monitored_neurons_Z

    ret_vals["rate_monitor_inhib"] = rate_monitor_inhib
    ret_vals["spike_monitor_inhib"] = spike_monitor_inhib
    ret_vals["voltage_monitor_inhib"] = voltage_monitor_inhib
    ret_vals["idx_monitored_neurons_inhib"] = idx_monitored_neurons_inhib

    return ret_vals
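A minimal usage sketch that accesses a few of the dictionary keys listed in the docstring (a short max_sim_time is assumed here just to keep the sketch fast):

import brian2 as b2

results = sim_decision_making_network(N_Excit=384, N_Inhib=96,
                                       coherence_level=0.2,
                                       max_sim_time=200. * b2.ms)
print("mean rate A:", results["rate_monitor_A"].rate.mean())
print("mean rate B:", results["rate_monitor_B"].rate.mean())
print("spikes in A:", results["spike_monitor_A"].num_spikes)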