Example #1
def test_record_native_model():
    nrn = pyNN.neuron
    
    init_logging(logfile=None, debug=True)
    nrn.setup()

    parameters = {'g_leak': 0.0003}
    p1 = nrn.Population(10, SimpleNeuronType, parameters)
    print(p1.get('g_leak'))
    p1.rset('gnabar', RandomDistribution('uniform', [0.10, 0.14]))
    print(p1.get('gnabar'))
    p1.initialize('v', -63.0)

    current_source = nrn.StepCurrentSource({'times': [50.0, 110.0, 150.0, 210.0],
                                            'amplitudes': [0.4, 0.6, -0.2, 0.2]})
    p1.inject(current_source)

    p2 = nrn.Population(1, nrn.SpikeSourcePoisson, {'rate': 100.0})

    p1._record('apical(1.0).v')
    p1._record('soma(0.5).ina')

    connector = nrn.AllToAllConnector(weights=0.1)
    prj_alpha = nrn.Projection(p2, p1, connector, target='apical.ampa')
    
    nrn.run(250.0)
    
    assert_equal(p1.recorders['apical(1.0).v'].get().shape, (25010, 3))
    id, t, v = p1.recorders['apical(1.0).v'].get().T
    return id, t, v
Example #2
def test_record_vm_and_gsyn_from_assembly(sim):
    from pyNN.utility import init_logging
    init_logging(logfile=None, debug=True)
    dt = 0.1
    tstop = 100.0
    sim.setup(timestep=dt, min_delay=dt)
    cells = sim.Population(5, sim.IF_cond_exp()) + sim.Population(6, sim.EIF_cond_exp_isfa_ista())
    inputs = sim.Population(5, sim.SpikeSourcePoisson(rate=50.0))
    sim.connect(inputs, cells, weight=0.1, delay=0.5, receptor_type='inhibitory')
    sim.connect(inputs, cells, weight=0.1, delay=0.3, receptor_type='excitatory')
    cells.record('v')
    cells[2:9].record(['gsyn_exc', 'gsyn_inh'])
#    for p in cells.populations:
#        assert_equal(p.recorders['v'].recorded, set(p.all_cells))

#    assert_equal(cells.populations[0].recorders['gsyn'].recorded, set(cells.populations[0].all_cells[2:5]))
#    assert_equal(cells.populations[1].recorders['gsyn'].recorded, set(cells.populations[1].all_cells[0:4]))
    sim.run(tstop)
    data0 = cells.populations[0].get_data().segments[0]
    data1 = cells.populations[1].get_data().segments[0]
    data_all = cells.get_data().segments[0]
    vm_p0 = data0.filter(name='v')[0]
    vm_p1 = data1.filter(name='v')[0]
    vm_all = data_all.filter(name='v')[0]
    gsyn_p0 = data0.filter(name='gsyn_exc')[0]
    gsyn_p1 = data1.filter(name='gsyn_exc')[0]
    gsyn_all = data_all.filter(name='gsyn_exc')[0]

    n_points = int(tstop / dt) + 1
    assert_equal(vm_p0.shape, (n_points, 5))
    assert_equal(vm_p1.shape, (n_points, 6))
    assert_equal(vm_all.shape, (n_points, 11))
    assert_equal(gsyn_p0.shape, (n_points, 3))
    assert_equal(gsyn_p1.shape, (n_points, 4))
    assert_equal(gsyn_all.shape, (n_points, 7))

    assert_array_equal(vm_p1[:, 3], vm_all[:, 8])

    assert_array_equal(vm_p0.channel_index.index, numpy.arange(5))
    assert_array_equal(vm_p1.channel_index.index, numpy.arange(6))
    assert_array_equal(vm_all.channel_index.index, numpy.arange(11))
    assert_array_equal(vm_p0.channel_index.channel_ids, numpy.arange(5))
    assert_array_equal(vm_p1.channel_index.channel_ids, numpy.arange(6))
    assert_array_equal(vm_all.channel_index.channel_ids, numpy.arange(11))
    assert_array_equal(gsyn_p0.channel_index.index, numpy.arange(3))
    assert_array_equal(gsyn_p1.channel_index.index, numpy.arange(4))
    assert_array_equal(gsyn_all.channel_index.index, numpy.arange(7))
    assert_array_equal(gsyn_p0.channel_index.channel_ids, numpy.array([2, 3, 4]))
    assert_array_equal(gsyn_p1.channel_index.channel_ids, numpy.arange(4))
    assert_array_equal(gsyn_all.channel_index.channel_ids, numpy.arange(2, 9))

    sim.end()
Example #3
def ticket195(sim):
    """
    Check that the `connect()` function works correctly with single IDs (see
    http://neuralensemble.org/trac/PyNN/ticket/195)
    """
    init_logging(None, debug=True)
    sim.setup(timestep=0.01)
    pre = sim.Population(10, sim.SpikeSourceArray(spike_times=range(1,10)))
    post = sim.Population(10, sim.IF_cond_exp())
    #sim.connect(pre[0], post[0], weight=0.01, delay=0.1, p=1)
    sim.connect(pre[0:1], post[0:1], weight=0.01, delay=0.1, p=1)
    #prj = sim.Projection(pre, post, sim.FromListConnector([(0, 0, 0.01, 0.1)]))
    post.record(['spikes', 'v'])
    sim.run(100.0)
    assert_arrays_almost_equal(post.get_data().segments[0].spiketrains[0], numpy.array([13.4])*pq.ms, 0.5)
Example #4
def scenario4(sim):
    """
    Network with spatial structure
    """
    init_logging(logfile=None, debug=True)
    sim.setup()
    rng = NumpyRNG(seed=76454, parallel_safe=False)

    input_layout = RandomStructure(boundary=Cuboid(width=500.0, height=500.0, depth=100.0),
                                   origin=(0, 0, 0), rng=rng)
    inputs = sim.Population(100, sim.SpikeSourcePoisson(rate=RandomDistribution('uniform', low=3.0, high=7.0, rng=rng)),
                            structure=input_layout, label="inputs")
    output_layout = Grid3D(aspect_ratioXY=1.0, aspect_ratioXZ=5.0, dx=10.0, dy=10.0, dz=10.0,
                           x0=0.0, y0=0.0, z0=200.0)
    outputs = sim.Population(200, sim.EIF_cond_exp_isfa_ista(),
                             initial_values={'v': RandomDistribution('normal', mu=-65.0, sigma=5.0, rng=rng),
                                             'w': RandomDistribution('normal', mu=0.0, sigma=1.0, rng=rng)},
                             structure=output_layout,  # 10x10x2 grid
                             label="outputs")
    logger.debug("Output population positions:\n %s", outputs.positions)
    DDPC = sim.DistanceDependentProbabilityConnector
    input_connectivity = DDPC("0.5*exp(-d/100.0)", rng=rng)
    recurrent_connectivity = DDPC("sin(pi*d/250.0)**2", rng=rng)
    depressing = sim.TsodyksMarkramSynapse(weight=RandomDistribution('normal', mu=0.1, sigma=0.02, rng=rng),
                                           delay="0.5 + d/100.0",
                                           U=0.5, tau_rec=800.0, tau_facil=0.0)
    facilitating = sim.TsodyksMarkramSynapse(weight=0.05,
                                             delay="0.2 + d/100.0",
                                             U=0.04, tau_rec=100.0,
                                             tau_facil=1000.0)
    input_connections = sim.Projection(inputs, outputs, input_connectivity,
                                       receptor_type='excitatory',
                                       synapse_type=depressing,
                                       space=Space(axes='xy'),
                                       label="input connections")
    recurrent_connections = sim.Projection(outputs, outputs, recurrent_connectivity,
                                           receptor_type='inhibitory',
                                           synapse_type=facilitating,
                                           space=Space(periodic_boundaries=((-100.0, 100.0), (-100.0, 100.0), None)),  # should add "calculate_boundaries" method to Structure classes
                                           label="recurrent connections")
    outputs.record('spikes')
    outputs.sample(10, rng=rng).record('v')
    sim.run(1000.0)
    data = outputs.get_data()
    sim.end()
    return data
Example #5
def test_native_stdp_model():
    nest = pyNN.nest
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)

    nest.setup()

    p1 = nest.Population(10, nest.IF_cond_exp())
    p2 = nest.Population(10, nest.SpikeSourcePoisson())

    stdp_params = {'Wmax': 50.0, 'lambda': 0.015, 'weight': 0.001}
    stdp = nest.native_synapse_type("stdp_synapse")(**stdp_params)

    connector = nest.AllToAllConnector()

    prj = nest.Projection(p2, p1, connector, receptor_type='excitatory',
                          synapse_type=stdp)
Example #6
def test_native_stdp_model():
    nest = pyNN.nest
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)
    
    nest.setup()
    
    p1 = nest.Population(10, nest.IF_cond_exp)
    p2 = nest.Population(10, nest.SpikeSourcePoisson)
    
    stdp_params = {'Wmax': 50.0, 'lambda': 0.015}
    stdp = nest.NativeSynapseDynamics("stdp_synapse", stdp_params)
    
    connector = nest.AllToAllConnector(weights=0.001)
    
    prj = nest.Projection(p2, p1, connector, target='excitatory',
                          synapse_dynamics=stdp)
Example #7
def test_record_vm_and_gsyn_from_assembly(sim):
    from pyNN.utility import init_logging
    init_logging(logfile=None, debug=True)
    set_simulator(sim)
    dt = 0.1
    tstop = 100.0
    sim.setup(timestep=dt)
    cells = sim.Population(5, sim.IF_cond_exp) + sim.Population(6, sim.EIF_cond_exp_isfa_ista)
    inputs = sim.Population(5, sim.SpikeSourcePoisson, {'rate': 50.0})
    sim.connect(inputs, cells, weight=0.1, delay=0.5, synapse_type='inhibitory')
    sim.connect(inputs, cells, weight=0.1, delay=0.3, synapse_type='excitatory')
    cells.record_v()
    cells[2:9].record_gsyn()
    for p in cells.populations:
        assert_equal(p.recorders['v'].recorded, set(p.all_cells))
    
    assert_equal(cells.populations[0].recorders['gsyn'].recorded, set(cells.populations[0].all_cells[2:5]))
    assert_equal(cells.populations[1].recorders['gsyn'].recorded, set(cells.populations[1].all_cells[0:4]))
    sim.run(tstop)
    vm_p0 = cells.populations[0].get_v()
    vm_p1 = cells.populations[1].get_v()
    vm_all = cells.get_v()
    gsyn_p0 = cells.populations[0].get_gsyn()
    gsyn_p1 = cells.populations[1].get_gsyn()
    gsyn_all = cells.get_gsyn()
    assert_equal(numpy.unique(vm_p0[:,0]).tolist(), [ 0., 1., 2., 3., 4.])
    assert_equal(numpy.unique(vm_p1[:,0]).tolist(), [ 0., 1., 2., 3., 4., 5.])
    assert_equal(numpy.unique(vm_all[:,0]).astype(int).tolist(), range(11))
    assert_equal(numpy.unique(gsyn_p0[:,0]).tolist(), [ 2., 3., 4.])
    assert_equal(numpy.unique(gsyn_p1[:,0]).tolist(), [ 0., 1., 2., 3.])
    assert_equal(numpy.unique(gsyn_all[:,0]).astype(int).tolist(), range(2,9))
    
    n_points = int(tstop/dt) + 1
    assert_equal(vm_p0.shape[0], 5*n_points)
    assert_equal(vm_p1.shape[0], 6*n_points)
    assert_equal(vm_all.shape[0], 11*n_points)
    assert_equal(gsyn_p0.shape[0], 3*n_points)
    assert_equal(gsyn_p1.shape[0], 4*n_points)
    assert_equal(gsyn_all.shape[0], 7*n_points)
    
    assert_arrays_equal(vm_p1[vm_p1[:,0]==3][:,2], vm_all[vm_all[:,0]==8][:,2])

    sim.end()
Example #8
def test_record_native_model():
    if not have_nest:
        raise SkipTest
    nest = pyNN.nest
    from pyNN.random import RandomDistribution

    init_logging(logfile=None, debug=True)

    nest.setup()

    parameters = {'tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron")(**parameters))
    p1.initialize(V_m=-70.0, Theta=-50.0)
    p1.set(theta_eq=-51.5)
    #assert_arrays_equal(p1.get('theta_eq'), -51.5*numpy.ones((10,)))
    assert_equal(p1.get('theta_eq'), -51.5)
    print(p1.get('tau_m'))
    p1.set(tau_m=RandomDistribution('uniform', low=15.0, high=20.0))
    print(p1.get('tau_m'))

    current_source = nest.StepCurrentSource(times=[50.0, 110.0, 150.0, 210.0],
                                            amplitudes=[0.01, 0.02, -0.02, 0.01])
    p1.inject(current_source)

    p2 = nest.Population(1, nest.native_cell_type("poisson_generator")(rate=200.0))

    print("Setting up recording")
    p2.record('spikes')
    p1.record('V_m')

    connector = nest.AllToAllConnector()
    syn = nest.StaticSynapse(weight=0.001)

    prj_ampa = nest.Projection(p2, p1, connector, syn, receptor_type='AMPA')

    tstop = 250.0
    nest.run(tstop)

    vm = p1.get_data().segments[0].analogsignals[0]
    n_points = int(tstop / nest.get_time_step()) + 1
    assert_equal(vm.shape, (n_points, n_cells))
    assert vm.max() > 0.0  # should have some spikes
Example #9
def initialize():
    global sim
    global options
    global extra
    global rngseed
    global parallel_safe
    global rng
    global n_ext
    global n_exc
    global n_inh
    
    sim, options = get_simulator(
        ("--plot-figure", "Plot the connections to a file."))

    init_logging(None, debug=True)

    # === General parameters =================================================

    threads = 1
    rngseed = 98765
    parallel_safe = True
    rng = NumpyRNG(seed=rngseed, parallel_safe=parallel_safe)

    # === general network parameters (except connections) ====================

    n_ext = 60   # number of external stimuli
    n_exc = 60  # number of excitatory cells
    n_inh = 60  # number of inhibitory cells

    # === Options ============================================================

    extra = {'loglevel': 2, 'useSystemSim': True,
            'maxNeuronLoss': 0., 'maxSynapseLoss': 0.4,
            'hardwareNeuronSize': 8,
            'threads': threads,
            'filename': "connections.xml",
            'label': 'VA'}
    if sim.__name__ == "pyNN.hardware.brainscales":
        extra['hardware'] = sim.hardwareSetup['small']

    if options.simulator == "neuroml":
        extra["file"] = "connections.xml"
Example #10
def test_record_native_model():
    nest = pyNN.nest
    from pyNN.random import RandomDistribution
    from pyNN.utility import init_logging

    init_logging(logfile=None, debug=True)
    
    nest.setup()
    
    parameters = {'Tau_m': 17.0}
    n_cells = 10
    p1 = nest.Population(n_cells, nest.native_cell_type("ht_neuron"), parameters)
    p1.initialize('V_m', -70.0)
    p1.initialize('Theta', -50.0)
    p1.set('Theta_eq', -51.5)
    assert_equal(p1.get('Theta_eq'), [-51.5]*10)
    print(p1.get('Tau_m'))
    p1.rset('Tau_m', RandomDistribution('uniform', [15.0, 20.0]))
    print(p1.get('Tau_m'))
    
    current_source = nest.StepCurrentSource({'times' : [50.0, 110.0, 150.0, 210.0],
                                            'amplitudes' : [0.01, 0.02, -0.02, 0.01]})
    p1.inject(current_source)
    
    p2 = nest.Population(1, nest.native_cell_type("poisson_generator"), {'rate': 200.0})
    
    print "Setting up recording"
    p2.record()
    p1._record('V_m')
    
    connector = nest.AllToAllConnector(weights=0.001)
    
    prj_ampa = nest.Projection(p2, p1, connector, target='AMPA')
    
    tstop = 250.0
    nest.run(tstop)
    
    n_points = int(tstop/nest.get_time_step()) + 1
    assert_equal(p1.recorders['V_m'].get().shape, (n_points*n_cells, 3))
    id, t, v = p1.recorders['V_m'].get().T
    assert v.max() > 0.0 # should have some spikes
Example #11
def test_record_native_model():
    if not have_neuron:
        raise SkipTest
    nrn = pyNN.neuron

    init_logging(logfile=None, debug=True)
    nrn.setup()

    parameters = {'g_leak': 0.0003}
    p1 = nrn.Population(10, SimpleNeuronType(**parameters))
    print(p1.get('g_leak'))
    p1.rset('gnabar', RandomDistribution('uniform', low=0.10, high=0.14))
    print(p1.get('gnabar'))
    p1.initialize(v=-63.0)

    current_source = nrn.StepCurrentSource(times=[50.0, 110.0, 150.0, 210.0],
                                           amplitudes=[0.4, 0.6, -0.2, 0.2])
    p1.inject(current_source)

    p2 = nrn.Population(1, nrn.SpikeSourcePoisson(rate=100.0))

    p1.record(['apical(1.0).v', 'soma(0.5).ina'])

    connector = nrn.AllToAllConnector()
    syn = nrn.StaticSynapse(weight=0.1)
    prj_alpha = nrn.Projection(p2, p1, connector, syn, receptor_type='apical.ampa')

    nrn.run(250.0)

    data = p1.get_data().segments[0].analogsignalarrays
    assert_equal(len(data), 2)  # one array per variable
    assert_equal(data[0].name, 'apical(1.0).v')
    assert_equal(data[1].name, 'soma(0.5).ina')
    assert_equal(data[0].sampling_rate, 10.0 * pq.kHz)
    assert_equal(data[0].units, pq.mV)
    assert_equal(data[1].units, pq.mA / pq.cm**2)
    assert_equal(data[0].t_start, 0.0 * pq.ms)
    assert_equal(data[0].t_stop, 250.1 * pq.ms)  # would prefer if it were 250.0, but this is a fundamental Neo issue
    assert_equal(data[0].shape, (2501, 10))
    return data
Example #12
def t4():
    print('Loading Fourth XML File (iaf-2coba-Model)')
    print('----------------------------------------')
    component = readers.XMLReader.read_component(Join(tenml_dir,
                                                      'iaf_2coba.10ml'),
                                                 component_name='iaf')
    writers.XMLWriter.write(
        component,
        '/tmp/nineml_toxml4.xml',
    )
    model = readers.XMLReader.read_component(Join(tenml_dir, 'iaf_2coba.10ml'))

    from nineml.abstraction_layer.flattening import flatten
    from nineml.abstraction_layer.dynamics.utils.modifiers import (
        DynamicsModifier)

    flatcomponent = flatten(model, componentname='iaf_2coba')
    DynamicsModifier.close_analog_port(component=flatcomponent,
                                       port_name='iaf_iSyn',
                                       value='0')

    writers.XMLWriter.write(flatcomponent, '/tmp/nineml_out_iaf_2coba.9ml')

    import pyNN.neuron as sim
    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)
    print('Attempting to simulate From Model:')
    print('----------------------------------')
    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_2coba",
        nineml_model=flatcomponent,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='cobaExcit', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='cobaInhib', weight_connector='q'),
        ])

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'cobaExcit.tau': 2.0,
        'cobaInhib.tau': 5.0,
        'cobaExcit.vrev': 0.0,
        'cobaInhib.vrev': -70.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(2, sim.SpikeSourcePoisson, {'rate': 100})

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [
        sim.Projection(input[0:1], cells, connector, target='cobaExcit'),
        sim.Projection(input[1:2], cells, connector, target='cobaInhib')
    ]

    cells._record('iaf_V')
    cells._record('cobaExcit_g')
    cells._record('cobaInhib_g')
    cells._record('cobaExcit_I')
    cells._record('cobaInhib_I')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V",
                                   filter=[cells[0]])
    cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_g'].write("Results/nineml_neuron.g_inh",
                                         filter=[cells[0]])
    cells.recorders['cobaExcit_I'].write("Results/nineml_neuron.g_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_I'].write("Results/nineml_neuron.g_inh",
                                         filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInh = cells.recorders['cobaInhib_g'].get()[:, 2]
    gExc = cells.recorders['cobaExcit_g'].get()[:, 2]
    IInh = cells.recorders['cobaInhib_I'].get()[:, 2]
    IExc = cells.recorders['cobaExcit_I'].get()[:, 2]

    import pylab
    pylab.subplot(311)
    pylab.ylabel('Voltage')
    pylab.plot(t, v)

    pylab.subplot(312)
    pylab.ylabel('Conductance')
    pylab.plot(t, gInh)
    pylab.plot(t, gExc)

    pylab.subplot(313)
    pylab.ylabel('Current')
    pylab.plot(t, IInh)
    pylab.plot(t, IExc)

    pylab.suptitle("From Tree-Model Pathway")
    pylab.show()

    sim.end()
Example #13
from os.path import isfile, isdir, realpath, dirname, exists
import os
from sys import exit, stderr, argv, path, modules

CODE_DIR = '{}/..'.format(dirname(realpath(__file__)))
path.insert(1, '{}/src'.format(CODE_DIR))

# === Configure the simulator ================================================
noise_std = 0.001
sim, options = get_simulator(
    ("--plot-figure", "Plot the simulation results to a file.", {
        "action": "store_true"
    }), ("--debug", "Print debugging information"))

if options.debug:
    init_logging(None, debug=True)

sim.setup(timestep=0.01, min_delay=1.0)
Trec_min = 20  # in minutes
Trec_s = Trec_min * 60  # in seconds
Trec_ms = Trec_s * 1000.  # simulation time in ms

# === Build and instrument the network =======================================

neurons = sim.Population(
    1, sim.Izhikevich(a=0.02, b=0.2, c=-50, d=2, i_offset=[0.0]))

noise = sim.NoisyCurrentSource(mean=0.011,
                               stdev=noise_std,
                               start=1.0,
                               stop=Trec_ms,
Example #14
def test_record_vm_and_gsyn_from_assembly(sim):
    from pyNN.utility import init_logging
    init_logging(logfile=None, debug=True)
    dt = 0.1
    tstop = 100.0
    sim.setup(timestep=dt, min_delay=dt)
    cells = sim.Population(5, sim.IF_cond_exp()) + sim.Population(
        6, sim.EIF_cond_exp_isfa_ista())
    inputs = sim.Population(5, sim.SpikeSourcePoisson(rate=50.0))
    sim.connect(inputs,
                cells,
                weight=0.1,
                delay=0.5,
                receptor_type='inhibitory')
    sim.connect(inputs,
                cells,
                weight=0.1,
                delay=0.3,
                receptor_type='excitatory')
    cells.record('v')
    cells[2:9].record(['gsyn_exc', 'gsyn_inh'])
    #    for p in cells.populations:
    #        assert_equal(p.recorders['v'].recorded, set(p.all_cells))

    #    assert_equal(cells.populations[0].recorders['gsyn'].recorded, set(cells.populations[0].all_cells[2:5]))
    #    assert_equal(cells.populations[1].recorders['gsyn'].recorded, set(cells.populations[1].all_cells[0:4]))
    sim.run(tstop)
    data0 = cells.populations[0].get_data().segments[0]
    data1 = cells.populations[1].get_data().segments[0]
    data_all = cells.get_data().segments[0]
    vm_p0 = data0.filter(name='v')[0]
    vm_p1 = data1.filter(name='v')[0]
    vm_all = data_all.filter(name='v')[0]
    gsyn_p0 = data0.filter(name='gsyn_exc')[0]
    gsyn_p1 = data1.filter(name='gsyn_exc')[0]
    gsyn_all = data_all.filter(name='gsyn_exc')[0]

    n_points = int(tstop / dt) + 1
    assert_equal(vm_p0.shape, (n_points, 5))
    assert_equal(vm_p1.shape, (n_points, 6))
    assert_equal(vm_all.shape, (n_points, 11))
    assert_equal(gsyn_p0.shape, (n_points, 3))
    assert_equal(gsyn_p1.shape, (n_points, 4))
    assert_equal(gsyn_all.shape, (n_points, 7))

    assert_array_equal(vm_p1[:, 3], vm_all[:, 8])

    assert_array_equal(vm_p0.channel_index.index, numpy.arange(5))
    assert_array_equal(vm_p1.channel_index.index, numpy.arange(6))
    assert_array_equal(vm_all.channel_index.index, numpy.arange(11))
    assert_array_equal(vm_p0.channel_index.channel_ids, numpy.arange(5))
    assert_array_equal(vm_p1.channel_index.channel_ids, numpy.arange(6))
    assert_array_equal(vm_all.channel_index.channel_ids, numpy.arange(11))
    assert_array_equal(gsyn_p0.channel_index.index, numpy.arange(3))
    assert_array_equal(gsyn_p1.channel_index.index, numpy.arange(4))
    assert_array_equal(gsyn_all.channel_index.index, numpy.arange(7))
    assert_array_equal(gsyn_p0.channel_index.channel_ids,
                       numpy.array([2, 3, 4]))
    assert_array_equal(gsyn_p1.channel_index.channel_ids, numpy.arange(4))
    assert_array_equal(gsyn_all.channel_index.channel_ids, numpy.arange(2, 9))

    sim.end()
Example #15
 def test_initlogging_debug(self):
     utility.init_logging("test.log", debug=True, num_processes=2, rank=99)
     assert os.path.exists("test.log.99")
     os.remove("test.log.99")
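The test above relies on init_logging appending the MPI rank to the log file name when num_processes and rank are supplied. A minimal sketch of the two call patterns used throughout these examples (the file name "run.log" is just a placeholder, not taken from any of the projects):

from pyNN.utility import init_logging

# Console/debug logging, as in most of the test functions on this page.
init_logging(logfile=None, debug=True)

# Per-process log files: with num_processes and rank given, messages go to
# "<logfile>.<rank>", e.g. "run.log.0" on the first MPI process.
init_logging("run.log", debug=True, num_processes=2, rank=0)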
Example #16
from pylab import *
from pyNN.utility import Timer, init_logging, ProgressBar
import os

simulator_name = sys.argv[1]
exec ("from pyNN.%s import *" % simulator_name)
test_cases = [int(x) for x in sys.argv[2:]]

from pyNN.recording import files
from pyNN.space import *

timer = Timer()
progress_bar = ProgressBar(mode="fixed", width=20)
init_logging("connectors_benchmark_%s.log" % simulator_name, debug=True)


def draw_rf(cell, positions, connections, color="k"):
    idx = numpy.where(connections[:, 1] == cell)[0]
    sources = connections[idx, 0]
    for src in sources:
        plot([positions[cell, 1], positions[src, 1]], [positions[cell, 2], positions[src, 2]], c=color)


def distances(pos_1, pos_2, N):
    dx = abs(pos_1[:, 0] - pos_2[:, 0])
    dy = abs(pos_1[:, 1] - pos_2[:, 1])
    dx = numpy.minimum(dx, N - dx)
    dy = numpy.minimum(dy, N - dy)
    return sqrt(dx * dx + dy * dy)

Example #17
def main():
    ## Uninteresting setup, start up the visu process,...
    logfile = make_logfile_name()
    ensure_dir(logfile)
    f_h = logging.FileHandler(logfile)
    f_h.setLevel(SUBDEBUG)
    d_h = logging.StreamHandler()
    d_h.setLevel(INFO)
    utils.configure_loggers(debug_handler=d_h, file_handler=f_h)
    parent_conn, child_conn = multiprocessing.Pipe()
    p = multiprocessing.Process(
        target=visualisation.visualisation_process_f,
        name="display_process", args=(child_conn, LOGGER))
    p.start()

    pynnn.setup(timestep=SIMU_TIMESTEP)
    init_logging("logfile", debug=True)
    LOGGER.info("Simulation started with command: %s", sys.argv)

    ## Network setup
    # First population
    p1 = pynnn.Population(100, pynnn.IF_curr_alpha,
                          structure=pynnn.space.Grid2D())
    p1.set({'tau_m':20, 'v_rest':-65})
    # Second population
    p2 = pynnn.Population(20, pynnn.IF_curr_alpha,
                          cellparams={'tau_m': 15.0, 'cm': 0.9})
    # Projection 1 -> 2
    prj1_2 = pynnn.Projection(
        p1, p2, pynnn.AllToAllConnector(allow_self_connections=False),
        target='excitatory')
    # I may need to make own PyNN Connector class. Otherwise, this is
    # neat:  exponentially decaying probability of connections depends
    # on distance. Distance is only calculated using x and y, which
    # are on a toroidal topo with boundaries at 0 and 500.
    connector = pynnn.DistanceDependentProbabilityConnector(
        "exp(-abs(d))",
        space=pynnn.Space(
            axes='xy', periodic_boundaries=((0,500), (0,500), None)))
    # Alternately, the powerful connection set algebra (python CSA
    # module) can be used.
    weight_distr = pynnn.RandomDistribution(distribution='gamma',
                                            parameters=[1,0.1])
    prj1_2.randomizeWeights(weight_distr)

    # This one is in NEST but not in Brian:
    # source = pynnn.NoisyCurrentSource(
    #     mean=100, stdev=50, dt=SIMU_TIMESTEP, 
    #     start=10.0, stop=SIMU_DURATION, rng=pynnn.NativeRNG(seed=100)) 
    source = pynnn.DCSource(
        start=10.0, stop=SIMU_DURATION, amplitude=100) 
    source.inject_into(list(p1.sample(50).all()))

    p1.record(to_file=False)
    p2.record(to_file=False)

    ## Build and send the visualizable network structure
    adapter = pynn_to_visu.PynnToVisuAdapter(LOGGER)
    adapter.add_pynn_population(p1)
    adapter.add_pynn_population(p2)
    adapter.add_pynn_projection(p1, p2, prj1_2.connection_manager)
    adapter.commit_structure()
    
    parent_conn.send(adapter.output_struct)
    
    # Number of chunks to run the simulation:
    n_chunks = SIMU_DURATION // SIMU_TO_VISU_MESSAGE_PERIOD
    last_chunk_duration = SIMU_DURATION % SIMU_TO_VISU_MESSAGE_PERIOD
    # Run the simulator
    for visu_i in xrange(n_chunks):
        pynnn.run(SIMU_TO_VISU_MESSAGE_PERIOD)
        parent_conn.send(adapter.make_activity_update_message())
        LOGGER.debug("real current p1 spike counts: %s",
                     p1.get_spike_counts().values())
    if last_chunk_duration > 0:
        pynnn.run(last_chunk_duration)
        parent_conn.send(adapter.make_activity_update_message())
    # Cleanup
    pynnn.end()
    # Wait for the visualisation process to terminate
    p.join(VISU_PROCESS_JOIN_TIMEOUT)
Example #18
def std_pynn_simulation(test_component, parameters, initial_values,
                        synapse_components, records, plot=True, sim_time=100.,
                        synapse_weights=1.0, syn_input_rate=100):

    from nineml.abstraction_layer.flattening import ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml
    from pyNN.neuron.nineml import CoBaSyn

    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.01, min_delay=0.1)

    synapse_components_ML = [CoBaSyn(namespace=ns,  weight_connector=wc)
                             for (ns, wc) in synapse_components]

    celltype_cls = pyNNml.nineml_celltype_from_model(
        name=test_component.name,
        nineml_model=test_component,
        synapse_components=synapse_components_ML,
    )

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)
    initial_values = ComponentFlattener.flatten_namespace_dict(initial_values)

    cells = sim.Population(1, celltype_cls, parameters)

    # Set Initial Values:
    for state, state_initial_value in initial_values.items():
        cells.initialize(state, state_initial_value)

    # For each synapse type, create a spike source:
    if synapse_components:
        input = sim.Population(
            len(synapse_components), sim.SpikeSourcePoisson,
            {'rate': syn_input_rate})
        connector = sim.OneToOneConnector(weights=synapse_weights, delays=0.5)

        conn = []
        for i, (ns, weight_connector) in enumerate(synapse_components):
            proj = sim.Projection(input[i:i + 1], cells, connector, target=ns),
            conn.append(proj)

    # Setup the Records:
    for record in records:
        cells.record(record.what)

    cells.record('spikes')

    # Run the simulation:
    sim.run(sim_time)

    if len(records) == 0:
        assert False

    # Write the Results to a file:
    cells.write_data("Results/nineml.pkl")

    # Plot the values:

    results = cells.get_data().segments[0]

    # Create a list of the tags:
    tags = []
    for record in records:
        if not record.tag in tags:
            tags.append(record.tag)

    # Plot the graphs:
    if plot:
        import pylab
        nGraphs = len(tags)

        # Plot the Records:
        for graphIndex, tag in enumerate(tags):
            pylab.subplot(nGraphs, 1, graphIndex + 1)

            for r in records:
                if r.tag != tag:
                    continue
                trace = results.filter(name=r.what)[0]
                pylab.plot(trace.times, trace, label=r.label)

            pylab.ylabel(tag)
            pylab.legend()

        # Plot the spikes:
        # pylab.subplot(nGraphs,1, len(tags)+1)
        # t_spikes = cells[0:1].getSpikes()[:1]
        # pylab.plot( [1,3],[1,3],'x'  )
        # print t_spikes
        # if t_spikes:
        #    pylab.scatter( t_spikes, t_spikes )

        # Add the X axis to the last plot:
        pylab.xlabel('t [ms]')

        # pylab.suptitle("From Tree-Model Pathway")
        pylab.show()

    sim.end()

    return results
Example #19
def scenario3(sim):
    """
    Simple feed-forward network with additive STDP. The second half of
    the presynaptic neurons fires faster than the first half, so their
    connections should be potentiated more.
    """

    init_logging(logfile=None, debug=True)
    second = 1000.0
    duration = 10
    tau_m = 20 # ms
    cm = 1.0 # nF
    v_reset = -60
    cell_parameters = dict(
        tau_m = tau_m,
        cm = cm,
        v_rest = -70,
        e_rev_E = 0,
        e_rev_I = -70,
        v_thresh = -54,
        v_reset = v_reset,
        tau_syn_E = 5,
        tau_syn_I = 5,
    )
    g_leak = cm/tau_m # µS

    w_min = 0.0*g_leak
    w_max = 0.05*g_leak

    r1 = 5.0
    r2 = 40.0

    sim.setup()
    pre = sim.Population(100, sim.SpikeSourcePoisson())
    post = sim.Population(10, sim.IF_cond_exp())

    pre.set(duration=duration*second)
    pre.set(start=0.0)
    pre[:50].set(rate=r1)
    pre[50:].set(rate=r2)
    assert_equal(pre[49].rate, r1)
    assert_equal(pre[50].rate, r2)
    post.set(**cell_parameters)
    post.initialize(v=RandomDistribution('normal', mu=v_reset, sigma=5.0))

    stdp = sim.STDPMechanism(
                sim.SpikePairRule(tau_plus=20.0, tau_minus=20.0,
                                  A_plus=0.01, A_minus=0.01),
                sim.AdditiveWeightDependence(w_min=w_min, w_max=w_max),
                #dendritic_delay_fraction=0.5))
                dendritic_delay_fraction=1)

    connections = sim.Projection(pre, post, sim.AllToAllConnector(),
                                 synapse_type=stdp,
                                 receptor_type='excitatory')

    initial_weight_distr = RandomDistribution('uniform', low=w_min, high=w_max)
    connections.randomizeWeights(initial_weight_distr)
    initial_weights = connections.get('weight', format='array', gather=False)
    assert initial_weights.min() >= w_min
    assert initial_weights.max() < w_max
    assert initial_weights[0,0] != initial_weights[1,0]

    pre.record('spikes')
    post.record('spikes')
    post[0:1].record('v')

    sim.run(duration*second)

    actual_rate = pre.mean_spike_count()/duration
    expected_rate = (r1+r2)/2
    errmsg = "actual rate: %g  expected rate: %g" % (actual_rate, expected_rate)
    assert abs(actual_rate - expected_rate) < 1, errmsg
    #assert abs(pre[:50].mean_spike_count()/duration - r1) < 1
    #assert abs(pre[50:].mean_spike_count()/duration- r2) < 1
    final_weights = connections.get('weight', format='array', gather=False)
    assert initial_weights[0,0] != final_weights[0,0]

    try:
        import scipy.stats
    except ImportError:
        raise SkipTest
    t,p = scipy.stats.ttest_ind(initial_weights[:50,:].flat, initial_weights[50:,:].flat)
    assert p > 0.05, p
    t,p = scipy.stats.ttest_ind(final_weights[:50,:].flat, final_weights[50:,:].flat)
    assert p < 0.01, p
    assert final_weights[:50,:].mean() < final_weights[50:,:].mean()
    sim.end()
    return initial_weights, final_weights, pre, post, connections
Example #20
        currentTimer = time.time()


def printMessage(message):
    global rank
    if rank == 0:
        print("\033[2;46m" + (message).ljust(60) + "\033[m")


###################### MAIN BODY ###########################
## Rank for MPI ##
numberOfNodes = sim.num_processes()
rank = sim.rank()

# Log all messages from DEBUG level upwards to the file 'sim.log' (one file per MPI rank)
init_logging('sim.log', num_processes=numberOfNodes, rank=rank, level=logging.DEBUG)

## Start message ##
if rank == 0:
    print("\033[1;45m" + (("Lattice Simulation").rjust(38)).ljust(60) + "\033[m")
    print("\033[0;44m" + ("MPI_Rank: %d  " % rank + " MPI_Size: %d " % numberOfNodes).ljust(60) + "\033[m")


## Timer ##
currentTimer = time.time()
totalTimer = time.time()

## Default global parameters ##
dt = 0.1  # simulation time step in milliseconds
tinit = 500.0  # simtime over which the network is allowed to settle down
tsim = 2000.0  # total simulation length in milliseconds
Example #21
"""

from pyNN.utility import init_logging, normalized_filename
from pyNN.parameters import Sequence
from pyNN.space import Grid2D
from importlib import import_module
import numpy
from lazyarray import sqrt
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('simulator_name')
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()

init_logging(None, debug=args.debug)

sim = import_module("pyNN.%s" % args.simulator_name)

simtime = 100.0
input_rate = 20.0
n_cells = 9

sim.setup()

cell_type = sim.IF_cond_exp(tau_m=10.0,
#                            v_rest=lambda x, y, z: -60.0 - sqrt((x**2 + y**2)/100),
#                            v_thresh=lambda x, y, z: -55.0 + x/10.0)
                            v_rest=lambda i: -60.0 + i,
                            v_thresh=lambda i: -55.0 + i)
Example #22
               i * numpy.ones_like(model_parameters.input_spike_times),
               "|",
               label=label,
               markersize=25)
    tick_labels.append(label)
    panel.set_ylim(-0.5, i + 0.5)
    panel.set_yticks(range(7))
    panel.set_yticklabels(tick_labels, size=6)
    return fig


# ==============================================================================
if __name__ == "__main__":
    from NeuroTools import datastore

    init_logging("test_synaptic_integration.log", debug=False)
    parameters = load_parameters(sys.argv[1])
    sim_list = sys.argv[2:]
    assert len(sim_list) >= 1, "Must specify at least one simulator."
    exec("from pyNN import %s" % ", ".join(sim_list))
    sim_list = [eval(s) for s in sim_list]

    spike_data, vm_data, model_parameters = run(parameters, sim_list)

    if len(sim_list) >= 2:
        distances = calc_distances(spike_data)
        print(distances)
        vm_diff = calc_Vm_diff(vm_data)
        print(vm_diff)

        ds = datastore.ShelveDataStore(
Example #23
def scenario3(sim):
    """
    Simple feed-forward network with additive STDP. The second half of
    the presynaptic neurons fires faster than the first half, so their
    connections should be potentiated more.
    """

    init_logging(logfile=None, debug=True)
    second = 1000.0
    duration = 10
    tau_m = 20 # ms
    cm = 1.0 # nF
    v_reset = -60
    cell_parameters = dict(
        tau_m = tau_m,
        cm = cm,
        v_rest = -70,
        e_rev_E = 0,
        e_rev_I = -70,
        v_thresh = -54,
        v_reset = v_reset,
        tau_syn_E = 5,
        tau_syn_I = 5,
    )
    g_leak = cm/tau_m # µS

    w_min = 0.0*g_leak
    w_max = 0.05*g_leak

    r1 = 5.0
    r2 = 40.0

    sim.setup()
    pre = sim.Population(100, sim.SpikeSourcePoisson())
    post = sim.Population(10, sim.IF_cond_exp())

    pre.set(duration=duration*second)
    pre.set(start=0.0)
    pre[:50].set(rate=r1)
    pre[50:].set(rate=r2)
    assert_equal(pre[49].rate, r1)
    assert_equal(pre[50].rate, r2)
    post.set(**cell_parameters)
    post.initialize(v=RandomDistribution('normal', (v_reset, 5.0)))

    stdp = sim.STDPMechanism(
                sim.SpikePairRule(tau_plus=20.0, tau_minus=20.0 ),
                sim.AdditiveWeightDependence(w_min=w_min, w_max=w_max,
                                             A_plus=0.01, A_minus=0.01),
                #dendritic_delay_fraction=0.5))
                dendritic_delay_fraction=1)

    connections = sim.Projection(pre, post, sim.AllToAllConnector(),
                                 synapse_type=stdp,
                                 receptor_type='excitatory')

    initial_weight_distr = RandomDistribution('uniform', (w_min, w_max))
    connections.randomizeWeights(initial_weight_distr)
    initial_weights = connections.get('weight', format='array', gather=False)
    assert initial_weights.min() >= w_min
    assert initial_weights.max() < w_max
    assert initial_weights[0,0] != initial_weights[1,0]

    pre.record('spikes')
    post.record('spikes')
    post[0:1].record('v')

    sim.run(duration*second)

    actual_rate = pre.mean_spike_count()/duration
    expected_rate = (r1+r2)/2
    errmsg = "actual rate: %g  expected rate: %g" % (actual_rate, expected_rate)
    assert abs(actual_rate - expected_rate) < 1, errmsg
    #assert abs(pre[:50].mean_spike_count()/duration - r1) < 1
    #assert abs(pre[50:].mean_spike_count()/duration- r2) < 1
    final_weights = connections.get('weight', format='array', gather=False)
    assert initial_weights[0,0] != final_weights[0,0]

    try:
        import scipy.stats
    except ImportError:
        raise SkipTest
    t,p = scipy.stats.ttest_ind(initial_weights[:50,:].flat, initial_weights[50:,:].flat)
    assert p > 0.05, p
    t,p = scipy.stats.ttest_ind(final_weights[:50,:].flat, final_weights[50:,:].flat)
    assert p < 0.01, p
    assert final_weights[:50,:].mean() < final_weights[50:,:].mean()

    return initial_weights, final_weights, pre, post, connections
Example #24
from pylab import *
from pyNN.utility import Timer, init_logging, ProgressBar
import os

simulator_name = sys.argv[1]
exec("from pyNN.%s import *" % simulator_name)
test_cases = [int(x) for x in sys.argv[2:]]

from pyNN.recording import files
from pyNN.space import *

timer = Timer()
progress_bar = ProgressBar(mode='fixed', width=20)
init_logging("connectors_benchmark_%s.log" % simulator_name, debug=True)


def draw_rf(cell, positions, connections, color='k'):
    idx = numpy.where(connections[:, 1] == cell)[0]
    sources = connections[idx, 0]
    for src in sources:
        plot([positions[cell, 1], positions[src, 1]],
             [positions[cell, 2], positions[src, 2]],
             c=color)


def distances(pos_1, pos_2, N):
    dx = abs(pos_1[:, 0] - pos_2[:, 0])
    dy = abs(pos_1[:, 1] - pos_2[:, 1])
    dx = numpy.minimum(dx, N - dx)
    dy = numpy.minimum(dy, N - dy)
    return sqrt(dx * dx + dy * dy)
Example #25
def main():
    ## Uninteresting setup, start up the visu process,...
    logfile = make_logfile_name()
    ensure_dir(logfile)
    f_h = logging.FileHandler(logfile)
    f_h.setLevel(SUBDEBUG)
    d_h = logging.StreamHandler()
    d_h.setLevel(INFO)
    utils.configure_loggers(debug_handler=d_h, file_handler=f_h)
    parent_conn, child_conn = multiprocessing.Pipe()
    p = multiprocessing.Process(target=visualisation.visualisation_process_f,
                                name="display_process",
                                args=(child_conn, LOGGER))
    p.start()

    pynnn.setup(timestep=SIMU_TIMESTEP)
    init_logging("logfile", debug=True)
    LOGGER.info("Simulation started with command: %s", sys.argv)

    ## Network setup
    # First population
    p1 = pynnn.Population(100,
                          pynnn.IF_curr_alpha,
                          structure=pynnn.space.Grid2D())
    p1.set({'tau_m': 20, 'v_rest': -65})
    # Second population
    p2 = pynnn.Population(20,
                          pynnn.IF_curr_alpha,
                          cellparams={
                              'tau_m': 15.0,
                              'cm': 0.9
                          })
    # Projection 1 -> 2
    prj1_2 = pynnn.Projection(
        p1,
        p2,
        pynnn.AllToAllConnector(allow_self_connections=False),
        target='excitatory')
    # I may need to make own PyNN Connector class. Otherwise, this is
    # neat:  exponentially decaying probability of connections depends
    # on distance. Distance is only calculated using x and y, which
    # are on a toroidal topo with boundaries at 0 and 500.
    connector = pynnn.DistanceDependentProbabilityConnector(
        "exp(-abs(d))",
        space=pynnn.Space(axes='xy',
                          periodic_boundaries=((0, 500), (0, 500), None)))
    # Alternately, the powerful connection set algebra (python CSA
    # module) can be used.
    weight_distr = pynnn.RandomDistribution(distribution='gamma',
                                            parameters=[1, 0.1])
    prj1_2.randomizeWeights(weight_distr)

    # This one is in NEST but not in Brian:
    # source = pynnn.NoisyCurrentSource(
    #     mean=100, stdev=50, dt=SIMU_TIMESTEP,
    #     start=10.0, stop=SIMU_DURATION, rng=pynnn.NativeRNG(seed=100))
    source = pynnn.DCSource(start=10.0, stop=SIMU_DURATION, amplitude=100)
    source.inject_into(list(p1.sample(50).all()))

    p1.record(to_file=False)
    p2.record(to_file=False)

    ## Build and send the visualizable network structure
    adapter = pynn_to_visu.PynnToVisuAdapter(LOGGER)
    adapter.add_pynn_population(p1)
    adapter.add_pynn_population(p2)
    adapter.add_pynn_projection(p1, p2, prj1_2.connection_manager)
    adapter.commit_structure()

    parent_conn.send(adapter.output_struct)

    # Number of chunks to run the simulation:
    n_chunks = SIMU_DURATION // SIMU_TO_VISU_MESSAGE_PERIOD
    last_chunk_duration = SIMU_DURATION % SIMU_TO_VISU_MESSAGE_PERIOD
    # Run the simulator
    for visu_i in xrange(n_chunks):
        pynnn.run(SIMU_TO_VISU_MESSAGE_PERIOD)
        parent_conn.send(adapter.make_activity_update_message())
        LOGGER.debug("real current p1 spike counts: %s",
                     p1.get_spike_counts().values())
    if last_chunk_duration > 0:
        pynnn.run(last_chunk_duration)
        parent_conn.send(adapter.make_activity_update_message())
    # Cleanup
    pynnn.end()
    # Wait for the visualisation process to terminate
    p.join(VISU_PROCESS_JOIN_TIMEOUT)
Example #26
def run(plot_and_show=True):
    import sys
    from os.path import abspath, realpath, join
    import numpy
    import nineml

    root = abspath(join(realpath(nineml.__path__[0]), "../../.."))
    sys.path.append(join(root, "lib9ml/python/examples/AL"))
    sys.path.append(join(root, "code_generation/nmodl"))     
    sys.path.append(join(root, "code_generation/nest2"))       
               

    #from nineml.abstraction_layer.example_models import  get_hierachical_iaf_3coba
    from nineml.abstraction_layer.testing_utils import TestableComponent
    from nineml.abstraction_layer.flattening import  ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml

    from pyNN.utility import init_logging


    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)


    #test_component = get_hierachical_iaf_3coba()
    test_component = TestableComponent('hierachical_iaf_3coba')()

    from nineml.abstraction_layer.writers import DotWriter
    DotWriter.write(test_component, 'test1.dot')
    

    from nineml.abstraction_layer.writers import XMLWriter
    XMLWriter.write(test_component, 'iaf_3coba.xml')


    celltype_cls = pyNNml.nineml_celltype_from_model(
                                            name = "iaf_3coba",
                                            nineml_model = test_component,
                                            synapse_components = [
                                                pyNNml.CoBaSyn( namespace='AMPA',  weight_connector='q' ),
                                                pyNNml.CoBaSyn( namespace='GABAa',  weight_connector='q' ),
                                                pyNNml.CoBaSyn( namespace='GABAb',  weight_connector='q' ),
                                                       ]
                                            )

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'AMPA.tau': 2.0,
        'GABAa.tau': 5.0,
        'GABAb.tau': 50.0,
        'AMPA.vrev': 0.0,
        'GABAa.vrev': -70.0,
        'GABAb.vrev': -95.0,

    }


    parameters = ComponentFlattener.flatten_namespace_dict( parameters )


    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99) # neuron not refractory at start
    cells.initialize('regime', 1002) # temporary hack

    input = sim.Population(3, sim.SpikeSourceArray)

    numpy.random.seed(12345)
    input[0].spike_times = numpy.add.accumulate(numpy.random.exponential(1000.0/100.0, size=1000))
    input[1].spike_times = numpy.add.accumulate(numpy.random.exponential(1000.0/20.0, size=1000))
    input[2].spike_times = numpy.add.accumulate(numpy.random.exponential(1000.0/50.0, size=1000))

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)


    conn = [sim.Projection(input[0:1], cells, connector, target='AMPA'),
            sim.Projection(input[1:2], cells, connector, target='GABAa'),
            sim.Projection(input[2:3], cells, connector, target='GABAb')]


    cells._record('iaf_V')
    cells._record('AMPA_g')
    cells._record('GABAa_g')
    cells._record('GABAb_g')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
    cells.recorders['AMPA_g'].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
    cells.recorders['GABAa_g'].write("Results/nineml_neuron.g_gabaA", filter=[cells[0]])
    cells.recorders['GABAb_g'].write("Results/nineml_neuron.g_gabaB", filter=[cells[0]])


    t = cells.recorders['iaf_V'].get()[:,1]
    v = cells.recorders['iaf_V'].get()[:,2]
    gInhA = cells.recorders['GABAa_g'].get()[:,2]
    gInhB = cells.recorders['GABAb_g'].get()[:,2]
    gExc = cells.recorders['AMPA_g'].get()[:,2]

    if plot_and_show:
        import pylab
        pylab.subplot(211)
        pylab.plot(t,v)
        pylab.ylabel('voltage [mV]')
        pylab.suptitle("AMPA, GABA_A, GABA_B")
        pylab.subplot(212)
        pylab.plot(t,gInhA,label='GABA_A')
        pylab.plot(t,gInhB, label='GABA_B')
        pylab.plot(t,gExc, label='AMPA')
        pylab.ylabel('conductance [nS]')
        pylab.xlabel('t [ms]')
        pylab.legend()

        pylab.show()

    sim.end()
Example #27
        currentTimer = time.time()

def printMessage(message):
    global rank
    if rank==0:
        print("\033[2;46m" + (message).ljust(60) + "\033[m")


###################### MAIN BODY ###########################
## Rank for MPI ##
global numberOfNodes, rank
numberOfNodes = sim.num_processes()
rank = sim.rank()

# Log all messages from DEBUG level upwards to the file 'sim.log' (one file per MPI rank)
init_logging('sim.log', num_processes=numberOfNodes, rank=rank, level=logging.DEBUG)

## Start message ##
if rank==0:
    print("\033[1;45m" + (("Lattice Simulation").rjust(38)).ljust(60) + "\033[m")
    print("\033[0;44m" + ("MPI_Rank: %d  " % rank + " MPI_Size: %d " % numberOfNodes).ljust(60) + "\033[m")


## Timer ##
global currentTimer, totalTimer
currentTimer = time.time()
totalTimer = time.time()

## Default global parameters ##
dt = 0.1 # simulation time step in milliseconds
tinit = 500.0 # simtime over which the network is allowed to settle down
Example #28
 def test_initlogging_debug(self):
     utility.init_logging("test.log", debug=True, num_processes=2, rank=99)
     assert os.path.exists("test.log.99")
     os.remove("test.log.99")
Example #29
def t4():
    print('Loading Fourth XML File (iaf-2coba-Model)')
    print('----------------------------------------')
    component = readers.XMLReader.read_component(
        Join(tenml_dir, 'iaf_2coba.10ml'), component_name='iaf')
    writers.XMLWriter.write(component, '/tmp/nineml_toxml4.xml', )
    model = readers.XMLReader.read_component(Join(tenml_dir, 'iaf_2coba.10ml'))

    from nineml.abstraction_layer.flattening import flatten
    from nineml.abstraction_layer.component_modifiers import ComponentModifier

    flatcomponent = flatten(model, componentname='iaf_2coba')
    ComponentModifier.close_analog_port(component=flatcomponent, port_name='iaf_iSyn', value='0')

    writers.XMLWriter.write(flatcomponent, '/tmp/nineml_out_iaf_2coba.9ml')

    import pyNN.neuron as sim
    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)
    print('Attempting to simulate From Model:')
    print('----------------------------------')
    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_2coba",
        nineml_model=flatcomponent,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='cobaExcit',  weight_connector='q'),
            pyNNml.CoBaSyn(namespace='cobaInhib',  weight_connector='q'),
        ]
    )

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'cobaExcit.tau': 2.0,
        'cobaInhib.tau': 5.0,
        'cobaExcit.vrev': 0.0,
        'cobaInhib.vrev': -70.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(2, sim.SpikeSourcePoisson, {'rate': 100})

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [sim.Projection(input[0:1], cells, connector, target='cobaExcit'),
            sim.Projection(input[1:2], cells, connector, target='cobaInhib')]

    cells._record('iaf_V')
    cells._record('cobaExcit_g')
    cells._record('cobaInhib_g')
    cells._record('cobaExcit_I')
    cells._record('cobaInhib_I')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
    cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
    cells.recorders['cobaInhib_g'].write("Results/nineml_neuron.g_inh", filter=[cells[0]])
    cells.recorders['cobaExcit_I'].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
    cells.recorders['cobaInhib_I'].write("Results/nineml_neuron.g_inh", filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInh = cells.recorders['cobaInhib_g'].get()[:, 2]
    gExc = cells.recorders['cobaExcit_g'].get()[:, 2]
    IInh = cells.recorders['cobaInhib_I'].get()[:, 2]
    IExc = cells.recorders['cobaExcit_I'].get()[:, 2]

    import pylab
    pylab.subplot(311)
    pylab.ylabel('Voltage')
    pylab.plot(t, v)

    pylab.subplot(312)
    pylab.ylabel('Conductance')
    pylab.plot(t, gInh)
    pylab.plot(t, gExc)

    pylab.subplot(313)
    pylab.ylabel('Current')
    pylab.plot(t, IInh)
    pylab.plot(t, IExc)

    pylab.suptitle("From Tree-Model Pathway")
    pylab.show()

    sim.end()
Example #30
def run(plot_and_show=True):

    import sys
    from os.path import abspath, realpath, join
    import nineml

    root = abspath(join(realpath(nineml.__path__[0]), "../../.."))
    sys.path.append(join(root, "lib9ml/python/examples/AL"))
    sys.path.append(join(root, "code_generation/nmodl"))

    from nineml.abstraction_layer.example_models import get_hierachical_iaf_2coba
    from nineml.abstraction_layer.flattening import ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml

    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)

    testModel = get_hierachical_iaf_2coba()

    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_2coba",
        nineml_model=testModel,
        synapse_components=[
            pyNNml.CoBaSyn(
                namespace='cobaExcit',  weight_connector='q'),
            pyNNml.CoBaSyn(
                namespace='cobaInhib',  weight_connector='q'),
        ]
    )

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'cobaExcit.tau': 2.0,
        'cobaInhib.tau': 5.0,
        'cobaExcit.vrev': 0.0,
        'cobaInhib.vrev': -70.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(2, sim.SpikeSourcePoisson, {'rate': 100})

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)
    # connector = sim.OneToOneConnector(weights=20.0, delays=0.5)

    conn = [sim.Projection(input[0:1], cells, connector, target='cobaExcit'),
            sim.Projection(input[1:2], cells, connector, target='cobaInhib')]

    cells._record('iaf_V')
    cells._record('cobaExcit_g')
    cells._record('cobaInhib_g')
    cells._record('regime')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
    cells.recorders['regime'].write("Results/nineml_neuron.regime", filter=[cells[0]])
    cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_exc", filter=[cells[0]])
    cells.recorders['cobaInhib_g'].write("Results/nineml_neuron.g_inh", filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    regime = cells.recorders['regime'].get()[:, 2]
    gInh = cells.recorders['cobaInhib_g'].get()[:, 2]
    gExc = cells.recorders['cobaExcit_g'].get()[:, 2]

    if plot_and_show:
        import pylab
        pylab.subplot(311)
        pylab.plot(t, v)
        pylab.subplot(312)
        pylab.plot(t, gInh)
        pylab.plot(t, gExc)
        pylab.subplot(313)
        pylab.plot(t, regime)
        pylab.ylim((999, 1005))
        pylab.suptitle("From Tree-Model Pathway")
        pylab.show()

    sim.end()
Exemple #31
0
from collections import defaultdict
import argparse
import json
import sys
import addict

# Import nest
import pyNN.nest as pynn

from pyNN.utility import init_logging
init_logging("logfile", debug=False)

# Import model utils
import pynn_model


def spikes_to_json(spikes):
    """Convert spikes to json format.

    Args:
        spikes: Spikes as an array of tuples.
    """
    spiking_neurons = defaultdict(list)
    for spike in spikes:
        spiking_neurons[int(spike[0])].append(spike[1])

    return spiking_neurons.values()
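
# A minimal usage sketch (hypothetical data), assuming each spike is a
# (neuron_id, spike_time) tuple as consumed above; with Python 3.7+ the
# returned lists follow the first-spike order of the neurons:
#
#     example_spikes = [(0, 1.5), (0, 7.2), (3, 4.0)]
#     json.dumps(list(spikes_to_json(example_spikes)))
#     # -> '[[1.5, 7.2], [4.0]]'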


def execute(conf, train, test):
Exemple #32
0
        currentTimer = time.time()


def printMessage(message):
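    # Only rank 0 prints; the message is padded to 60 columns and wrapped in an
    # ANSI escape sequence ("\033[2;46m" = faint text on a cyan background,
    # "\033[m" = reset).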
    global rank
    if rank == 0:
        print("\033[2;46m" + (message).ljust(60) + "\033[m")


###################### MAIN BODY ###########################
## Rank for MPI ##
numberOfNodes = sim.num_processes()
rank = sim.rank()

# Log to stderr, only warnings, errors, critical
init_logging(None, num_processes=numberOfNodes, rank=rank, level=logging.WARNING)

## Start message ##
if rank == 0:
    print("\033[1;45m" + (("Lattice Simulation").rjust(38)).ljust(60) + "\033[m")
    print("\033[0;44m" + ("MPI_Rank: %d  " % rank + " MPI_Size: %d " % numberOfNodes).ljust(60) + "\033[m")


## Timer ##
currentTimer = time.time()
totalTimer = time.time()

## Default global parameters ##
dt = 0.1  # simulation time step in milliseconds
tinit = 500.0  # simtime over which the network is allowed to settle down
tsim = 2000.0  # total simulation length in milliseconds
Exemple #33
0
def run(plot_and_show=True):
    import sys
    from os.path import abspath, realpath, join
    import numpy
    import nineml

    root = abspath(join(realpath(nineml.__path__[0]), "../../.."))
    sys.path.append(join(root, "lib9ml/python/examples/AL"))
    sys.path.append(join(root, "code_generation/nmodl"))
    sys.path.append(join(root, "code_generation/nest2"))

    #from nineml.abstraction_layer.example_models import  get_hierachical_iaf_3coba
    from nineml.abstraction_layer.testing_utils import TestableComponent
    from nineml.abstraction_layer.flattening import ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml

    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)

    #test_component = get_hierachical_iaf_3coba()
    test_component = TestableComponent('hierachical_iaf_3coba')()

    from nineml.abstraction_layer.writers import DotWriter
    DotWriter.write(test_component, 'test1.dot')

    from nineml.abstraction_layer.writers import XMLWriter
    XMLWriter.write(test_component, 'iaf_3coba.xml')

    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_3coba",
        nineml_model=test_component,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='AMPA', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='GABAa', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='GABAb', weight_connector='q'),
        ])

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'AMPA.tau': 2.0,
        'GABAa.tau': 5.0,
        'GABAb.tau': 50.0,
        'AMPA.vrev': 0.0,
        'GABAa.vrev': -70.0,
        'GABAb.vrev': -95.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(3, sim.SpikeSourceArray)

    numpy.random.seed(12345)
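    # Cumulative sums of exponential inter-spike intervals (drawn in ms, mean
    # 1000/rate) give approximately Poisson spike trains at 100, 20 and 50 Hz
    # for the three spike sources.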
    input[0].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 100.0, size=1000))
    input[1].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 20.0, size=1000))
    input[2].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 50.0, size=1000))

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [
        sim.Projection(input[0:1], cells, connector, target='AMPA'),
        sim.Projection(input[1:2], cells, connector, target='GABAa'),
        sim.Projection(input[2:3], cells, connector, target='GABAb')
    ]

    cells._record('iaf_V')
    cells._record('AMPA_g')
    cells._record('GABAa_g')
    cells._record('GABAb_g')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V",
                                   filter=[cells[0]])
    cells.recorders['AMPA_g'].write("Results/nineml_neuron.g_exc",
                                    filter=[cells[0]])
    cells.recorders['GABAa_g'].write("Results/nineml_neuron.g_gabaA",
                                     filter=[cells[0]])
    cells.recorders['GABAb_g'].write("Results/nineml_neuron.g_gabaB",
                                     filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInhA = cells.recorders['GABAa_g'].get()[:, 2]
    gInhB = cells.recorders['GABAb_g'].get()[:, 2]
    gExc = cells.recorders['AMPA_g'].get()[:, 2]

    if plot_and_show:
        import pylab
        pylab.subplot(211)
        pylab.plot(t, v)
        pylab.ylabel('voltage [mV]')
        pylab.suptitle("AMPA, GABA_A, GABA_B")
        pylab.subplot(212)
        pylab.plot(t, gInhA, label='GABA_A')
        pylab.plot(t, gInhB, label='GABA_B')
        pylab.plot(t, gExc, label='AMPA')
        pylab.ylabel('conductance [nS]')
        pylab.xlabel('t [ms]')
        pylab.legend()

        pylab.show()

    sim.end()
Exemple #34
0
Andrew Davison, UNIC, CNRS
August 2006, November 2009

"""

import socket, os
from importlib import import_module
import numpy
from pyNN.utility import get_script_args, init_logging, normalized_filename

simulator_name = get_script_args(1)[0]
sim = import_module("pyNN.%s" % simulator_name)

from pyNN.random import NumpyRNG, RandomDistribution

init_logging(None, debug=True)

seed = 764756387
rng = NumpyRNG(seed=seed, parallel_safe=True)
tstop = 1000.0 # ms
input_rate = 100.0 # Hz
cell_params = {'tau_refrac': 2.0,  # ms
               'v_thresh':  -50.0, # mV
               'tau_syn_E':  2.0,  # ms
               'tau_syn_I':  2.0,  # ms
               'tau_m': RandomDistribution('uniform', low=18.0, high=22.0, rng=rng)
}
n_record = 3

node = sim.setup(timestep=0.025, min_delay=1.0, max_delay=1.0, debug=True, quit_on_end=False)
print("Process with rank %d running on %s" % (node, socket.gethostname()))
Exemple #35
0
def scenario4(sim):
    """
    Network with spatial structure
    """
    init_logging(logfile=None, debug=True)
    sim.setup()
    rng = NumpyRNG(seed=76454, parallel_safe=False)

    input_layout = RandomStructure(boundary=Cuboid(width=500.0,
                                                   height=500.0,
                                                   depth=100.0),
                                   origin=(0, 0, 0),
                                   rng=rng)
    inputs = sim.Population(
        100,
        sim.SpikeSourcePoisson(
            rate=RandomDistribution('uniform', low=3.0, high=7.0, rng=rng)),
        structure=input_layout,
        label="inputs")
    output_layout = Grid3D(aspect_ratioXY=1.0,
                           aspect_ratioXZ=5.0,
                           dx=10.0,
                           dy=10.0,
                           dz=10.0,
                           x0=0.0,
                           y0=0.0,
                           z0=200.0)
    outputs = sim.Population(
        200,
        sim.EIF_cond_exp_isfa_ista(),
        initial_values={
            'v': RandomDistribution('normal', mu=-65.0, sigma=5.0, rng=rng),
            'w': RandomDistribution('normal', mu=0.0, sigma=1.0, rng=rng)
        },
        structure=output_layout,  # 10x10x2 grid
        label="outputs")
    logger.debug("Output population positions:\n %s", outputs.positions)
    DDPC = sim.DistanceDependentProbabilityConnector
    input_connectivity = DDPC("0.5*exp(-d/100.0)", rng=rng)
    recurrent_connectivity = DDPC("sin(pi*d/250.0)**2", rng=rng)
    depressing = sim.TsodyksMarkramSynapse(weight=RandomDistribution(
        'normal', mu=0.1, sigma=0.02, rng=rng),
                                           delay="0.5 + d/100.0",
                                           U=0.5,
                                           tau_rec=800.0,
                                           tau_facil=0.0)
    facilitating = sim.TsodyksMarkramSynapse(weight=0.05,
                                             delay="0.2 + d/100.0",
                                             U=0.04,
                                             tau_rec=100.0,
                                             tau_facil=1000.0)
    input_connections = sim.Projection(inputs,
                                       outputs,
                                       input_connectivity,
                                       receptor_type='excitatory',
                                       synapse_type=depressing,
                                       space=Space(axes='xy'),
                                       label="input connections")
    recurrent_connections = sim.Projection(
        outputs,
        outputs,
        recurrent_connectivity,
        receptor_type='inhibitory',
        synapse_type=facilitating,
        # should add "calculate_boundaries" method to Structure classes
        space=Space(periodic_boundaries=((-100.0, 100.0), (-100.0, 100.0),
                                         None)),
        label="recurrent connections")
    outputs.record('spikes')
    outputs.sample(10, rng=rng).record('v')
    sim.run(1000.0)
    data = outputs.get_data()
    sim.end()
    return data
"""

from pyNN.utility import init_logging, normalized_filename
from pyNN.parameters import Sequence
from pyNN.space import Grid2D
from importlib import import_module
import numpy
from lazyarray import sqrt
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('simulator_name')
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()

init_logging(None, debug=args.debug)

sim = import_module("pyNN.%s" % args.simulator_name)

simtime = 100.0
input_rate = 20.0
n_cells = 9

sim.setup()

cell_type = sim.IF_cond_exp(tau_m=10.0,
#                            v_rest=lambda x, y, z: -60.0 - sqrt((x**2 + y**2)/100),
#                            v_thresh=lambda x, y, z: -55.0 + x/10.0)
                            v_rest=lambda i: -60.0 + i,
                            v_thresh=lambda i: -55.0 + i)
                tick_labels.append(label)
                i += 1
    label = "Input spikes"
    panel.plot(model_parameters.input_spike_times,
               i * numpy.ones_like(model_parameters.input_spike_times),
               "|", label=label, markersize=25)
    tick_labels.append(label)
    panel.set_ylim(-0.5, i + 0.5)
    panel.set_yticks(range(7))
    panel.set_yticklabels(tick_labels, size=6)
    return fig

# ==============================================================================
if __name__ == "__main__":
    from NeuroTools import datastore
    
    init_logging("test_synaptic_integration.log", debug=False)
    parameters = load_parameters(sys.argv[1])
    sim_list = sys.argv[2:]
    assert len(sim_list) >= 1, "Must specify at least one simulator."
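    # Dynamically import each requested backend (e.g. "from pyNN import neuron, nest")
    # and replace the name strings in sim_list with the imported module objects.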
    exec("from pyNN import %s" % ", ".join(sim_list))
    sim_list = [eval(s) for s in sim_list]    
    
    spike_data, vm_data, model_parameters = run(parameters, sim_list)
    
    if len(sim_list) >= 2:
        distances = calc_distances(spike_data)
        print(distances)
        vm_diff = calc_Vm_diff(vm_data)
        print(vm_diff)
    
        ds = datastore.ShelveDataStore(root_dir=parameters.results_dir, key_generator=datastore.keygenerators.hash_pickle)