Example #1
 def testFixedProbability(self):
     """For all connections created with "fixedProbability"..."""
     for srcP in [self.source5, self.source22]:
         for tgtP in [self.target1, self.target6, self.target33]:
             prj1 = neuron.Projection(srcP, tgtP, neuron.FixedProbabilityConnector(0.5), rng=random.NumpyRNG(12345))
             prj2 = neuron.Projection(srcP, tgtP, neuron.FixedProbabilityConnector(0.5), rng=random.NativeRNG(12345))
             for prj in prj1, prj2:
                 assert (0 < len(prj) < len(srcP)*len(tgtP)), 'len(prj) = %d, len(srcP)*len(tgtP) = %d' % (len(prj), len(srcP)*len(tgtP))
Example #2
 def testFixedNumberPost(self):
     c1 = neuron.FixedNumberPostConnector(10)
     c2 = neuron.FixedNumberPostConnector(3)
     c3 = neuron.FixedNumberPostConnector(random.RandomDistribution('poisson',[5]))
     for srcP in [self.source5, self.source22]:
         for tgtP in [self.target6, self.target33]:
             for c in c1, c2:
                 prj1 = neuron.Projection(srcP, tgtP, c)
                 self.assertEqual(len(prj1.connections), c.n*len(srcP))
             prj2 = neuron.Projection(srcP, tgtP, c3) # just a test that no Exceptions are raised
     self.assertRaises(Exception, neuron.FixedNumberPostConnector, None)
Example #3
def build_network(sim, order=1000, epsilon=0.1, delay=1.5, J=0.1, theta=20.0,
                  tau=20.0, tau_syn=0.1, tau_refrac=2.0, v_reset=10.0,
                  R=1.5, g=5, eta=2, seed=None):

    NE = 4 * order
    NI = 1 * order
    CE = int(epsilon * NE)  # number of excitatory synapses per neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses per neuron

    CMem = tau/R

    J_unit = psp_height(tau, R, tau_syn)
    J_ex  = J / J_unit
    J_in  = -g * J_ex

    nu_th = theta / (J_ex * CE * R * tau_syn)
    nu_ex = eta * nu_th
    p_rate = 1000.0 * nu_ex * CE

    assert seed is not None
    rng = NumpyRNG(seed)

    neuron_params = {
        "nrn_tau": tau,
        "nrn_v_threshold": theta,
        "nrn_refractory_period": tau_refrac,
        "nrn_v_reset": v_reset,
        "nrn_R": R,
        "syn_tau": tau_syn
    }

    celltype = Dynamics(name='iaf',
                        subnodes={'nrn': read("sources/BrunelIaF.xml")['BrunelIaF'],
                                  'syn': read("sources/AlphaPSR.xml")['AlphaPSR']})
    celltype.connect_ports('syn.i_synaptic', 'nrn.i_synaptic')

    exc = sim.Population(NE, nineml_cell_type('BrunelIaF', celltype, {'syn': 'syn_weight'})(**neuron_params))
    inh = sim.Population(NI, nineml_cell_type('BrunelIaF', celltype, {'syn': 'syn_weight'})(**neuron_params))
    all = exc + inh
    all.initialize(v=RandomDistribution('uniform', (0.0, theta), rng=rng))

    stim = sim.Population(NE + NI, nineml_cell_type('Poisson', read("sources/Poisson.xml")['Poisson'], {})(rate=p_rate))

    print("Connecting network")

    exc_synapse = sim.StaticSynapse(weight=J_ex, delay=delay)
    inh_synapse = sim.StaticSynapse(weight=J_in, delay=delay)

    input_connections = sim.Projection(stim, all, sim.OneToOneConnector(), exc_synapse, receptor_type="syn")
    exc_connections = sim.Projection(exc, all, sim.FixedNumberPreConnector(n=CE), exc_synapse, receptor_type="syn")  # note: FixedNumberPre (CE inputs per neuron), not FixedNumberPost
    inh_connections = sim.Projection(inh, all, sim.FixedNumberPreConnector(n=CI), inh_synapse, receptor_type="syn")

    return stim, exc, inh
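
# psp_height() is called above but not defined in this snippet. Below is a plausible
# sketch, assuming the standard Brunel-network normalization (peak height of the PSP
# evoked by an alpha-shaped synaptic current of unit amplitude); only the name and
# argument order come from the call above, the body itself is an assumption.
from numpy import exp
from scipy.special import lambertw

def psp_height(tau_m, R_m, tau_syn):
    """Peak of the PSP for a unit-amplitude, alpha-shaped synaptic current."""
    c_m = tau_m / R_m                 # membrane capacitance, as CMem = tau/R above
    a = tau_m / tau_syn
    b = 1.0 / tau_syn - 1.0 / tau_m
    # time at which the PSP reaches its maximum (real W_{-1} branch of Lambert W)
    t_max = (1.0 / b) * (-lambertw(-exp(-1.0 / a) / a, k=-1).real - 1.0 / a)
    return (exp(1.0) / (tau_syn * c_m * b)
            * ((exp(-t_max / tau_m) - exp(-t_max / tau_syn)) / b
               - t_max * exp(-t_max / tau_syn)))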
Example #4
 def testAllToAll(self):
     """For all connections created with "allToAll" it should be possible to
     obtain the weight using the top-level HocObject"""
     for srcP in [self.source5, self.source22, self.target33]:
         for tgtP in [self.target6, self.target33]:
             #print "gid_counter = ", neuron.simulator.state.gid_counter
             if srcP == tgtP:
                 prj = neuron.Projection(srcP, tgtP, neuron.AllToAllConnector(allow_self_connections=False))
             else:
                 prj = neuron.Projection(srcP, tgtP, neuron.AllToAllConnector())
             prj.setWeights(1.234)
             weights = []
             for c in prj.connections:
                 weights.append(c.nc.weight[0])
             assert weights == [1.234]*len(prj)
Example #5
 def testRandomizeWeights(self):
     # The probability that two consecutively generated weight vectors are equal should be 0
     prj1 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj2 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj1.randomizeWeights(self.distrib_Numpy)
     prj2.randomizeWeights(self.distrib_Native)
     w1 = []; w2 = []; w3 = []; w4 = []
     for c1, c2 in zip(prj1.connections, prj2.connections):
         w1.append(c1.nc.weight[0])
         w2.append(c2.nc.weight[0])
     prj1.randomizeWeights(self.distrib_Numpy)
     prj2.randomizeWeights(self.distrib_Native)
     for c1, c2 in zip(prj1.connections, prj2.connections):
         w3.append(c1.nc.weight[0])
         w4.append(c2.nc.weight[0])
     self.assertNotEqual(w1, w3)
     self.assertNotEqual(w2, w4)
Example #6
 def testRandomizeDelays(self):
     # The probability that two consecutively generated delay vectors are equal should be 0
     prj1 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj2 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj1.randomizeDelays(self.distrib_Numpy)
     prj2.randomizeDelays(self.distrib_Native)
     d1 = []; d2 = []; d3 = []; d4 = []
     for c1, c2 in zip(prj1.connections, prj2.connections):
         d1.append(c1.nc.delay)
         d2.append(c2.nc.delay)
     prj1.randomizeDelays(self.distrib_Numpy)
     prj2.randomizeDelays(self.distrib_Native)
     for c1, c2 in zip(prj1.connections, prj2.connections):
         d3.append(c1.nc.delay)
         d4.append(c2.nc.delay)
     self.assertNotEqual(d1, d3)
     self.assertNotEqual(d2, d4)
Example #7
 def testSetDelays(self):
     prj1 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj1.setDelays(2.345)
     delays = []
     for c in prj1.connections:
         delays.append(c.nc.delay)
     result = 2.345*numpy.ones(len(prj1.connections))
     assert (delays == result.tolist())
Example #8
 def testSetWeights(self):
     prj1 = neuron.Projection(self.source, self.target, neuron.AllToAllConnector())
     prj1.setWeights(2.345)
     weights = []
     for c in prj1.connections:
         weights.append(c.nc.weight[0])
     result = 2.345*numpy.ones(len(prj1.connections))
     assert (weights == result.tolist())
Example #9
def model_network(param_dict):
    """
    This model network consists of a spike source and a neuron (IF_cond_alpha).
    The spike rate of the source and the weight can be specified in the 
    param_dict. Returns the number of spikes fired during 1000 ms of simulation.
    
    Parameters:
    param_dict - dictionary with keys
                 rate - the rate of the spike source (spikes/second)
                 weight - weight of the connection source -> neuron
                 
    Returns:
    dictionary with keys:
        source_rate - the rate of the spike source
        weight - weight of the connection source -> neuron
        neuron_rate - spike rate of the neuron
    """
    #set up the network
    import pyNN.neuron as sim
    sim.setup(dt=0.01,
              min_delay=1.,
              max_delay=1.,
              debug=False,
              quit_on_end=False)

    weight = param_dict['weight']

    import NeuroTools.stgen as stgen
    stgen = stgen.StGen()
    spiketrain = stgen.poisson_generator(param_dict['rate'], t_stop=1000.)
    source = sim.Population(1, sim.SpikeSourceArray,
                            {'spike_times': spiketrain.spike_times})
    neuron = sim.Population(1, sim.IF_cond_alpha)
    sim.Projection(source,
                   neuron,
                   method=sim.OneToOneConnector(weights=param_dict['weight'],
                                                delays=1.))

    #set recorder
    neuron.record()
    neuron.record_v()

    #run the simulation
    sim.run(1001.)
    sim.end()

    # count the number of spikes
    spikes = neuron.getSpikes()
    numspikes = len(spikes)

    # return everything, including the input parameters
    return {
        'source_rate': param_dict['rate'],
        'weight': param_dict['weight'],
        'neuron_rate': numspikes
    }
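
# A minimal usage sketch for model_network() (the parameter values are hypothetical;
# requires pyNN.neuron and NeuroTools to be installed):
if __name__ == '__main__':
    result = model_network({'rate': 50.0, 'weight': 0.01})
    print("%d spikes for an input rate of %g Hz" %
          (result['neuron_rate'], result['source_rate']))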
Example #10
 def testFromList(self):
     c1 = neuron.FromListConnector([
         ([0,], [0,], 0.1, 0.1),
         ([3,], [0,], 0.2, 0.11),
         ([2,], [3,], 0.3, 0.12),
         ([4,], [2,], 0.4, 0.13),
         ([0,], [1,], 0.5, 0.14),
         ])
     prj = neuron.Projection(self.source5, self.target6, c1)
     self.assertEqual(len(prj.connections), 5)
Example #11
 def testSaveAndLoad(self):
     prj1 = neuron.Projection(self.source33, self.target33, neuron.OneToOneConnector())
     prj1.setDelays(1)
     prj1.setWeights(1.234)
     prj1.saveConnections("connections.tmp", gather=False)
     if neuron.num_processes() > 1:
         distributed = True
     else:
         distributed = False
     prj2 = neuron.Projection(self.source33, self.target33, neuron.FromFileConnector("connections.tmp",
                                                                                     distributed=distributed))
     w1 = []; w2 = []; d1 = []; d2 = []
     # For a connection scheme that is saved and reloaded, we check that the
     # connections, their weights, and their delays are equal.
     for c1,c2 in zip(prj1.connections, prj2.connections):
         w1.append(c1.nc.weight[0])
         w2.append(c2.nc.weight[0])
         d1.append(c1.nc.delay)
         d2.append(c2.nc.delay)
     assert (w1 == w2), 'w1 = %s\nw2 = %s' % (w1, w2)
     assert (d1 == d2), 'd1 = %s\nd2 = %s' % (d1, d2)
Example #12
 def testDistanceDependentProbability(self):
     """For all connections created with "distanceDependentProbability"..."""
     # Test should be improved...
     for rngclass in (random.NumpyRNG, random.NativeRNG):
         for expr in ('exp(-d)', 'd < 0.5'):
     #rngclass = random.NumpyRNG
     #expr = 'exp(-d)'
             prj = neuron.Projection(self.source33, self.target33,
                                     neuron.DistanceDependentProbabilityConnector(d_expression=expr),
                                     rng=rngclass(12345))
             assert (0 < len(prj) < len(self.source33)*len(self.target33))
     self.assertRaises(ZeroDivisionError, neuron.DistanceDependentProbabilityConnector, d_expression="d/0.0")
Example #13
        return gen()


assert generate_spike_times(0).max() > simtime

spike_source = sim.Population(
    n, sim.SpikeSourceArray(spike_times=generate_spike_times))

spike_source.record('spikes')
cells.record('spikes')
cells[0:2].record('m')

syn = sim.StaticSynapse(weight=w, delay=syn_delay)
input_conns = sim.Projection(spike_source,
                             cells,
                             sim.FixedProbabilityConnector(0.5),
                             syn,
                             receptor_type="default")

# === Run simulation ===========================================================

sim.run(simtime)

filename = normalized_filename("Results", "nrn_artificial_cell", "pkl",
                               "neuron", sim.num_processes())
cells.write_data(filename, annotations={'script_name': __file__})

print("Mean firing rate: ", cells.mean_spike_count() * 1000.0 / simtime, "Hz")

plot_figure = True
if plot_figure:
Example #14
def t4():
    print('Loading Fourth XML File (iaf-2coba-Model)')
    print('----------------------------------------')
    component = readers.XMLReader.read_component(Join(tenml_dir,
                                                      'iaf_2coba.10ml'),
                                                 component_name='iaf')
    writers.XMLWriter.write(
        component,
        '/tmp/nineml_toxml4.xml',
    )
    model = readers.XMLReader.read_component(Join(tenml_dir, 'iaf_2coba.10ml'))

    from nineml.abstraction_layer.flattening import flatten
    from nineml.abstraction_layer.dynamics.utils.modifiers import (
        DynamicsModifier)

    flatcomponent = flatten(model, componentname='iaf_2coba')
    DynamicsModifier.close_analog_port(component=flatcomponent,
                                       port_name='iaf_iSyn',
                                       value='0')

    writers.XMLWriter.write(flatcomponent, '/tmp/nineml_out_iaf_2coba.9ml')

    import pyNN.neuron as sim
    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)
    print('Attempting to simulate From Model:')
    print('----------------------------------')
    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_2coba",
        nineml_model=flatcomponent,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='cobaExcit', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='cobaInhib', weight_connector='q'),
        ])

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'cobaExcit.tau': 2.0,
        'cobaInhib.tau': 5.0,
        'cobaExcit.vrev': 0.0,
        'cobaInhib.vrev': -70.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(2, sim.SpikeSourcePoisson, {'rate': 100})

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [
        sim.Projection(input[0:1], cells, connector, target='cobaExcit'),
        sim.Projection(input[1:2], cells, connector, target='cobaInhib')
    ]

    cells._record('iaf_V')
    cells._record('cobaExcit_g')
    cells._record('cobaInhib_g')
    cells._record('cobaExcit_I')
    cells._record('cobaInhib_I')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V",
                                   filter=[cells[0]])
    cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_g'].write("Results/nineml_neuron.g_inh",
                                         filter=[cells[0]])
    cells.recorders['cobaExcit_I'].write("Results/nineml_neuron.i_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_I'].write("Results/nineml_neuron.i_inh",
                                         filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInh = cells.recorders['cobaInhib_g'].get()[:, 2]
    gExc = cells.recorders['cobaExcit_g'].get()[:, 2]
    IInh = cells.recorders['cobaInhib_I'].get()[:, 2]
    IExc = cells.recorders['cobaExcit_I'].get()[:, 2]

    import pylab
    pylab.subplot(311)
    pylab.ylabel('Voltage')
    pylab.plot(t, v)

    pylab.subplot(312)
    pylab.ylabel('Conductance')
    pylab.plot(t, gInh)
    pylab.plot(t, gExc)

    pylab.subplot(313)
    pylab.ylabel('Current')
    pylab.plot(t, IInh)
    pylab.plot(t, IExc)

    pylab.suptitle("From Tree-Model Pathway")
    pylab.show()

    sim.end()
Example #15
}
p = sim.Population(1, sim.IF_curr_alpha(**cell_parameters))
p.initialize(v=0.0)

rate = 20
stim = sim.Population(
    1,
    nineml_cell_type('Poisson',
                     read("../sources/Poisson.xml")['Poisson'], {})(rate=rate))
stim.initialize(t_next=numpy.random.exponential(1000 / rate))

weight = 0.1
delay = 0.5
prj = sim.Projection(stim,
                     p,
                     sim.AllToAllConnector(),
                     sim.StaticSynapse(weight=weight, delay=delay),
                     receptor_type='excitatory')

stim.record('spikes')
p.record('v')

sim.run(t_stop)

nrn_data = p.get_data().segments[0]
stim_data = stim.get_data().segments[0]

print("Expected spike count: {}".format(t_stop * rate / 1000))
print("Actual spike count: {}".format(stim.mean_spike_count()))

Figure(
Example #16
 def test_create_with_fast_synapse_dynamics(self):
     prj = sim.Projection(self.p1, self.p2, self.all2all,
                          synapse_type=sim.TsodyksMarkramSynapse())
Example #17
 def test_create_simple(self):
     prj = sim.Projection(self.p1, self.p2, self.all2all, self.syn2)
Example #18
#seeds = numpy.arange(numberOfNodes) + int((time.time()*100)%2**32)
# seeds that are the same every run, but different for each node
seeds = numpy.arange(numberOfNodes)

# bcast, since we can't be sure each node has the same time and would therefore
# generate different seeds.  This way, all nodes get the list from rank 0.
seeds = MPI.COMM_WORLD.bcast(seeds)

#rng = NumpyRNG(seed=seeds[rank], parallel_safe=False, rank=rank,
#               num_processes=numberOfNodes)

#nest.SetKernelStatus({'rng_seeds': list(seeds)})

myconn = sim.OneToOneConnector(weights=globalWeight, delays=dt)

prjE_E = sim.Projection(poissonE_E, popE, method=myconn, target='excitatory')
prjE_I = sim.Projection(poissonE_I, popI, method=myconn, target='excitatory')

prjI_E = sim.Projection(poissonI_E, popE, method=myconn, target='inhibitory')
prjI_I = sim.Projection(poissonI_I, popI, method=myconn, target='inhibitory')

## Record the spikes ##
popE.record(to_file=False)
popI.record(to_file=False)
printTimer("Time for setup part")


###################### RUN PART ###########################
## Run the simulation without inter-connection ##
printMessage("Now running without inter-lattice connections.")
sim.run(int(tinit))
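
# printMessage() and printTimer() are not defined in this fragment; minimal
# placeholder implementations (hypothetical, for completeness) could look like this:
import time
_t_start = time.time()

def printMessage(msg):
    print(msg)

def printTimer(label):
    print("%s: %.2f s elapsed" % (label, time.time() - _t_start))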
Example #19
def run(plot_and_show=True):
    import sys
    from os.path import abspath, realpath, join
    import numpy
    import nineml

    root = abspath(join(realpath(nineml.__path__[0]), "../../.."))
    sys.path.append(join(root, "lib9ml/python/examples/AL"))
    sys.path.append(join(root, "code_generation/nmodl"))
    sys.path.append(join(root, "code_generation/nest2"))

    #from nineml.abstraction_layer.example_models import  get_hierachical_iaf_3coba
    from nineml.abstraction_layer.testing_utils import TestableComponent
    from nineml.abstraction_layer.flattening import ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml

    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)

    #test_component = get_hierachical_iaf_3coba()
    test_component = TestableComponent('hierachical_iaf_3coba')()

    from nineml.abstraction_layer.writers import DotWriter
    DotWriter.write(test_component, 'test1.dot')

    from nineml.abstraction_layer.writers import XMLWriter
    XMLWriter.write(test_component, 'iaf_3coba.xml')

    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_3coba",
        nineml_model=test_component,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='AMPA', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='GABAa', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='GABAb', weight_connector='q'),
        ])

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'AMPA.tau': 2.0,
        'GABAa.tau': 5.0,
        'GABAb.tau': 50.0,
        'AMPA.vrev': 0.0,
        'GABAa.vrev': -70.0,
        'GABAb.vrev': -95.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    input = sim.Population(3, sim.SpikeSourceArray)

    numpy.random.seed(12345)
    input[0].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 100.0, size=1000))
    input[1].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 20.0, size=1000))
    input[2].spike_times = numpy.add.accumulate(
        numpy.random.exponential(1000.0 / 50.0, size=1000))

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [
        sim.Projection(input[0:1], cells, connector, target='AMPA'),
        sim.Projection(input[1:2], cells, connector, target='GABAa'),
        sim.Projection(input[2:3], cells, connector, target='GABAb')
    ]

    cells._record('iaf_V')
    cells._record('AMPA_g')
    cells._record('GABAa_g')
    cells._record('GABAb_g')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V",
                                   filter=[cells[0]])
    cells.recorders['AMPA_g'].write("Results/nineml_neuron.g_exc",
                                    filter=[cells[0]])
    cells.recorders['GABAa_g'].write("Results/nineml_neuron.g_gabaA",
                                     filter=[cells[0]])
    cells.recorders['GABAb_g'].write("Results/nineml_neuron.g_gabaB",
                                     filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInhA = cells.recorders['GABAa_g'].get()[:, 2]
    gInhB = cells.recorders['GABAb_g'].get()[:, 2]
    gExc = cells.recorders['AMPA_g'].get()[:, 2]

    if plot_and_show:
        import pylab
        pylab.subplot(211)
        pylab.plot(t, v)
        pylab.ylabel('voltage [mV]')
        pylab.suptitle("AMPA, GABA_A, GABA_B")
        pylab.subplot(212)
        pylab.plot(t, gInhA, label='GABA_A')
        pylab.plot(t, gInhB, label='GABA_B')
        pylab.plot(t, gExc, label='AMPA')
        pylab.ylabel('conductance [nS]')
        pylab.xlabel('t [ms]')
        pylab.legend()

        pylab.show()

    sim.end()
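
# Minimal usage sketch for the run() function defined above:
if __name__ == '__main__':
    run(plot_and_show=True)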
Example #20
    'GABAb.vrev': -95.0,
}

parameters = ModelToSingleComponentReducer.flatten_namespace_dict(parameters)

cells = sim.Population(1, celltype_cls, parameters)
cells.initialize('iaf_V', parameters['iaf_vrest'])
cells.initialize('tspike', -1e99)  # neuron not refractory at start
cells.initialize('regime', 1002)  # temporary hack

input = sim.Population(3, sim.SpikeSourcePoisson, {'rate': 100})

connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

conn = [
    sim.Projection(input[0:1], cells, connector, target='AMPA'),
    sim.Projection(input[1:2], cells, connector, target='GABAa'),
    sim.Projection(input[2:3], cells, connector, target='GABAb')
]

cells._record('iaf_V')
cells._record('AMPA_g')
cells._record('GABAa_g')
cells._record('GABAb_g')
cells.record()

sim.run(100.0)

cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
cells.recorders['AMPA_g'].write("Results/nineml_neuron.g_exc",
                                filter=[cells[0]])
Example #21
parameters = ComponentFlattener.flatten_namespace_dict(parameters)


cells = sim.Population(1, celltype_cls, parameters)

cells.initialize('iaf_V', parameters['iaf_vrest'])
cells.initialize('tspike', -1e99)  # neuron not refractory at start
cells.initialize('regime', 1002)  # temporary hack

input = sim.Population(1, sim.SpikeSourcePoisson, {'rate': 100})

connector = sim.OneToOneConnector(weights=1.0, delays=0.5)


conn = [
    sim.Projection(input[0:1], cells, connector, target='nmda'),
    sim.Projection(input[0:1], cells, connector, target='cobaExcit'),
]


cells._record('iaf_V')
cells._record('nmda_g')
cells._record('cobaExcit_g')
cells.record()

sim.run(100.0)

cells.recorders['iaf_V'].write("Results/nineml_neuron.V", filter=[cells[0]])
cells.recorders['nmda_g'].write("Results/nineml_neuron.g_nmda", filter=[cells[0]])
cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_cobaExcit", filter=[cells[0]])
Example #22
def sim_runner(wgf):
    wg = wgf

    import pyNN.neuron as sim
    nproc = sim.num_processes()
    node = sim.rank()
    print(nproc)
    import matplotlib
    matplotlib.use('Agg')

    import matplotlib.pyplot as plt
    import matplotlib as mpl
    mpl.rcParams.update({'font.size':16})

    #import mpi4py
    #threads  = sim.rank()
    threads = 1
    rngseed  = 98765
    parallel_safe = False
    #extra = {'threads' : threads}
    import os
    import pandas as pd
    import sys
    import numpy as np
    from pyNN.neuron import STDPMechanism
    import copy
    from pyNN.random import RandomDistribution, NumpyRNG
    import pyNN.neuron as neuron
    from pyNN.neuron import h
    from pyNN.neuron import StandardCellType, ParameterSpace
    from pyNN.random import RandomDistribution, NumpyRNG
    from pyNN.neuron import STDPMechanism, SpikePairRule, AdditiveWeightDependence, FromListConnector, TsodyksMarkramSynapse
    from pyNN.neuron import Projection, OneToOneConnector
    from numpy import arange
    import pyNN
    from pyNN.utility import get_simulator, init_logging, normalized_filename
    import random
    import socket
    #from neuronunit.optimization import get_neab
    import networkx as nx
    sim = pyNN.neuron

    # Get some hippocampus connectivity data, based on a conversation with
    # academic researchers on GitHub:
    # https://github.com/Hippocampome-Org/GraphTheory/issues?q=is%3Aissue+is%3Aclosed
    # Scrape Hippocampome connectivity data, intended for programming neuromorphic hardware.
    # Conditionally download the files if they don't exist.


    path_xl = '_hybrid_connectivity_matrix_20171103_092033.xlsx'
    if not os.path.exists(path_xl):
        os.system('wget https://github.com/Hippocampome-Org/GraphTheory/files/1657258/_hybrid_connectivity_matrix_20171103_092033.xlsx')

    xl = pd.ExcelFile(path_xl)
    dfEE = xl.parse()
    dfEE.loc[0].keys()
    dfm = dfEE.values  # DataFrame.as_matrix() is removed in recent pandas versions

    rcls = dfm[:,:1] # real cell labels.
    rcls = rcls[1:]
    rcls = { k:v for k,v in enumerate(rcls) } # real cell labels, cast to dictionary
    import pickle
    with open('cell_names.p','wb') as f:
        pickle.dump(rcls,f)
    import pandas as pd
    pd.DataFrame(rcls).to_csv('cell_names.csv', index=False)

    filtered = dfm[:,3:]
    filtered = filtered[1:]
    rng = NumpyRNG(seed=64754)
    delay_distr = RandomDistribution('normal', [2, 1e-1], rng=rng)
    weight_distr = RandomDistribution('normal', [45, 1e-1], rng=rng)


    sanity_e = []
    sanity_i = []

    EElist = []
    IIlist = []
    EIlist = []
    IElist = []

    for i,j in enumerate(filtered):
      for k,xaxis in enumerate(j):
        if xaxis == 1 or xaxis == 2:
          source = i
          sanity_e.append(i)
          target = k

        if xaxis ==-1 or xaxis == -2:
          sanity_i.append(i)
          source = i
          target = k

    index_exc = list(set(sanity_e))
    index_inh = list(set(sanity_i))
    import pickle
    with open('cell_indexs.p','wb') as f:
        returned_list = [index_exc, index_inh]
        pickle.dump(returned_list,f)

    import numpy
    a = numpy.asarray(index_exc)
    numpy.savetxt('pickles/'+str(k)+'excitatory_number_labels.csv', a, delimiter=",")
    import numpy
    a = numpy.asarray(index_inh)
    numpy.savetxt('pickles/'+str(k)+'inhibitory_number_labels.csv', a, delimiter=",")

    for i,j in enumerate(filtered):
      for k,xaxis in enumerate(j):
        if xaxis==1 or xaxis == 2:
          source = i
          sanity_e.append(i)
          target = k
          delay = delay_distr.next()
          weight = 1.0
          if target in index_inh:
             EIlist.append((source,target,delay,weight))
          else:
             EElist.append((source,target,delay,weight))

        if xaxis==-1 or xaxis == -2:
          sanity_i.append(i)

          source = i
          target = k
          delay = delay_distr.next()
          weight = 1.0
          if target in index_exc:
              IElist.append((source,target,delay,weight))
          else:
              IIlist.append((source,target,delay,weight))


    internal_conn_ee = sim.FromListConnector(EElist)
    ee = internal_conn_ee.conn_list

    ee_srcs = ee[:,0]
    ee_tgs = ee[:,1]

    internal_conn_ie = sim.FromListConnector(IElist)
    ie = internal_conn_ie.conn_list
    ie_srcs = set([ int(e[0]) for e in ie ])
    ie_tgs = set([ int(e[1]) for e in ie ])

    internal_conn_ei = sim.FromListConnector(EIlist)
    ei = internal_conn_ei.conn_list
    ei_srcs = set([ int(e[0]) for e in ei ])
    ei_tgs = set([ int(e[1]) for e in ei ])

    internal_conn_ii = sim.FromListConnector(IIlist)
    ii = internal_conn_ii.conn_list
    ii_srcs = set([ int(e[0]) for e in ii ])
    ii_tgs = set([ int(e[1]) for e in ii ])

    for e in internal_conn_ee.conn_list:
        assert e[0] in ee_srcs
        assert e[1] in ee_tgs

    for i in internal_conn_ii.conn_list:
        assert i[0] in ii_srcs
        assert i[1] in ii_tgs


    ml = len(filtered[1])+1
    pre_exc = []
    post_exc = []
    pre_inh = []
    post_inh = []


    rng = NumpyRNG(seed=64754)
    delay_distr = RandomDistribution('normal', [2, 1e-1], rng=rng)

    plot_EE = np.zeros(shape=(ml,ml), dtype=bool)
    plot_II = np.zeros(shape=(ml,ml), dtype=bool)
    plot_EI = np.zeros(shape=(ml,ml), dtype=bool)
    plot_IE = np.zeros(shape=(ml,ml), dtype=bool)

    for i in EElist:
        plot_EE[i[0],i[1]] = int(0)
        #plot_ss[i[0],i[1]] = int(1)

        if i[0]!=i[1]: # exclude self connections
            plot_EE[i[0],i[1]] = int(1)

            pre_exc.append(i[0])
            post_exc.append(i[1])



    assert len(pre_exc) == len(post_exc)
    for i in IIlist:
        plot_II[i[0],i[1]] = int(0)
        if i[0]!=i[1]:
            plot_II[i[0],i[1]] = int(1)
            pre_inh.append(i[0])
            post_inh.append(i[1])

    for i in IElist:
        plot_IE[i[0],i[1]] = int(0)
        if i[0]!=i[1]: # exclude self connections
            plot_IE[i[0],i[1]] = int(1)
            pre_inh.append(i[0])
            post_inh.append(i[1])

    for i in EIlist:
        plot_EI[i[0],i[1]] = int(0)
        if i[0]!=i[1]:
            plot_EI[i[0],i[1]] = int(1)
            pre_exc.append(i[0])
            post_exc.append(i[1])

    plot_excit = plot_EI + plot_EE
    plot_inhib = plot_IE + plot_II

    assert len(pre_inh) == len(post_inh)

    num_exc = [ i for i,e in enumerate(plot_excit) if sum(e) > 0 ]
    num_inh = [ y for y,i in enumerate(plot_inhib) if sum(i) > 0 ]

    # the network is dominated by inhibitory neurons, which is unusual in most models.
    assert len(num_inh) > len(num_exc)
    assert np.sum(plot_inhib) > np.sum(plot_excit)
    assert len(num_exc) < ml
    assert len(num_inh) < ml
    # # Plot all the Projection pairs as a connection matrix (Excitatory and Inhibitory Connections)

    import pickle
    with open('graph_inhib.p','wb') as f:
       pickle.dump(plot_inhib,f, protocol=2)


    import pickle
    with open('graph_excit.p','wb') as f:
       pickle.dump(plot_excit,f, protocol=2)


    #with open('cell_names.p','wb') as f:
    #    pickle.dump(rcls,f)
    import pandas as pd
    pd.DataFrame(plot_EE).to_csv('ee.csv', index=False)

    import pandas as pd
    pd.DataFrame(plot_IE).to_csv('ie.csv', index=False)

    import pandas as pd
    pd.DataFrame(plot_II).to_csv('ii.csv', index=False)

    import pandas as pd
    pd.DataFrame(plot_EI).to_csv('ei.csv', index=False)


    from scipy.sparse import coo_matrix
    m = np.matrix(filtered[1:])

    bool_matrix = np.add(plot_excit,plot_inhib)
    with open('bool_matrix.p','wb') as f:
       pickle.dump(bool_matrix,f, protocol=2)

    if not isinstance(m, coo_matrix):
        m = coo_matrix(m)

    Gexc_ud = nx.Graph(plot_excit)
    avg_clustering = nx.average_clustering(Gexc_ud)

    rc = nx.rich_club_coefficient(Gexc_ud,normalized=False)
    print('Rich-club coefficient of the excitatory graph at degree 0: ', rc[0])
    gexc = nx.DiGraph(plot_excit)

    gexcc = nx.betweenness_centrality(gexc)
    top_exc = sorted(([ (v,k) for k, v in dict(gexcc).items() ]), reverse=True)

    in_degree = gexc.in_degree()
    top_in = sorted(([ (v,k) for k, v in in_degree.items() ]))
    in_hub = top_in[-1][1]
    out_degree = gexc.out_degree()
    top_out = sorted(([ (v,k) for k, v in out_degree.items() ]))
    out_hub = top_out[-1][1]
    mean_out = np.mean(list(out_degree.values()))
    mean_in = np.mean(list(in_degree.values()))

    mean_conns = int((mean_in + mean_out) / 2)

    k = 2  # number of neighbouring nodes to wire
    p = 0.25  # probability of instead wiring to a random long-range destination
    ne = len(plot_excit)  # size of the small-world network
    small_world_ring_excit = nx.watts_strogatz_graph(ne, mean_conns, p)



    k = 2  # number of neighbouring nodes to wire
    p = 0.25  # probability of instead wiring to a random long-range destination
    ni = len(plot_inhib)  # size of the small-world network
    small_world_ring_inhib = nx.watts_strogatz_graph(ni, mean_conns, p)


    nproc = sim.num_processes()
    nproc = 8
    host_name = socket.gethostname()
    node_id = sim.setup(timestep=0.01, min_delay=1.0)#, **extra)
    print("Host #%d is on %s" % (node_id + 1, host_name))
    rng = NumpyRNG(seed=64754)

    #pop_size = len(num_exc)+len(num_inh)
    #num_exc = [ i for i,e in enumerate(plot_excit) if sum(e) > 0 ]
    #num_inh = [ y for y,i in enumerate(plot_inhib) if sum(i) > 0 ]
    #pop_exc =  sim.Population(len(num_exc), sim.Izhikevich(a=0.02, b=0.2, c=-65, d=8, i_offset=0))
    #pop_inh = sim.Population(len(num_inh), sim.Izhikevich(a=0.02, b=0.25, c=-65, d=2, i_offset=0))


    #index_exc = list(set(sanity_e))
    #index_inh = list(set(sanity_i))
    all_cells = sim.Population(len(index_exc)+len(index_inh), sim.Izhikevich(a=0.02, b=0.2, c=-65, d=8, i_offset=0))
    #all_cells = None
    #all_cells = pop_exc + pop_inh
    pop_exc = sim.PopulationView(all_cells,index_exc)
    pop_inh = sim.PopulationView(all_cells,index_inh)
    #print(pop_exc)
    #print(dir(pop_exc))
    for pe in pop_exc:
        print(pe)
        #import pdb
        pe = all_cells[pe]
        #pdb.set_trace()
        #pe = all_cells[i]
        r = random.uniform(0.0, 1.0)
        pe.set_parameters(a=0.02, b=0.2, c=-65+15*r, d=8-r**2, i_offset=0)
        #pop_exc.append(pe)

    #pop_exc = sim.Population(pop_exc)
    for pi in index_inh:
        pi = all_cells[pi]
        #print(pi)
        #pi = all_cells[i]
        r = random.uniform(0.0, 1.0)
        pi.set_parameters(a=0.02+0.08*r, b=0.25-0.05*r, c=-65, d= 2, i_offset=0)
        #pop_inh.append(pi)
    #pop_inh = sim.Population(pop_inh)

    '''
    for pe in pop_exc:
        r = random.uniform(0.0, 1.0)
        pe.set_parameters(a=0.02, b=0.2, c=-65+15*r, d=8-r**2, i_offset=0)

    for pi in pop_inh:
        r = random.uniform(0.0, 1.0)
        pi.set_parameters(a=0.02+0.08*r, b=0.25-0.05*r, c=-65, d= 2, i_offset=0)
    '''
    NEXC = len(num_exc)
    NINH = len(num_inh)

    exc_syn = sim.StaticSynapse(weight = wg, delay=delay_distr)
    assert np.all(internal_conn_ee.conn_list[:, 0] < ee_srcs.size)
    prj_exc_exc = sim.Projection(all_cells, all_cells, internal_conn_ee, exc_syn, receptor_type='excitatory')
    prj_exc_inh = sim.Projection(all_cells, all_cells, internal_conn_ei, exc_syn, receptor_type='excitatory')
    inh_syn = sim.StaticSynapse(weight = wg, delay=delay_distr)
    delay_distr = RandomDistribution('normal', [1, 100e-3], rng=rng)
    prj_inh_inh = sim.Projection(all_cells, all_cells, internal_conn_ii, inh_syn, receptor_type='inhibitory')
    prj_inh_exc = sim.Projection(all_cells, all_cells, internal_conn_ie, inh_syn, receptor_type='inhibitory')
    inh_distr = RandomDistribution('normal', [1, 2.1e-3], rng=rng)


    def prj_change(prj,wg):
        prj.setWeights(wg)
    prj_change(prj_exc_exc,wg)
    prj_change(prj_exc_inh,wg)
    prj_change(prj_inh_exc,wg)
    prj_change(prj_inh_inh,wg)

    def prj_check(prj):
        for w in prj.weightHistogram():
            for i in w:
                print(i)
    prj_check(prj_exc_exc)
    prj_check(prj_exc_inh)
    prj_check(prj_inh_exc)
    prj_check(prj_inh_inh)

    #print(rheobase['value'])
    #print(float(rheobase['value']),1.25/1000.0)
    '''Old values that worked
    noise = sim.NoisyCurrentSource(mean=0.85/1000.0, stdev=5.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
    pop_exc.inject(noise)
    #1000.0 pA


    noise = sim.NoisyCurrentSource(mean=1.740/1000.0, stdev=5.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
    pop_inh.inject(noise)
    #1750.0 pA
    '''

    noise = sim.NoisyCurrentSource(mean=0.74/1000.0, stdev=4.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
    pop_exc.inject(noise)
    #1000.0 pA


    noise = sim.NoisyCurrentSource(mean=1.440/1000.0, stdev=4.00/1000.0, start=0.0, stop=2000.0, dt=1.0)
    pop_inh.inject(noise)

    ##
    # Setup and run a simulation. Note there is no current injection into the neuron.
    # All cells in the network are in a quiescent state, so it's not a surprise that there are no spikes
    ##

    sim = pyNN.neuron
    arange = np.arange
    import re
    all_cells.record(['v','spikes'])  # , 'u'])
    all_cells.initialize(v=-65.0, u=-14.0)
    # === Run the simulation =====================================================
    tstop = 2000.0
    sim.run(tstop)
    data = None
    data = all_cells.get_data().segments[0]

    #print(len(data.analogsignals[0].times))
    with open('pickles/qi'+str(wg)+'.p', 'wb') as f:
        pickle.dump(data,f)
    # make data none or else it will grow in a loop
    all_cells = None
    data = None
    noise = None
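
# A minimal driver sketch for sim_runner(). The weight values swept over here are
# hypothetical; each call rebuilds the network and pickles the recorded segment
# under pickles/ (the directory is assumed to exist).
if __name__ == '__main__':
    for wg in [0.005, 0.01, 0.02]:
        sim_runner(wg)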
Example #23
 def testOneToOne(self):
     """For all connections created with "OneToOne" ..."""
     prj = neuron.Projection(self.source33, self.target33, neuron.OneToOneConnector())
     assert len(prj.connections) == len(self.target33.local_cells), prj.connections
Example #24
    "e_e": 0.0,
    "e_i": -75.0,
}

sim.setup()

p0 = sim.Population(1, sim.SpikeSourcePoisson, {'rate': 100.0})
p1 = sim.Population(10, TestCell, cell_params)
p2 = sim.Population(10, sim.IF_cond_exp)

p1.record_v(1)
p1.record()
p2.record_v(1)

#curr = sim.DCSource()
#curr.inject_into(p1)

prj01 = sim.Projection(p0, p1, sim.AllToAllConnector())
prj12 = sim.Projection(p1, p2, sim.FixedProbabilityConnector(0.5))

prj01.setWeights(0.1)
prj12.setWeights(0.1)

sim.run(1000.0)

t,v = p1.get_v()[:, 1:3].T
#print p2.get_v()
import pylab
pylab.rcParams['interactive'] = True

pylab.plot(t, v)
Example #25

stdp = sim.STDPMechanism(weight_dependence=sim.AdditiveWeightDependence(w_min=0.0, w_max=0.1),
                         timing_dependence=sim.Vogels2011Rule(eta=0.0, rho=1e-3),
                         weight=0.005, delay=0.5)





fpc = sim.FixedProbabilityConnector(0.02, rng=NumpyRNG(seed=854))



connections = sim.Projection(input, output, fpc,
                             synapse_type=stdp,
                             receptor_type='excitatory')


connections.set(eta=0.0003)


output.record(['spikes', 'v'])

sim.run(simTimeFin - simTimeIni)


print("\n\nETA: input to output:")
print(connections.get('eta', format='list'))

Example #26
 def testSettingDelays(self):
     """Delays should be set correctly when using a Connector object."""
     for srcP in [self.source5, self.source22]:
         for tgtP in [self.target6, self.target33]:
             prj1 = neuron.Projection(srcP, tgtP, neuron.AllToAllConnector(delays=0.321))
             assert prj1.connections[0].nc.delay == 0.321, "Delay should be 0.321, actually %g" % prj1.connections[0].nc.delay
Example #27
import pyNN.neuron as sim  # can of course replace `neuron` with `nest`, `brian`, etc.
import matplotlib.pyplot as plt
import numpy as np

sim.setup(timestep=0.01)
p_in = sim.Population(10, sim.SpikeSourcePoisson(rate=10.0), label="input")
p_out = sim.Population(10, sim.EIF_cond_exp_isfa_ista(), label="AdExp neurons")

syn = sim.StaticSynapse(weight=0.05)
random = sim.FixedProbabilityConnector(p_connect=0.5)
connections = sim.Projection(p_in, p_out, random, syn, receptor_type='excitatory')

p_in.record('spikes')
p_out.record('spikes')                    # record spikes from all neurons
p_out[0:2].record(['v', 'w', 'gsyn_exc'])  # record other variables from first two neurons

sim.run(500.0)

spikes_in = p_in.get_data()
data_out = p_out.get_data()

fig_settings = {
    'lines.linewidth': 0.5,
    'axes.linewidth': 0.5,
    'axes.labelsize': 'small',
    'legend.fontsize': 'small',
    'font.size': 8
}
plt.rcParams.update(fig_settings)
plt.figure(1, figsize=(6, 8))
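
# The original snippet stops after creating the figure. A minimal sketch of how the
# recorded Neo data could be plotted from here (the panel layout and file name are
# assumptions, not part of the original example):
vm = data_out.segments[0].filter(name='v')[0]   # membrane potential of the first two neurons
plt.subplot(2, 1, 1)
plt.plot(vm.times, vm)
plt.ylabel("membrane potential (mV)")
plt.subplot(2, 1, 2)
for i, st in enumerate(data_out.segments[0].spiketrains):
    plt.plot(st.magnitude, np.full(st.size, i), 'k.', markersize=3)
plt.xlabel("time (ms)")
plt.ylabel("neuron index")
plt.savefig("adexp_poisson_input.png")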
Example #28

# PyNN/NineML simulation

sim.setup(timestep=dt)

celltype = Dynamics(name='iaf',
                    subnodes={'nrn': read("../sources/BrunelIaF.xml")['BrunelIaF'],
                              'syn': read("../sources/AlphaPSR.xml")['AlphaPSR']})
celltype.connect_ports('syn.i_synaptic', 'nrn.i_synaptic')

p = sim.Population(2, nineml_cell_type('BrunelIaF', celltype, {'syn': 'syn_weight'})(**cell_parameters))
stim = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))

prj = sim.Projection(stim, p,
                     sim.AllToAllConnector(),
                     sim.StaticSynapse(weight=w_eff, delay=delay),
                     receptor_type='syn')

p.record(['nrn_v', 'syn_a', 'syn_b'])

sim.run(t_stop)

nrn_data = p.get_data().segments[0]

expected = np.zeros((1 + int(round(t_stop/dt)),))
tau_syn = cell_parameters["syn_tau"]
tp = np.arange(0, t_stop - spike_times[0] - delay, dt)/tau_syn
expected[1 + int(round((spike_times[0] + delay)/dt)):] = w_eff * tp * np.exp(-tp)

synaptic_current = nrn_data.filter(name='syn_a')[0]
# for convenience of plotting, we overwrite the synaptic current recorded from the second neuron
Example #29
ii_tgs = all_cells[list(ii_tgs)]
ee_srcs = all_cells[list(ee_srcs)]
ee_tgs = all_cells[list(ee_tgs)]
ei_srcs = all_cells[list(ei_srcs)]
ei_tgs = all_cells[list(ei_tgs)]
ie_srcs = all_cells[list(ie_srcs)]
ie_tgs = all_cells[list(ie_tgs)]

exc_distr = RandomDistribution('normal', [3.125, 10e-2], rng=rng)
exc_syn = sim.StaticSynapse(weight=exc_distr, delay=delay_distr)

#if numpy.any(self.conn_list[:, 0] >= projection.pre.size):
assert np.all(internal_conn_ee.conn_list[:, 0] < ee_srcs.size)
prj_exc_exc = sim.Projection(all_cells,
                             all_cells,
                             internal_conn_ee,
                             exc_syn,
                             receptor_type='excitatory')

inh_distr = RandomDistribution('normal', [5, 2.1e-4], rng=rng)
inh_syn = sim.StaticSynapse(weight=inh_distr, delay=delay_distr)

iis = all_cells[[e[0] for e in IIlist]]
iit = all_cells[[e[1] for e in IIlist]]

rng = NumpyRNG(seed=64754)
delay_distr = RandomDistribution('normal', [50, 100e-3], rng=rng)
prj_inh_inh = sim.Projection(all_cells,
                             all_cells,
                             internal_conn_ii,
                             inh_syn,
Example #30
def std_pynn_simulation(test_component, parameters, initial_values,
                        synapse_components, records, plot=True, sim_time=100.,
                        synapse_weights=1.0, syn_input_rate=100):

    from nineml.abstraction_layer.flattening import ComponentFlattener

    import pyNN.neuron as sim
    import pyNN.neuron.nineml as pyNNml
    from pyNN.neuron.nineml import CoBaSyn

    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.01, min_delay=0.1)

    synapse_components_ML = [CoBaSyn(namespace=ns,  weight_connector=wc)
                             for (ns, wc) in synapse_components]

    celltype_cls = pyNNml.nineml_celltype_from_model(
        name=test_component.name,
        nineml_model=test_component,
        synapse_components=synapse_components_ML,
    )

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)
    initial_values = ComponentFlattener.flatten_namespace_dict(initial_values)

    cells = sim.Population(1, celltype_cls, parameters)

    # Set Initial Values:
    for state, state_initial_value in initial_values.items():
        cells.initialize(state, state_initial_value)

    # For each synapse type, create a spike source:
    if synapse_components:
        input = sim.Population(
            len(synapse_components), sim.SpikeSourcePoisson,
            {'rate': syn_input_rate})
        connector = sim.OneToOneConnector(weights=synapse_weights, delays=0.5)

        conn = []
        for i, (ns, weight_connector) in enumerate(synapse_components):
            proj = sim.Projection(input[i:i + 1], cells, connector, target=ns)
            conn.append(proj)

    # Setup the Records:
    for record in records:
        cells.record(record.what)

    cells.record('spikes')

    # Run the simulation:
    sim.run(sim_time)

    if len(records) == 0:
        assert False

    # Write the Results to a file:
    cells.write_data("Results/nineml.pkl")

    # Plot the values:

    results = cells.get_data().segments[0]

    # Create a list of the tags:
    tags = []
    for record in records:
        if record.tag not in tags:
            tags.append(record.tag)

    # Plot the graphs:
    if plot:
        import pylab
        nGraphs = len(tags)

        # Plot the Records:
        for graphIndex, tag in enumerate(tags):
            pylab.subplot(nGraphs, 1, graphIndex + 1)

            for r in records:
                if r.tag != tag:
                    continue
                trace = results.filter(name=r.what)[0]
                pylab.plot(trace.times, trace, label=r.label)

            pylab.ylabel(tag)
            pylab.legend()

        # Plot the spikes:
        # pylab.subplot(nGraphs,1, len(tags)+1)
        # t_spikes = cells[0:1].getSpikes()[:1]
        # pylab.plot( [1,3],[1,3],'x'  )
        # print t_spikes
        # if t_spikes:
        #    pylab.scatter( t_spikes, t_spikes )

        # Add the X axis to the last plot:
        pylab.xlabel('t [ms]')

        # pylab.suptitle("From Tree-Model Pathway")
        pylab.show()

    sim.end()

    return results
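
# A minimal usage sketch for std_pynn_simulation(). The attribute names on the
# `records` entries (what/tag/label) are inferred from how they are used above;
# the RecordSpec helper and the initial-value keys are assumptions, while the
# parameter values are taken from the hierarchical iaf_3coba example above.
from collections import namedtuple
from nineml.abstraction_layer.testing_utils import TestableComponent

RecordSpec = namedtuple('RecordSpec', ['what', 'tag', 'label'])

results = std_pynn_simulation(
    test_component=TestableComponent('hierachical_iaf_3coba')(),
    parameters={'iaf.cm': 1.0, 'iaf.gl': 50.0, 'iaf.taurefrac': 5.0,
                'iaf.vrest': -65.0, 'iaf.vreset': -65.0, 'iaf.vthresh': -50.0,
                'AMPA.tau': 2.0, 'AMPA.vrev': 0.0,
                'GABAa.tau': 5.0, 'GABAa.vrev': -70.0,
                'GABAb.tau': 50.0, 'GABAb.vrev': -95.0},
    initial_values={'iaf_V': -65.0, 'tspike': -1e99, 'regime': 1002},
    synapse_components=[('AMPA', 'q'), ('GABAa', 'q'), ('GABAb', 'q')],
    records=[RecordSpec('iaf_V', 'Voltage [mV]', 'V'),
             RecordSpec('AMPA_g', 'Conductance [nS]', 'g_AMPA')],
    plot=False)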