def build_network(sim, order=1000, epsilon=0.1, delay=1.5, J=0.1, theta=20.0,
                  tau=20.0, tau_syn=0.1, tau_refrac=2.0, v_reset=10.0, R=1.5,
                  g=5, eta=2, seed=None):
    NE = 4 * order
    NI = 1 * order
    CE = int(epsilon * NE)  # number of excitatory synapses per neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses per neuron
    CMem = tau / R
    J_unit = psp_height(tau, R, tau_syn)
    J_ex = J / J_unit
    J_in = -g * J_ex
    nu_th = theta / (J_ex * CE * R * tau_syn)
    nu_ex = eta * nu_th
    p_rate = 1000.0 * nu_ex * CE
    assert seed is not None
    rng = NumpyRNG(seed)
    neuron_params = {
        "nrn_tau": tau,
        "nrn_v_threshold": theta,
        "nrn_refractory_period": tau_refrac,
        "nrn_v_reset": v_reset,
        "nrn_R": R,
        "syn_tau": tau_syn
    }
    celltype = Dynamics(
        name='iaf',
        subnodes={'nrn': read("sources/BrunelIaF.xml")['BrunelIaF'],
                  'syn': read("sources/AlphaPSR.xml")['AlphaPSR']})
    celltype.connect_ports('syn.i_synaptic', 'nrn.i_synaptic')
    exc = sim.Population(NE, nineml_cell_type(
        'BrunelIaF', celltype, {'syn': 'syn_weight'})(**neuron_params))
    inh = sim.Population(NI, nineml_cell_type(
        'BrunelIaF', celltype, {'syn': 'syn_weight'})(**neuron_params))
    all = exc + inh
    all.initialize(v=RandomDistribution('uniform', (0.0, theta), rng=rng))
    stim = sim.Population(NE + NI, nineml_cell_type(
        'Poisson', read("sources/Poisson.xml")['Poisson'], {})(rate=p_rate))
    print("Connecting network")
    exc_synapse = sim.StaticSynapse(weight=J_ex, delay=delay)
    inh_synapse = sim.StaticSynapse(weight=J_in, delay=delay)
    input_connections = sim.Projection(stim, all, sim.OneToOneConnector(),
                                       exc_synapse, receptor_type="syn")
    exc_connections = sim.Projection(exc, all,
                                     sim.FixedNumberPreConnector(n=CE),
                                     exc_synapse,
                                     receptor_type="syn")  # check is Pre not Post
    inh_connections = sim.Projection(inh, all,
                                     sim.FixedNumberPreConnector(n=CI),
                                     inh_synapse, receptor_type="syn")
    return stim, exc, inh
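# psp_height() is called by build_network() above (and by the scripts further
# below) but is not defined in these snippets. The sketch that follows is an
# assumption: it computes the peak height of the PSP evoked by a
# unit-amplitude alpha-function synaptic current in a leaky
# integrate-and-fire neuron, the normalisation used in the standard
# Brunel-network examples. The variable names and the use of
# scipy.special.lambertw are illustrative, not taken from the original code.
from math import exp

from scipy.special import lambertw


def psp_height(tau_m, R_m, tau_syn):
    """Peak PSP amplitude for a unit-amplitude alpha-function current."""
    C_m = tau_m / R_m                  # membrane capacitance from tau = R * C
    a = tau_m / tau_syn
    b = 1.0 / tau_syn - 1.0 / tau_m
    # time at which the PSP reaches its maximum (d(PSP)/dt = 0), using the
    # k=-1 branch of the Lambert W function
    t_max = (1.0 / b) * (-lambertw(-exp(-1.0 / a) / a, k=-1).real - 1.0 / a)
    return (exp(1.0) / (tau_syn * C_m * b) *
            ((exp(-t_max / tau_m) - exp(-t_max / tau_syn)) / b -
             t_max * exp(-t_max / tau_syn)))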
def test_write_read_roundtrip(self):
    for version in (1.0, 2.0):
        if version == 1.0:
            docs = v1_safe_docs
        else:
            docs = list(instances_of_all_types['NineML'].values())
        for format in format_to_serializer:  # @ReservedAssignment
            try:
                ext = format_to_ext[format]
            except KeyError:
                continue  # ones that can't be written to file (e.g. dict)
            for i, document in enumerate(docs):
                try:
                    doc = document.clone()
                except:
                    # re-attempt the clone (handy for debugging) then re-raise
                    document.clone()
                    raise
                url = os.path.join(
                    self._tmp_dir, 'test{}v{}{}'.format(i, version, ext))
                nineml.write(url, doc, format=format, version=version,
                             indent=2)
                if self.print_serialized and format in self.printable:
                    with open(url) as f:
                        print(f.read())
                reread_doc = nineml.read(url, reload=True)
                self.assertTrue(doc.equals(reread_doc),
                                doc.find_mismatch(reread_doc))
def function():
    for version in (1.0, 2.0):
        if version == 1.0:
            docs = v1_safe_docs
        else:
            docs = list(instances_of_all_types['NineML'].values())
        for format in format_to_serializer:  # @ReservedAssignment
            try:
                ext = format_to_ext[format]
            except KeyError:
                continue  # ones that can't be written to file (e.g. dict)
            for i, document in enumerate(docs):
                doc = document.clone()
                url = os.path.join(_tmp_dir,
                                   'test{}v{}{}'.format(i, version, ext))
                nineml.write(url, doc, format=format, version=version,
                             indent=2)
                if print_serialized and format in printable:
                    with open(url) as f:
                        print(f.read())
                reread_doc = nineml.read(url, reload=True)  # @UnusedVariable
    shutil.rmtree(_tmp_dir)
def _ref_network(self, simulator, external_input=None, **kwargs):
    if simulator == 'nest':
        NetworkClass = NetworkNEST
        Simulation = NESTSimulation
    elif simulator == 'neuron':
        NetworkClass = NetworkNEURON
        Simulation = NeuronSimulation
    else:
        assert False
    model = nineml.read(self.reduced_brunel_path).as_network(
        'ReducedBrunel')
    with Simulation(dt=self.dt * un.ms, seed=self.seed,
                    **model.delay_limits()) as sim:
        network = NetworkClass(model, **kwargs)
        if external_input is not None:
            network.component_array('Ext').play('spike_input__cell',
                                                external_input)
        for pop_name in self.recorded_pops:
            network.component_array(pop_name).record('spike_output')
        sim.run(self.t_stop * un.ms)
    recordings = {}
    for pop_name in self.recorded_pops:
        recordings[pop_name] = network.component_array(pop_name).recording(
            'spike_output')
    return recordings
def test_load_and_validate_all(self):
    for p in self.iterate_xml_paths(os.path.join(self.repo_root, 'xml')):
        # Just check to see whether all elements of the document load
        # without error
        all_elems = list(nineml.read(p).elements)
def read(self, filename):
    document = nineml.read(filename)
    return Network(
        name='root',
        populations=dict((p.name, p) for p in document.populations),
        projections=dict((p.name, p) for p in document.projections),
        selections=dict((s.name, s) for s in document.selections))
def write_nmodl(nineml_file, weight_variables={}, hierarchical_mode=False):
    components = nineml.read(nineml_file)
    output_dir = os.path.dirname(nineml_file)
    basename = os.path.basename(nineml_file)
    if len(components) == 0:
        print 'No components found in file!'
    elif len(components) == 1:
        output_filename = basename.replace(".xml", ".mod").replace("-", "_")
        print "Converting %s to %s" % (nineml_file, output_filename)
        write_nmodldirect(component=components[0],
                          mod_filename=os.path.join(output_dir,
                                                    output_filename),
                          weight_variables=weight_variables,
                          hierarchical_mode=hierarchical_mode)
    else:
        for c in components.itervalues():
            if isinstance(c, al.DynamicsClass):
                output_filename = basename.replace(
                    ".xml", "_%s.mod" % c.name).replace("-", "_")
                print "Converting %s to %s" % (nineml_file, output_filename)
                write_nmodldirect(component=c,
                                  mod_filename=os.path.join(output_dir,
                                                            output_filename),
                                  weight_variables=weight_variables,
                                  hierarchical_mode=hierarchical_mode)
def test_convert_format(self):
    in_path = './' + os.path.join(os.path.relpath(ninemlcatalog.root),
                                  'neuron', 'Izhikevich.xml')
    out_path = os.path.join(self.tmpdir, 'Izhikevich.yml')
    print(out_path)
    args = '{} {}'.format(in_path, out_path)
    convert.run(args.split())
    # Check the output file is yaml
    with open(out_path) as f:
        contents = yaml.load(f)
    self.assertEqual(list(contents.keys()), [b'NineML'])
    # Check the converted document is equivalent
    in_doc = read(in_path)
    out_doc = read(out_path)
    in_doc._url = None
    out_doc._url = None
    self.assertEqual(in_doc, out_doc)
def test_convert_version(self):
    in_path = './' + os.path.join(os.path.relpath(ninemlcatalog.root),
                                  'neuron', 'Izhikevich.xml')
    out_path = os.path.join(self.tmpdir, 'Izhikevich.xml')
    args = '--nineml_version 2 {} {}'.format(in_path, out_path)
    convert.run(args.split())
    # Check the document has been written in version 2 format
    with open(out_path) as f:
        xml = etree.parse(f)
    root = xml.getroot()
    self.assertEqual(root.tag, '{http://nineml.net/9ML/2.0}NineML')
    # Check the converted document is equivalent
    in_doc = read(in_path)
    out_doc = read(out_path)
    in_doc._url = None
    out_doc._url = None
    self.assertEqual(in_doc, out_doc)
def test_to_xml(self):
    context = read(os.path.join(examples_dir, 'normal.xml'))
    comp_class = context['NormalDistribution']
    xml = comp_class.to_xml()
    self.assertEquals(_Element, type(xml))
    with tempfile.TemporaryFile() as f:
        ElementTree(xml).write(f, encoding="UTF-8", pretty_print=True,
                               xml_declaration=True)
def test_to_xml(self):
    document = read(os.path.join(examples_dir, 'AllToAll.xml'))
    comp_class = document['AllToAll']
    xml = comp_class.to_xml()
    self.assertEquals(_Element, type(xml))
    with tempfile.TemporaryFile() as f:
        ElementTree(xml).write(f, encoding="UTF-8", pretty_print=True,
                               xml_declaration=True)
def __init__(self, nineml_model, build_mode='lazy', **kwargs):
    if isinstance(nineml_model, basestring):
        nineml_model = nineml.read(nineml_model).as_network(
            name=os.path.splitext(os.path.basename(nineml_model))[0])
    elif isinstance(nineml_model, Document):
        if nineml_model.url is not None:
            name = os.path.splitext(os.path.basename(nineml_model.url))[0]
        else:
            name = "Anonymous"
        nineml_model = nineml_model.as_network(name=name)
    self._nineml = nineml_model.clone()
    # Get RNG for random distribution values and connectivity
    rng = self.Simulation.active().properties_rng
    if build_mode != 'build_only':
        self.nineml.resample_connectivity(
            connectivity_class=self.ConnectivityClass, rng=rng)
    (flat_comp_arrays, flat_conn_groups,
     flat_selections) = self._flatten_to_arrays_and_conns(self._nineml)
    self._component_arrays = {}
    # Build the PyNN populations
    # Add build args to distinguish models built for this network as
    # opposed to other networks
    build_url = kwargs.pop('build_url', nineml_model.url)
    build_version = nineml_model.name + kwargs.pop('build_version', '')
    for name, comp_array in flat_comp_arrays.items():
        self._component_arrays[name] = self.ComponentArrayClass(
            comp_array, build_mode=build_mode, build_url=build_url,
            build_version=build_version, **kwargs)
    self._selections = {}
    # Build the PyNN Selections
    for selection in flat_selections.values():
        # TODO: Assumes that selections are only concatenations (which is
        #       true for 9MLv1.0 but not v2.0)
        self._selections[selection.name] = self.SelectionClass(
            selection, *[self.component_array(p.name)
                         for p in selection.populations])
    if build_mode != 'build_only':
        # Set the connectivity objects of the projections to the
        # PyNNConnectivity class
        if self.nineml.connectivity_has_been_sampled():
            raise Pype9RuntimeError(
                "Connections have already been sampled, please reset them"
                " using 'resample_connectivity' before constructing "
                "network")
        self._connection_groups = {}
        for name, conn_group in flat_conn_groups.items():
            try:
                source = self._component_arrays[conn_group.source.name]
            except KeyError:
                source = self._selections[conn_group.source.name]
            try:
                destination = self._component_arrays[
                    conn_group.destination.name]
            except KeyError:
                destination = self._selections[conn_group.destination.name]
            self._connection_groups[name] = self.ConnectionGroupClass(
                conn_group, source=source, destination=destination)
    self._finalise_construction()
def test_serialization(self):
    for ext in ext_to_format:
        fname = mkstemp(suffix=ext)[1]
        try:
            self.d.write(fname, register=False, preserve_order=True)
        except NineMLSerializerNotImportedError:
            continue
        reread_d = nineml.read(fname + '#d')
        self._test_indices(reread_d)
def nineml_document(doc_path):
    if doc_path.startswith(CATALOG_PREFIX):
        model = ninemlcatalog.load(doc_path[len(CATALOG_PREFIX):])
    else:
        if (not doc_path.startswith('/') and
                not doc_path.startswith('./') and
                not doc_path.startswith('../')):
            doc_path = './' + doc_path
        model = nineml.read(doc_path, relative_to=os.getcwd())
    return model
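# Illustrative usage of nineml_document() (not part of the original snippets).
# The catalogue path 'neuron/Izhikevich' and the local file name are
# assumptions chosen for the example; CATALOG_PREFIX is the prefix tested for
# in the function above.
catalog_doc = nineml_document(CATALOG_PREFIX + 'neuron/Izhikevich')
local_doc = nineml_document('my_model.xml')  # resolved relative to os.getcwd()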
def load(path, name=None):
    """
    Retrieves a model from the catalog from the given path
    """
    doc = nineml.read(get_full_path(path))
    if name is not None:
        elem = doc[name]
    else:
        elem = doc
    return elem
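# Illustrative call to load() (not part of the original snippets). The
# catalogue path 'neuron/Izhikevich' and element name 'Izhikevich' are
# assumptions based on the paths used in the conversion tests above.
izhikevich = load('neuron/Izhikevich', name='Izhikevich')  # single element
whole_doc = load('neuron/Izhikevich')  # full document when no name is given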
def test_url_resolution(self):
    tmp_dir = tempfile.mkdtemp()
    os.chdir(tmp_dir)
    write(self.tmp_path, dynA, dynB)
    reread_dynA = read('{}#dynA'.format(self.tmp_path))
    self.assertEqual(dynA, reread_dynA)
    # Read again using document cache via Dynamics Properties
    dynBProps = DynamicsProperties(
        name='dynBProps',
        definition='{}#dynB'.format(os.path.join(tmp_dir, self.tmp_path)),
        properties={'P1': 1, 'P2': 2, 'P3': 3})
    self.assertEqual(dynB, dynBProps.component_class)
def main():
    h('{nrn_load_dll("' + LIBNRNMECHPATH + '")}')
    dcn = nineml.read(os.path.join(
        os.environ['HOME'], 'git', 'CerebellarNuclei', '9ml',
        'dcn.xml'))['DCN']
    mc = MultiCompartmentSplit(dcn)
    # dcn_cell = CellMetaClass(dcn)
    mc.check_complexity_file()
    mc.setup_sections()
    mc.setup_mechanisms()
    mc.multisplit()
    mc.set_vec_t()
    mc.set_vec_v('DCN[100]')
    # mc.show_all_sections()
    mc.run_simulation()
    mc.show_info()
    mc.show_plot()
def _ref_network(self, simulator, external_input=None, **kwargs):
    nest.ResetKernel()
    if simulator == 'nest':
        NetworkClass = NetworkNEST
        pyNN_simulator = pyNN.nest.simulator.state
    elif simulator == 'neuron':
        NetworkClass = NetworkNEURON
        pyNN_simulator = pyNN.neuron.simulator.state
    else:
        assert False
    model = nineml.read(self.reduced_brunel_path).as_network(
        'ReducedBrunel')
    network = NetworkClass(model, **kwargs)
    if external_input is not None:
        network.component_array('Ext').play('spike_input__cell',
                                            external_input)
    for pop_name in self.recorded_pops:
        network.component_array(pop_name).record('spikes')
    pyNN_simulator.run(self.t_stop)
    recordings = {}
    for pop_name in self.recorded_pops:
        recordings[pop_name] = network.component_array(pop_name).recording(
            'spikes')
    return recordings
def test_xml_540degree_roundtrip(self):
    document1 = read(self.test_file)
    xml = document1.to_xml()
    document2 = load(xml, read_from=self.test_file)
    self.assertEquals(document1, document2)
def test_load(self):
    document = read(os.path.join(examples_dir, 'Normal.xml'))
    self.assertEquals(type(document['NormalDistribution']),
                      DistributionClass)
def test_xml_540degree_roundtrip(self):
    context1 = read(self.test_file)
    xml = context1.to_xml()
    context2 = load(xml, read_from=self.test_file)
    self.assertEquals(context1, context2)
from pyNN.neuron.nineml import nineml_cell_type
from pyNN.utility.plotting import Figure, Panel

t_stop = 100000
dt = 0.1
sim.setup(timestep=dt)

cell_parameters = {'v_reset': 10.0, 'tau_m': 20.0, 'v_rest': 0.0,
                   'tau_refrac': 2.0, 'v_thresh': 20.0, 'tau_syn_E': 2.0}
p = sim.Population(1, sim.IF_curr_alpha(**cell_parameters))
p.initialize(v=0.0)

rate = 20
stim = sim.Population(1, nineml_cell_type(
    'Poisson', read("../sources/Poisson.xml")['Poisson'], {})(rate=rate))
stim.initialize(t_next=numpy.random.exponential(1000/rate))

weight = 0.1
delay = 0.5
prj = sim.Projection(stim, p, sim.AllToAllConnector(),
                     sim.StaticSynapse(weight=weight, delay=delay),
                     receptor_type='excitatory')

stim.record('spikes')
p.record('v')
sim.run(t_stop)

nrn_data = p.get_data().segments[0]
def test_prototype_xml_540degree_roundtrip(self):
    test_file = os.path.join(examples_dir, 'HodgkinHuxleyModified.xml')
    document1 = read(test_file)
    xml = document1.to_xml()
    document2 = load(xml, read_from=test_file)
    self.assertEquals(document1, document2)
cell_parameters = {
    'v_reset': 10.0,
    'tau_m': 20.0,
    'v_rest': 0.0,
    'tau_refrac': 2.0,
    'v_thresh': 20.0,
    'tau_syn_E': 2.0
}
p = sim.Population(1, sim.IF_curr_alpha(**cell_parameters))
p.initialize(v=0.0)

rate = 20
stim = sim.Population(
    1, nineml_cell_type('Poisson', read("../sources/Poisson.xml")['Poisson'],
                        {})(rate=rate))
stim.initialize(t_next=numpy.random.exponential(1000 / rate))

weight = 0.1
delay = 0.5
prj = sim.Projection(stim, p, sim.AllToAllConnector(),
                     sim.StaticSynapse(weight=weight, delay=delay),
                     receptor_type='excitatory')

stim.record('spikes')
p.record('v')
sim.run(t_stop)
def test_xml_540degree_roundtrip(self):
    document1 = read(self.test_file)
    xml = document1.to_xml()
    document2 = load(xml, read_from=self.test_file)
    self.assertEquals(document1.items(), document2.items())
def test_mismatch_dimension(self):
    context = read(os.path.join(examples_dir, 'HodgkinHuxleyBadUnits.xml'))
    with self.assertRaises(NineMLUnitMismatchError):
        context['HodgkinHuxleyBadUnits']
def test_load(self):
    context = read(os.path.join(examples_dir, 'normal.xml'))
    self.assertEquals(type(context['NormalDistribution']), ComponentClass)
def test_load(self):
    document = read(os.path.join(examples_dir, 'AllToAll.xml'))
    self.assertEquals(type(document['AllToAll']), ConnectionRuleClass)
def test_mismatch_dimension(self):
    document = read(os.path.join(examples_dir,
                                 'HodgkinHuxleyBadUnits.xml'))
    with self.assertRaises(NineMLUnitMismatchError):
        document['HodgkinHuxleyBadUnits']
def test_component_xml_540degree_roundtrip(self):
    test_file = os.path.join(examples_dir, 'HodgkinHuxley.xml')
    document1 = read(test_file)
    xml = document1.to_xml()
    document2 = load(xml, read_from=test_file)
    self.assertEquals(document1, document2)
def get_Izh_FS_component():
    """
    Load Fast spiking Izhikevich XML definition from file and parse into
    Abstraction Layer of Python API.
    """
    return nineml.read('NineML_Izh_FS.xml')['IzhikevichClass']
def test_prototype_xml_540degree_roundtrip(self):
    test_file = os.path.join(examples_dir, 'HodgkinHuxleyModified.xml')
    context1 = read(test_file)
    xml = context1.to_xml()
    context2 = load(xml, read_from=test_file)
    self.assertEquals(context1, context2)
evs = cm.getTrajEventTimes('test')
plt.figure(8)
plt.plot(pts['t'], pts['V'], 'k')
plt.title('Combined passive response model')
plt.xlabel('t')
plt.ylabel('V')

# ==========

print("Testing Hodgkin Huxley cell model")
#test_HH()

print("Testing adaptive Integrate and Fire cell model")
#test_aeIF()

#print("Testing compound cell model")
#test_compound()

print("Testing basic Izhikevich cell model")
#test_Izh()

fs = nineml.read('NineML_Izh_FS.xml')
print("Testing Izhikevich fast spiking cell model from XML import")
print(" at three input current levels")
test_Izh_FS([100, 200, 400])

plt.show()
cm = cell_parameters['nrn_tau'] / cell_parameters['nrn_R']
nu_thresh = 1000.0 * cell_parameters['nrn_v_threshold'] * cm / (
    w_eff * cell_parameters['nrn_tau'] * cell_parameters['syn_tau'])
print("\ntau = {}, R = {}, tau_syn = {}".format(cell_parameters['nrn_tau'],
                                                cell_parameters["nrn_R"],
                                                cell_parameters["syn_tau"]))
print("\nEffective weight = {} nA".format(w_eff))
print("Threshold rate = {} Hz\n".format(nu_thresh))

# PyNN/NineML simulation
sim.setup(timestep=dt)

celltype = Dynamics(
    name='iaf',
    subnodes={'nrn': read("../sources/BrunelIaF.xml")['BrunelIaF'],
              'syn': read("../sources/AlphaPSR.xml")['AlphaPSR']})
celltype.connect_ports('syn.i_synaptic', 'nrn.i_synaptic')

p1 = sim.Population(4, nineml_cell_type(
    'BrunelIaF', celltype, {'syn': 'syn_weight'})(**cell_parameters))
cell_parameters_no_spikes = copy(cell_parameters)
cell_parameters_no_spikes["nrn_v_threshold"] = 1000.0
p2 = sim.Population(4, nineml_cell_type(
    'BrunelIaF', celltype, {'syn': 'syn_weight'})(**cell_parameters_no_spikes))

stim = sim.Population(4, nineml_cell_type(
    'Poisson', read("../sources/Poisson.xml")['Poisson'], {})(
        rate=[0.5*nu_thresh, nu_thresh, 2*nu_thresh, 0.0]))

prj1 = sim.Projection(stim, p1, sim.OneToOneConnector(),
                      sim.StaticSynapse(weight=w_eff, delay=delay),
                      receptor_type='syn')
def __init__(self, nineml_model, build_mode='lazy', timestep=None,
             min_delay=None, max_delay=None, rng=None, **kwargs):
    if isinstance(nineml_model, basestring):
        nineml_model = nineml.read(nineml_model).as_network(
            name=os.path.splitext(os.path.basename(nineml_model))[0])
    elif isinstance(nineml_model, Document):
        if nineml_model.url is not None:
            name = os.path.splitext(os.path.basename(nineml_model.url))[0]
        else:
            name = "Anonymous"
        nineml_model = nineml_model.as_network(name=name)
    self._nineml = deepcopy(nineml_model)
    timestep = timestep if timestep is not None else self.time_step
    min_delay = min_delay if min_delay is not None else self.min_delay
    max_delay = max_delay if max_delay is not None else self.max_delay
    self._set_simulation_params(timestep=timestep, min_delay=min_delay,
                                max_delay=max_delay, **kwargs)
    self._rng = rng if rng else NumpyRNG()
    if build_mode != 'build_only':
        # Convert
        self.nineml.resample_connectivity(
            connectivity_class=self.ConnectivityClass)
    (flat_comp_arrays, flat_conn_groups,
     flat_selections) = self._flatten_to_arrays_and_conns(self._nineml)
    self._component_arrays = {}
    code_gen = self.CellCodeGenerator()
    # Build the PyNN populations
    for name, comp_array in flat_comp_arrays.iteritems():
        self._component_arrays[name] = self.ComponentArrayClass(
            comp_array, rng=self._rng, build_mode=build_mode,
            build_dir=code_gen.get_build_dir(self.nineml.url, name,
                                             group=self.nineml.name),
            **kwargs)
    self._selections = {}
    # Build the PyNN Selections
    for selection in flat_selections.itervalues():
        # TODO: Assumes that selections are only concatenations (which is
        #       true for 9MLv1.0 but not v2.0)
        self._selections[selection.name] = self.SelectionClass(
            selection, *[self.component_array(p.name)
                         for p in selection.populations])
    if build_mode != 'build_only':
        # Set the connectivity objects of the projections to the
        # PyNNConnectivity class
        if self.nineml.connectivity_has_been_sampled():
            raise Pype9RuntimeError(
                "Connections have already been sampled, please reset them"
                " using 'resample_connectivity' before constructing "
                "network")
        self._connection_groups = {}
        for name, conn_group in flat_conn_groups.iteritems():
            try:
                source = self._component_arrays[conn_group.source.name]
            except KeyError:
                source = self._selections[conn_group.source.name]
            try:
                destination = self._component_arrays[
                    conn_group.destination.name]
            except KeyError:
                destination = self._selections[conn_group.destination.name]
            self._connection_groups[name] = self.ConnectionGroupClass(
                conn_group, source=source, destination=destination,
                rng=self._rng)
    self._finalise_construction()
def read(url, class_map=class_map, **kwargs):
    return nineml.read(url, class_map=class_map, **kwargs)
weight = 0.1  # EPSP height from a single spike received at resting potential
scale_factor = psp_height(cell_parameters['nrn_tau'],
                          cell_parameters["nrn_R"],
                          cell_parameters["syn_tau"])
w_eff = weight / scale_factor
delay = 0.5
print("\nEffective weight = {} nA\n".format(w_eff))

# PyNN/NineML simulation
sim.setup(timestep=dt)

celltype = Dynamics(
    name='iaf',
    subnodes={'nrn': read("../sources/BrunelIaF.xml")['BrunelIaF'],
              'syn': read("../sources/AlphaPSR.xml")['AlphaPSR']})
celltype.connect_ports('syn.i_synaptic', 'nrn.i_synaptic')

p = sim.Population(2, nineml_cell_type(
    'BrunelIaF', celltype, {'syn': 'syn_weight'})(**cell_parameters))
stim = sim.Population(1, sim.SpikeSourceArray(spike_times=spike_times))
prj = sim.Projection(stim, p, sim.AllToAllConnector(),
                     sim.StaticSynapse(weight=w_eff, delay=delay),
                     receptor_type='syn')

p.record(['nrn_v', 'syn_a', 'syn_b'])
sim.run(t_stop)