def export_to_neuroml2(hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, validate=True): from neuron import * from nrn import * if hoc_or_python_file is not None: if hoc_or_python_file.endswith(".py"): print("Importing Python scripts not yet implemented...") else: h.load_file(hoc_or_python_file) print "Loaded NEURON file: %s"%hoc_or_python_file h.load_file("mview.hoc") h('objref mv') h('mv = new ModelView()') h.load_file("%s/mview_neuroml2.hoc"%(os.path.dirname(__file__))) h('objref mvnml') h('mvnml = new ModelViewNeuroML2(mv)') nml2_level = 2 if includeBiophysicalProperties else 1 h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles)) if validate: validate_neuroml2(nml2_file_name)
def export_to_neuroml2(hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, known_rev_potentials={}, validate=True): from neuron import * from nrn import * if hoc_or_python_file is not None: if hoc_or_python_file.endswith(".py"): print_comment_v( "***************\nImporting Python scripts not yet implemented...\n***************" ) else: if not os.path.isfile(hoc_or_python_file): print_comment_v( "***************\nProblem importing file %s (%s)..\n***************" % (hoc_or_python_file, os.path.abspath(hoc_or_python_file))) h.load_file( 1, hoc_or_python_file ) # Using 1 to force loading of the file, in case file with same name was loaded before... else: print_comment_v( "hoc_or_python_file variable is None; exporting what's currently in memory..." ) for ion in known_rev_potentials.keys(): set_erev_for_mechanism(ion, known_rev_potentials[ion]) print_comment_v("Loaded NEURON file: %s" % hoc_or_python_file) h.load_file("mview.hoc") h('objref mv') h('mv = new ModelView(0)') h.load_file("%s/mview_neuroml2.hoc" % (os.path.dirname(__file__))) h('objref mvnml') h('mvnml = new ModelViewNeuroML2(mv)') nml2_level = 2 if includeBiophysicalProperties else 1 h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles)) if validate: validate_neuroml2(nml2_file_name) h('mv.destroy()')
def save_network(nml_doc, nml_file_name, validate=True, comment=True, format='xml'):
    """
    Save the built NeuroML document to nml_file_name, appending
    generator-version information to its notes, and optionally validate it.

    :param nml_doc: the NeuroMLDocument to save
    :param nml_file_name: target file name
    :param validate: if True, validate the written file with pyNeuroML
    :param comment: currently unused; kept for backward compatibility
    :param format: 'xml' or 'hdf5'
    :raises ValueError: if format is not one of the supported values
    """
    info = "\n\nThis NeuroML 2 file was generated by OpenCortex v%s using: \n" % (opencortex.__version__)
    info += " libNeuroML v%s\n" % (neuroml.__version__)
    info += " pyNeuroML v%s\n\n " % (pyneuroml.__version__)

    if nml_doc.notes:
        nml_doc.notes += info
    else:
        nml_doc.notes = info

    if format == 'xml':
        writers.NeuroMLWriter.write(nml_doc, nml_file_name)
    elif format == 'hdf5':
        writers.NeuroMLHdf5Writer.write(nml_doc, nml_file_name)
    else:
        # Fixed: an unsupported format previously fell through silently,
        # writing nothing while still reporting "Saved NeuroML...".
        raise ValueError("Unsupported format: %s (use 'xml' or 'hdf5')" % format)

    opencortex.print_comment_v("Saved NeuroML with id: %s to %s" % (nml_doc.id, nml_file_name))

    if validate:
        from pyneuroml.pynml import validate_neuroml2
        passed = validate_neuroml2(nml_file_name)
        if passed:
            opencortex.print_comment_v("Generated NeuroML file is valid")
        else:
            opencortex.print_comment_v("Generated NeuroML file is NOT valid!")
def export_to_neuroml2(hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, known_rev_potentials={}, validate=True): from neuron import * from nrn import * if hoc_or_python_file is not None: if hoc_or_python_file.endswith(".py"): print_comment_v("***************\nImporting Python scripts not yet implemented...\n***************") else: if not os.path.isfile(hoc_or_python_file): print_comment_v("***************\nProblem importing file %s (%s)..\n***************"%(hoc_or_python_file, os.path.abspath(hoc_or_python_file))) h.load_file(1, hoc_or_python_file) # Using 1 to force loading of the file, in case file with same name was loaded before... else: print_comment_v("hoc_or_python_file variable is None; exporting what's currently in memory...") for ion in known_rev_potentials.keys(): set_erev_for_mechanism(ion,known_rev_potentials[ion]) print_comment_v("Loaded NEURON file: %s"%hoc_or_python_file) h.load_file("mview.hoc") h('objref mv') h('mv = new ModelView(0)') h.load_file("%s/mview_neuroml2.hoc"%(os.path.dirname(__file__))) h('objref mvnml') h('mvnml = new ModelViewNeuroML2(mv)') nml2_level = 2 if includeBiophysicalProperties else 1 h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles)) if validate: validate_neuroml2(nml2_file_name) h('mv.destroy()')
def export_to_neuroml2( hoc_or_python_file, nml2_file_name, includeBiophysicalProperties=True, separateCellFiles=False, validate=True ): from neuron import * from nrn import * if hoc_or_python_file is not None: if hoc_or_python_file.endswith(".py"): print_comment_v("Importing Python scripts not yet implemented...") else: h.load_file( 1, hoc_or_python_file ) # Using 1 to force loading of the file, in case file with same name was loaded before... else: print_comment_v("hoc_or_python_file variable is None; exporting what's currently in memory...") print_comment_v("Loaded NEURON file: %s" % hoc_or_python_file) h.load_file("mview.hoc") h("objref mv") h("mv = new ModelView(0)") h.load_file("%s/mview_neuroml2.hoc" % (os.path.dirname(__file__))) h("objref mvnml") h("mvnml = new ModelViewNeuroML2(mv)") nml2_level = 2 if includeBiophysicalProperties else 1 h.mvnml.exportNeuroML2(nml2_file_name, nml2_level, int(separateCellFiles)) if validate: validate_neuroml2(nml2_file_name) h("mv.destroy()")
def save_network(nml_doc, nml_file_name, validate=True, format='xml', max_memory=None, target_dir='./', use_subfolder=True):
    """
    Save the contents of the built NeuroML document, including the network
    to the file specified by `nml_file_name`, optionally specifying the
    `target_dir`

    :param nml_doc: the NeuroMLDocument to save
    :param nml_file_name: file name (written inside target_dir)
    :param validate: if True, validate the written file with pyNeuroML
    :param format: 'xml', 'xml_hdf5' (XML plus a parallel .h5 file) or 'hdf5'
    :param max_memory: passed to the validator (JVM max heap), if validating
    :param target_dir: directory to write into
    :param use_subfolder: forwarded to the model-copy helper
    :raises ValueError: if format is not one of the supported values
    """
    # Copy any files the model references into the target directory first
    oc_build._finalise_copy_to_dir_for_model(nml_doc, target_dir, use_subfolder=use_subfolder)

    info = "\n\nThis NeuroML 2 file was generated by OpenCortex v%s using: \n" % (
        opencortex.__version__)
    info += "    libNeuroML v%s\n" % (neuroml.__version__)
    info += "    pyNeuroML v%s\n\n    " % (pyneuroml.__version__)

    if nml_doc.notes:
        nml_doc.notes += info
    else:
        nml_doc.notes = info

    nml_full_file_name = target_dir + '/' + nml_file_name

    if format == 'xml':
        writers.NeuroMLWriter.write(nml_doc, nml_full_file_name)
    elif format == 'xml_hdf5':
        writers.NeuroMLHdf5Writer.write_xml_and_hdf5(
            nml_doc, nml_full_file_name, '%s.h5' % nml_full_file_name)
    elif format == 'hdf5':
        writers.NeuroMLHdf5Writer.write(nml_doc, nml_full_file_name)
    else:
        # Fixed: an unsupported format previously fell through silently,
        # writing nothing while still reporting "Saved NeuroML...".
        raise ValueError("Unsupported format: %s (use 'xml', 'xml_hdf5' or 'hdf5')" % format)

    opencortex.print_comment_v("Saved NeuroML with id: %s to %s" % (nml_doc.id,
                                                                    nml_full_file_name))

    if validate:
        from pyneuroml.pynml import validate_neuroml2

        passed = validate_neuroml2(nml_full_file_name, max_memory=max_memory)
        if passed:
            opencortex.print_comment_v("Generated NeuroML file is valid")
        else:
            opencortex.print_comment_v("Generated NeuroML file is NOT valid!")
def generate_neuroml2_from_network(nl_model, nml_file_name=None, print_summary=True, seed=1234, format='xml', base_dir=None, copy_included_elements=False, target_dir=None, validate=False):
    """
    Generate and save NeuroML2 file (in either XML or HDF5 format) from the
    NeuroMLlite description

    :param nl_model: the NeuroMLlite network model to export
    :param nml_file_name: target file; derived from the model id if None
    :param print_summary: if True, print a summary of the generated document
    :param seed: random seed handed on to the network generator
    :param format: 'xml' or 'hdf5'
    :param base_dir: directory used to locate referenced source files
    :param copy_included_elements: unused here; kept for interface compatibility
    :param target_dir: directory to save to; falls back to base_dir
    :param validate: if True (XML format only), validate the saved file
    :return: tuple (nml_file_name, nml_doc)
    """
    print_v("Generating NeuroML2 for %s%s..." % (nl_model.id, ' (base dir: %s; target dir: %s)' % (base_dir, target_dir) if base_dir or target_dir else ''))

    import neuroml
    from neuroml.hdf5.NetworkBuilder import NetworkBuilder

    neuroml_handler = NetworkBuilder()
    generate_network(nl_model, neuroml_handler, seed=seed, base_dir=base_dir)
    nml_doc = neuroml_handler.get_nml_doc()

    for i in nl_model.input_sources:
        # Only add elements not already present in the generated document
        if nml_doc.get_by_id(i.id) is None:
            if i.neuroml2_source_file:
                incl = neuroml.IncludeType(_locate_file(i.neuroml2_source_file, base_dir))
                if incl not in nml_doc.includes:
                    nml_doc.includes.append(incl)

            if i.neuroml2_input:
                input_params = i.parameters if i.parameters else {}

                # TODO make more generic...
                input_comp = None
                if i.neuroml2_input.lower() == 'pulsegenerator':
                    input_comp = neuroml.PulseGenerator(id=i.id)
                    nml_doc.pulse_generators.append(input_comp)
                elif i.neuroml2_input.lower() == 'pulsegeneratordl':
                    input_comp = neuroml.PulseGeneratorDL(id=i.id)
                    nml_doc.pulse_generator_dls.append(input_comp)
                elif i.neuroml2_input.lower() == 'poissonfiringsynapse':
                    input_comp = neuroml.PoissonFiringSynapse(id=i.id)
                    nml_doc.poisson_firing_synapses.append(input_comp)

                # Fixed: was exec('input.%s = "%s"' % ...), which breaks on
                # values containing quotes; setattr sets the same string value
                # without building and executing source code.
                for p in input_params:
                    setattr(input_comp, p, '%s' % evaluate(input_params[p], nl_model.parameters))

    for c in nl_model.cells:
        if c.neuroml2_source_file:
            incl = neuroml.IncludeType(_locate_file(c.neuroml2_source_file, base_dir))
            found_cell = False
            # Fixed: iterate over a snapshot, since the loop removes entries
            # from nml_doc.cells (mutation during iteration skips elements).
            for cell in list(nml_doc.cells):
                if cell.id == c.id:
                    nml_doc.cells.remove(cell)  # Better to use imported cell file; will have channels, etc.
                    nml_doc.includes.append(incl)
                    found_cell = True

            # NOTE(review): the original also looped over nl_model.populations
            # here when the cell was not found, but the loop body was `pass`
            # (a no-op), so it has been removed.

            if incl not in nml_doc.includes:
                nml_doc.includes.append(incl)

        if c.neuroml2_cell:
            cell_params = c.parameters if c.parameters else {}

            # TODO make more generic...
            if c.neuroml2_cell.lower() == 'spikegenerator':
                cell = neuroml.SpikeGenerator(id=c.id)
                nml_doc.spike_generators.append(cell)
            elif c.neuroml2_cell.lower() == 'spikegeneratorpoisson':
                cell = neuroml.SpikeGeneratorPoisson(id=c.id)
                nml_doc.spike_generator_poissons.append(cell)
            elif c.neuroml2_cell.lower() == 'spikegeneratorrefpoisson':
                cell = neuroml.SpikeGeneratorRefPoisson(id=c.id)
                nml_doc.spike_generator_ref_poissons.append(cell)
            else:
                raise Exception(
                    'The neuroml2_cell: %s is not yet supported...' % c.neuroml2_cell)

            # Fixed: setattr instead of exec (see inputs above)
            for p in cell_params:
                setattr(cell, p, '%s' % evaluate(cell_params[p], nl_model.parameters))

    for s in nl_model.synapses:
        if nml_doc.get_by_id(s.id) is None:
            if s.neuroml2_source_file:
                incl = neuroml.IncludeType(_locate_file(s.neuroml2_source_file, base_dir))
                if incl not in nml_doc.includes:
                    nml_doc.includes.append(incl)

    # Look for and add the PyNN based elements to the NeuroMLDocument
    _extract_pynn_components_to_neuroml(nl_model, nml_doc)

    if print_summary:
        # Print info
        print_v(nml_doc.summary())

    # Save to file
    if target_dir is None:
        target_dir = base_dir

    if format == 'xml':
        if not nml_file_name:
            nml_file_name = _locate_file('%s.net.nml' % nml_doc.id, target_dir)
        from neuroml.writers import NeuroMLWriter
        NeuroMLWriter.write(nml_doc, nml_file_name)

    if format == 'hdf5':
        if not nml_file_name:
            nml_file_name = _locate_file('%s.net.nml.h5' % nml_doc.id, target_dir)
        from neuroml.writers import NeuroMLHdf5Writer
        NeuroMLHdf5Writer.write(nml_doc, nml_file_name)

    print_v("Written NeuroML to %s" % nml_file_name)

    if validate and format == 'xml':
        from pyneuroml import pynml
        success = pynml.validate_neuroml2(nml_file_name, verbose_validate=False)
        if success:
            print_v('Generated file is valid NeuroML2!')
        else:
            print_v('Generated file is NOT valid NeuroML2!')

    return nml_file_name, nml_doc
channel_id = 'Channelpedia_%s_%s' % (root.attrib['ModelName'].replace( "/", "_").replace(" ", "_").replace(".", "_"), root.attrib['ModelID']) print("Channel id: %s" % channel_id) file_out = open("test/%s.xml" % channel_id, 'w') file_out.write(cpd_xml) file_out.close() nml2_file_name = "%s.channel.nml" % channel_id nml2_file_path = "test/" + nml2_file_name unknowns = channelpedia_xml_to_neuroml2(cpd_xml, nml2_file_path, unknowns) pynml.validate_neuroml2(nml2_file_path) doc = loaders.NeuroMLLoader.load(nml2_file_path) #print dir(doc) gates = [] for ic in doc.ion_channel_hhs: if ic.id == channel_id: for g in ic.gates: gates.append(g.id) new_lems_file = "test/LEMS_Test_%s.xml" % channel_id lems_helper = generate(nml2_file_name, channel_id, gates, temperature,
net.explicit_inputs.append(exp_input) for post in range(0, size1): if random.random() <= prob_connection: syn = Connection(id=count, pre_cell_id="../%s[%i]" % (pop0.id, pre), synapse=syn0.id, post_cell_id="../%s[%i]" % (pop1.id, post)) proj.connections.append(syn) count += 1 nml_file = 'izhikevich2007_network.nml' writers.NeuroMLWriter.write(nml_doc, nml_file) print("Written network file to: " + nml_file) pynml.validate_neuroml2(nml_file) simulation_id = "example_izhikevich2007network_sim" simulation = LEMSSimulation(sim_id=simulation_id, duration=1000, dt=0.1, simulation_seed=123) simulation.assign_simulation_target(net.id) simulation.include_neuroml2_file(nml_file) simulation.create_event_output_file("pop0", "%s.0.spikes.dat" % simulation_id, format='ID_TIME') for pre in range(0, size0): simulation.add_selection_to_event_output_file("pop0", pre, 'IzPop0[{}]'.format(pre),
# Convert one Channelpedia XML model (cpd_xml) to NeuroML2, validate it, and
# build a LEMS test file for the channel.
# NOTE(review): cpd_xml, unknowns, temperature, pynml, loaders,
# channelpedia_xml_to_neuroml2 and generate are defined elsewhere in the
# enclosing script.
root = ET.fromstring(cpd_xml)

# Unique channel id, e.g. Channelpedia_<ModelName>_<ModelID>, with characters
# unsafe for file names replaced by underscores.
channel_id = 'Channelpedia_%s_%s' % (root.attrib['ModelName'].replace("/", "_").replace(" ", "_").replace(".", "_"), root.attrib['ModelID'])
print("Channel id: %s" % channel_id)

# Keep a copy of the raw Channelpedia XML for reference.
# Fixed: use context managers so files are closed even if a write fails.
with open("test/%s.xml" % channel_id, 'w') as file_out:
    file_out.write(cpd_xml)

nml2_file_name = "%s.channel.nml" % channel_id
nml2_file_path = "test/" + nml2_file_name

# Convert, accumulating any unconvertible constructs in `unknowns`.
unknowns = channelpedia_xml_to_neuroml2(cpd_xml, nml2_file_path, unknowns)

pynml.validate_neuroml2(nml2_file_path)

doc = loaders.NeuroMLLoader.load(nml2_file_path)

# Collect the ids of all gates on the matching ion channel.
gates = []
for ic in doc.ion_channel_hhs:
    if ic.id == channel_id:
        for g in ic.gates:
            gates.append(g.id)

new_lems_file = "LEMS_Test_%s.xml" % channel_id
lems_helper = generate(nml2_file_path, channel_id, gates, temperature, ion=doc.ion_channel_hhs[0].species)

with open(new_lems_file, 'w') as file_out:
    file_out.write(lems_helper)