def main():
    """Program entry point.

    Parses the LEMS file named on the command line, resolves the model,
    and either exports it as dLEMS (JSON) when ``--dlems`` is given or
    builds and runs the simulation.
    """
    args = process_args()
    print('Parsing and resolving model: ' + args.lems_file)

    model = Model()
    if args.I is not None:
        # Register extra include search paths given via -I.
        for include_dir in args.I:
            model.add_include_directory(include_dir)
    model.import_from_file(args.lems_file)
    resolved_model = model.resolve()

    print('Building simulation')
    sim = SimulationBuilder(resolved_model).build()
    #sim.dump("Afterbuild:")

    if args.dlems:
        # Derive the JSON output name from the LEMS file name up front so we
        # can report it. FIX: the original printed the undefined name
        # `dlems_info` here, which raised NameError.
        dlems_file_name = args.lems_file.replace('.xml', '.json')
        if dlems_file_name == args.lems_file:
            dlems_file_name = args.lems_file + '.json'
        print('Exporting as: ' + dlems_file_name)

        from lems.dlems.exportdlems import export_component

        target = model.targets[0]
        sim_comp = model.components[target]
        target_net = sim_comp.parameters['target']
        target_comp = model.components[target_net]

        if target_comp.type == 'network':
            # Export each population's component individually.
            for child in target_comp.children:
                if child.type == 'population':
                    comp = model.components[child.parameters['component']]
                    export_component(model, comp, sim_comp, child.id,
                                     file_name=dlems_file_name)
        else:
            export_component(model, sim_comp, target_comp)
    else:
        print('Running simulation')
        sim.run()
        process_simulation_output(sim, args)
def load_model(model_filename, folder=None):
    """Load and return the LEMS model stored in *model_filename*.

    :param model_filename: name of the model file to load
    :param folder: optional folder to resolve the filename against
    :returns: the imported (unresolved) Model
    """
    xml_path = lems_file(model_filename, folder)
    lems_model = Model()
    lems_model.import_from_file(xml_path)
    # Resolution is deliberately not performed here:
    # modelextended = model.resolve()
    return lems_model
def load_model(self):
    """Load the LEMS model from ``self.xml_location`` and preprocess it.

    :returns: tuple of (model, svboundaries, couplinglist, noisepresent,
        nsigpresent) produced by the ``pp_*`` preprocessing helpers
    """
    # Instantiate the LEMS library and import the XML model.
    lems_model = Model()
    lems_model.import_from_file(self.xml_location)
    self.XSD_validate_XML()

    # Preprocess the template to simplify rendering later on.
    self.pp_pow(lems_model)
    noisepresent, nsigpresent = self.pp_noise(lems_model)
    couplinglist = self.pp_cplist(lems_model)
    svboundaries = self.pp_bound(lems_model)

    return lems_model, svboundaries, couplinglist, noisepresent, nsigpresent
def get_component_types(srcdir):
    """Obtain a list of all defined component types.

    Does not return anything; fills the module-level dictionaries
    ``comp_types``, ``comp_type_src``, ``comp_type_desc`` and
    ``ordered_comp_types``.

    Works in two stages. First, each XML definition file is loaded through
    the LEMS API to collect per-ComponentType metadata: the type itself, the
    file it came from, and its description. Second, the same XML file is
    re-scanned as plain text to record the order in which the ComponentTypes
    are defined, because the LEMS API does not guarantee iteration order.

    :param srcdir: directory holding the component definition XML files
    :returns: nothing
    """
    for comp_definition in comp_definitions:
        fullpath = "{}/{}.xml".format(srcdir, comp_definition)

        # Stage 1: metadata via the LEMS API.
        model = Model(include_includes=False)
        model.import_from_file(fullpath)
        for ct in model.component_types:
            comp_types[ct.name] = ct
            comp_type_src[ct.name] = comp_definition
            if ct.description is not None:
                comp_type_desc[ct.name] = ct.description
            else:
                comp_type_desc[ct.name] = "ComponentType: " + ct.name

        # Stage 2: scan the raw XML to capture definition order.
        marker = '<ComponentType name='
        ordered_names = []
        with open(fullpath) as xml_fp:
            for raw_line in xml_fp:
                if marker in raw_line:
                    # Name starts just past the opening quote that follows
                    # the marker; it ends at the next double quote.
                    start = raw_line.index(marker) + len(marker) + 1
                    end = raw_line.find('"', start)
                    ordered_names.append(raw_line[start:end])
        ordered_comp_types[comp_definition] = ordered_names
def validate_model(filename, name=None, config=None):
    """ Check that a model is valid

    Args:
        filename (:obj:`str`): path to model
        name (:obj:`str`, optional): name of model for use in error messages
        config (:obj:`Config`, optional): whether to fail on missing includes

    Returns:
        :obj:`tuple`:

            * nested :obj:`list` of :obj:`str`: nested list of errors (e.g., required ids missing or ids not unique)
            * nested :obj:`list` of :obj:`str`: nested list of warnings
            * :obj:`Model`: model
    """
    config = config or get_config()

    errors = []
    warnings = []
    model = None

    # Run jNeuroML validation, capturing its console chatter.
    with StandardOutputErrorCapturer(relay=False, level=StandardOutputErrorCapturerLevel.c):
        valid, output = validate_neuroml2_lems_file(filename, exit_on_fail=False, return_string=True)
        if not valid:
            errors.append(['`{}` is not a valid LEMS file.'.format(filename), [[output]]])
            return (errors, warnings, model)

    # Extract the NeuroML2 core type definitions bundled in the jNeuroML jar
    # so pyLEMS can resolve the model's includes against them.
    core_types_dir = tempfile.mkdtemp()
    try:
        jar_filename = get_path_to_jnml_jar()
        with zipfile.ZipFile(jar_filename, 'r') as jar_file:
            neuroml2_core_type_members = (name for name in jar_file.namelist()
                                          if name.startswith('NeuroML2CoreTypes/'))
            jar_file.extractall(core_types_dir, members=neuroml2_core_type_members)

        model = Model(include_includes=True,
                      fail_on_missing_includes=config.VALIDATE_IMPORTED_MODEL_FILES)
        model.add_include_directory(os.path.join(core_types_dir, 'NeuroML2CoreTypes'))
        model.import_from_file(filename)
    finally:
        # FIX: previously the temporary directory leaked whenever extraction
        # or import raised; always clean it up.
        shutil.rmtree(core_types_dir)

    return (errors, warnings, model)
def get_model(self):
    """Build and return a Model pre-populated with the dimensions and
    units the tests rely on."""
    model = Model()

    # Dimensions (SI exponents).
    for dimension in (
            Dimension('voltage', m=1, l=3, t=-3, i=-1),
            Dimension('time', t=1),
            Dimension('capacitance', m=-1, l=-2, t=4, i=2),
            Dimension('conductanceDensity', m="-1", l="-4", t="3", i="2"),
            Dimension('temperature', k=1)):
        model.add(dimension)

    # Units attached to the dimensions above.
    # NOTE(review): 'microFarad' is declared with power -12 (pico rather
    # than micro, -6) — looks suspicious, but it is preserved exactly as in
    # the original; confirm against the tests that use it.
    for unit in (
            Unit('volt', 'V', 'voltage', 0),
            Unit('milliVolt', 'mV', 'voltage', -3),
            Unit('milliSecond', 'ms', 'time', -3),
            Unit('microFarad', 'uF', 'capacitance', -12),
            Unit('mS_per_cm2', 'mS_per_cm2', 'conductanceDensity', 1),
            Unit('Kelvin', 'K', 'temperature', 0),
            Unit('celsius', 'degC', 'temperature', 0, offset=273.15),
            Unit('hour', 'hour', 'time', scale=3600),
            Unit('min', 'min', 'time', scale=60)):
        model.add(unit)

    return model
# Serialise the dLEMS dictionary to JSON and write it to disk.
dlems_file = open(dlems_file_name, 'w')
dlems_file.write(json.dumps(dlems, indent=4, separators=(',', ': ')))
dlems_file.close()

# Read the file straight back and echo it so the user can inspect the output.
reopen = open(dlems_file_name, 'r')
print(reopen.read())
reopen.close()
print("Written to %s" % dlems_file_name)


if __name__ == '__main__':
    model = Model()
    # Take the LEMS file from the command line; fall back to a bundled example.
    try:
        lems_file = sys.argv[1]
    except:  # NOTE(review): bare except — IndexError is what is intended here
        lems_file = '../NeuroML2/LEMSexamples/LEMS_NML2_Ex9_FN.xml'

    model.add_include_directory('../NeuroML2/NeuroML2CoreTypes')
    print('Importing LEMS file from: %s' % lems_file)
    model.import_from_file(lems_file)

    # Walk from the simulation target to the network it simulates.
    target = model.targets[0]
    sim_comp = model.components[target]
    target_net = sim_comp.parameters['target']
# NeuroML Core Type definition files to process, in documentation order.
files = [
    "Cells", "Synapses", "Channels", "Inputs", "Networks", "PyNN",
    "NeuroMLCoreDimensions", "NeuroMLCoreCompTypes"
]

# Accumulators keyed by ComponentType name: the type object, the file it was
# defined in, and a human-readable description.
comp_types = {}
comp_type_src = {}
comp_type_desc = {}
ordered_comp_types = {}

for file in files:
    fullfile = "%s/%s.xml" % (nml_src, file)
    # NOTE(review): Python 2 print statement — this chunk predates Python 3.
    print "\n---------- Reading LEMS file: " + fullfile
    model = Model(include_includes=False)
    model.import_from_file(fullfile)

    # Collect per-type metadata via the LEMS API.
    for comp_type in model.component_types:
        comp_types[comp_type.name] = comp_type
        comp_type_src[comp_type.name] = file
        comp_type_desc[
            comp_type.
            name] = comp_type.description if comp_type.description is not None else "ComponentType: " + comp_type.name

    # Re-scan the raw XML to recover the order types are defined in,
    # since the LEMS API does not guarantee iteration order.
    ordered_comp_type_list = []
    with open(fullfile) as fp:
        for line in fp:
            s = '<ComponentType name='
            if s in line:
                i = line.index(s)
                e = line.find('"', i + len(s) + 1)
def main(srcdir, destdir):
    """Main parser and generator function.

    Reads the NeuroML Core Type XML definition files (cloning the NeuroML2
    repository into a temporary directory when no source directory is
    given), collects component-type metadata, and renders one Markdown page
    per definition file into ``destdir``.

    :param srcdir: directory holding source NeuroML Core Type XML files
    :type srcdir: str
    :param destdir: directory where generated files should be stored
    :type destdir: str
    :returns: nothing
    """
    # If not defined or empty, download a new copy to a temporary directory.
    # FIX: the second operand previously read the undefined name ``src``
    # (``src == ""``), raising NameError whenever srcdir was truthy.
    if not srcdir or srcdir == "":
        print("No src directory specified. Cloning NeuroML2 repo")
        tempdir = tempfile.TemporaryDirectory()
        tmpsrcdir = tempdir.name
        print("Temporariy directory: {}".format(tmpsrcdir))
        clone_command = [
            "git", "clone", "--depth", "1", "--branch", nml_branch,
            GitHubRepo, tmpsrcdir
        ]
        subprocess.run(clone_command)
    else:
        tmpsrcdir = srcdir

    # TODO: add LEMS examples
    # We can't do this at the moment, because the LEMS python bits are in
    # pyneuroml, while we point to the libNeuroML docs for the NeuroML2 usage
    # examples. pyneuroml does not currently have docs on RTD, and some more
    # work will be required to tell our templates when an example is NeuroML
    # and when it is LEMS so it can point to the correct docs.
    # exampledirs = [tmpsrcdir + "/examples/", tmpsrcdir + "/LEMSexamples/"]
    exampledirs = [tmpsrcdir + "/examples/"]
    tmpsrcdir = tmpsrcdir + "/NeuroML2CoreTypes/"

    # Get current commit (embedded in the generated page headers).
    commit_command = ["git", "log", "-1", "--pretty=format:%H"]
    output = subprocess.run(commit_command, capture_output=True,
                            cwd=tmpsrcdir, text=True)
    nml_commit = output.stdout

    # read the downloaded files
    get_component_types(tmpsrcdir)
    # get examples
    get_comp_examples(exampledirs)
    # get python signatures
    get_libneuroml_signatures()

    if not destdir or destdir == "":
        destdir = "."
    print("Output files will be written to {} directory".format(destdir))

    for comp_definition in comp_definitions:
        fullpath = "{}/{}.xml".format(tmpsrcdir, comp_definition)
        outputfile = "{}/{}.md".format(destdir, comp_definition)

        # Stage 1: load the definition file through the LEMS API.
        model = Model(include_includes=False)
        model.import_from_file(fullpath)
        print("Processing {}".format(fullpath))
        print("Writing output to {}".format(outputfile))
        # NOTE(review): plain open/close — a ``with`` block would survive a
        # rendering exception; behaviour kept as-is here.
        ast_doc = open(outputfile, 'w')

        # Page header
        print(asttemplates.page_header.render(
            comp_definition=comp_definition,
            comp_description=format_description(model.description),
            GitHubCompSources=GitHubCompSources,
            nml_version=nml_version, nml_branch=nml_branch,
            nml_date=nml_date, nml_commit=nml_commit), file=ast_doc)

        # Dimensions and units
        if "Dimensions" in comp_definition:
            dimensions = model.dimensions
            dimensions = sorted(dimensions, key=lambda dim: dim.name)
            units = model.units
            units = sorted(units, key=lambda unit: unit.symbol)

            # lables are translated as lowercase in jupyter, so we append two
            # consecutive underscores to differentiate same ones, like M and m.
            symbols = []
            for unit in units:
                if unit.symbol.lower() in symbols:
                    unit.symbol = unit.symbol + "__"
                symbols.append(unit.symbol.lower())

            print(asttemplates.dimension.render(
                comp_definition=comp_definition, dimensions=dimensions,
                units=units), file=ast_doc)

            # Compute conversion factors between units of the same dimension.
            for unit in units:
                unit.factors = []
                for unit2 in units:
                    if unit.symbol != unit2.symbol and unit.dimension == unit2.dimension:
                        si_val = model.get_numeric_value(
                            "1%s" % unit.symbol.replace("__", ""),
                            unit.dimension)
                        unit_val = ((Decimal(si_val) /
                                     Decimal(math.pow(10, unit2.power))) /
                                    Decimal(unit2.scale)) - Decimal(unit2.offset)
                        conversion = float(unit_val)

                        # to catch 60.0001 etc.
                        if conversion > 1 and int(conversion) != conversion:
                            if conversion - int(conversion) < 0.001:
                                conversion = int(conversion)
                        if conversion > 10000:
                            conversion = '%.2e' % conversion
                        else:
                            conversion = '%s' % conversion
                        if conversion.endswith('.0'):
                            conversion = conversion[:-2]
                        unit.factors.append([conversion, unit2.symbol])

            print(asttemplates.unit.render(comp_definition=comp_definition,
                                           units=units), file=ast_doc)

        # Component Types, in XML definition order.
        for o_comp_type in ordered_comp_types[comp_definition]:
            o_comp_type = o_comp_type.replace('rdf:', 'rdf_')
            comp_type = model.component_types[o_comp_type]

            # Header: pull out an optional CNO ontology id and tidy the
            # description text.
            cno = None
            if " cno_00" in str(comp_type.description):
                cno = comp_type.description.split(" ")[-1]
                comp_type.description = comp_type.description.replace(cno, "")
            comp_type.description = format_description(comp_type.description)
            if len(comp_type.description) > 0:
                if comp_type.description[-1] not in "!.":
                    comp_type.description += "."
            else:
                comp_type.description = ""
            print(asttemplates.comp.render(comp_definition=comp_definition,
                                           comp_type=comp_type, cno=cno),
                  file=ast_doc)

            # Process parameters, derived parameters, texts, paths,
            # exposures, requirements and ports: each element maps to the
            # name of the ComponentType that declares it.
            params = {}
            derived_params = {}
            texts = {}
            paths = {}
            exposures = {}
            requirements = {}
            eventPorts = {}
            for param in comp_type.parameters:
                params[param] = comp_type.name
            for derived_param in comp_type.derived_parameters:
                derived_params[derived_param] = comp_type.name
            for text in comp_type.texts:
                texts[text] = comp_type.name
            for path in comp_type.paths:
                # NOTE(review): stores comp_type.paths (a list) as the value,
                # unlike the other maps which store the name; preserved as-is.
                paths[path] = comp_type.paths
            for exp in comp_type.exposures:
                exposures[exp] = comp_type.name
            for req in comp_type.requirements:
                requirements[req] = comp_type.name
            for ep in comp_type.event_ports:
                eventPorts[ep] = comp_type.name

            # Get parent ComponentType if derived from one, then recursively
            # go up the tree collecting inherited attributes; a same-named
            # entry is replaced by the ancestor's definition.
            extd_comp_type = get_extended_from_comp_type(comp_type.name)
            while extd_comp_type is not None:
                for param in extd_comp_type.parameters:
                    pk = params.copy().keys()
                    for pp0 in pk:
                        if pp0.name == param.name:
                            del params[pp0]
                    params[param] = extd_comp_type.name
                for derived_param in extd_comp_type.derived_parameters:
                    derived_params[derived_param] = extd_comp_type.name
                for text in extd_comp_type.texts:
                    texts[text] = extd_comp_type.name
                for path in extd_comp_type.paths:
                    paths[path] = extd_comp_type.paths
                for exp in extd_comp_type.exposures:
                    ek = exposures.copy().keys()
                    for ee0 in ek:
                        if ee0.name == exp.name:
                            del exposures[ee0]
                    exposures[exp] = extd_comp_type.name
                for req in extd_comp_type.requirements:
                    requirements[req] = extd_comp_type.name
                for ep in extd_comp_type.event_ports:
                    eventPorts[ep] = extd_comp_type.name
                # Recurse up the next parent
                extd_comp_type = get_extended_from_comp_type(
                    extd_comp_type.name)

            if len(params) > 0:
                keysort = sorted(params.keys(), key=lambda param: param.name)
                print(asttemplates.params.render(
                    title="Parameters", comp_type=comp_type, entries=params,
                    keysort=keysort), file=ast_doc)
            if len(derived_params) > 0:
                keysort = sorted(derived_params.keys(),
                                 key=lambda derived_param: derived_param.name)
                print(asttemplates.params.render(
                    title="Derived parameters", comp_type=comp_type,
                    entries=derived_params, keysort=keysort), file=ast_doc)
            if len(comp_type.texts) > 0:  # TODO: Check if Text elements are inherited...
                print(asttemplates.misc2c.render(
                    title="Text fields", textlist=comp_type.texts),
                    file=ast_doc)
            if len(comp_type.paths) > 0:  # TODO: Check if Path elements are inherited...
                print(asttemplates.misc2c.render(
                    title="Paths", textlist=comp_type.paths), file=ast_doc)
            if len(comp_type.component_references) > 0:
                print(asttemplates.misc3c.render(
                    title="Component References",
                    textlist=comp_type.component_references), file=ast_doc)
            if len(comp_type.children) > 0:
                # Separate single-child declarations from multi-child ones.
                childlist = []
                childrenlist = []
                for child_or_children in comp_type.children:
                    if not child_or_children.multiple:
                        childlist.append(child_or_children)
                    else:
                        childrenlist.append(child_or_children)
                if len(childlist) > 0:
                    print(asttemplates.misc3c.render(
                        title="Child list", textlist=childlist), file=ast_doc)
                if len(childrenlist) > 0:
                    print(asttemplates.misc3c.render(
                        title="Children list", textlist=childrenlist),
                        file=ast_doc)
            if len(comp_type.constants) > 0:
                print(asttemplates.constants.render(
                    title="Constants", textlist=comp_type.constants),
                    file=ast_doc)
            if len(comp_type.properties) > 0:
                print(asttemplates.properties.render(
                    title="Properties", textlist=comp_type.properties),
                    file=ast_doc)
            if len(exposures) > 0:
                keysort = sorted(exposures, key=lambda entry: entry.name)
                print(asttemplates.exposures.render(
                    title="Exposures", comp_type=comp_type, entries=exposures,
                    keysort=keysort), file=ast_doc)
            if len(requirements) > 0:
                keysort = sorted(requirements, key=lambda entry: entry.name)
                print(asttemplates.requirements.render(
                    title="Requirements", comp_type=comp_type,
                    entries=requirements, keysort=keysort), file=ast_doc)
            if len(eventPorts) > 0:
                keysort = sorted(eventPorts, key=lambda entry: entry.name)
                print(asttemplates.eventPorts.render(
                    title="Event Ports", comp_type=comp_type,
                    entries=eventPorts, keysort=keysort), file=ast_doc)
            if len(comp_type.attachments) > 0:
                print(asttemplates.misc3c.render(
                    title="Attachments", textlist=comp_type.attachments),
                    file=ast_doc)
            if comp_type.dynamics and comp_type.dynamics.has_content():
                print(asttemplates.dynamics.render(
                    title="Dynamics", comp_type=comp_type), file=ast_doc)

            # Examples
            """
            print("{} has: ".format(comp_type.name))
            if comp_type_py_api[comp_type.name]:
                print("\t1 Py def")
            if len(comp_type_examples[comp_type.name]) > 0:
                print("\t{} XML examples".format(len(comp_type_examples[comp_type.name])))
            """
            if comp_type_py_api[comp_type.name] or len(
                    comp_type_examples[comp_type.name]) > 0:
                print(asttemplates.examples.render(
                    title="Usage", comp_type=comp_type,
                    lemsexamples=comp_type_examples[comp_type.name],
                    pysig=comp_type_py_api[comp_type.name]), file=ast_doc)

        ast_doc.close()
        print("Finished processing {}".format(fullpath))

    # Only clean up the clone we created ourselves.
    if not srcdir:
        tempdir.cleanup()
def init_parser(self): """ Initializes the parser """ self.model = Model() self.token_list = None self.prev_token_lists = None self.valid_children = dict() self.valid_children['lems'] = [ 'component', 'componenttype', 'defaultrun', 'dimension', 'include', 'unit' ] self.valid_children['componenttype'] = [ 'behavior', 'behaviour', 'child', 'children', 'componentref', 'exposure', 'eventport', 'fixed', 'link', 'parameter', 'path', 'requirement', 'structure', 'text' ] self.valid_children['behavior'] = [ 'derivedvariable', 'oncondition', 'onentry', 'onevent', 'onstart', 'record', 'run', 'show', 'statevariable', 'timederivative' ] self.valid_children['oncondition'] = ['eventout', 'stateassignment'] self.valid_children['onentry'] = ['eventout', 'stateassignment'] self.valid_children['onevent'] = ['eventout', 'stateassignment'] self.valid_children['onstart'] = ['eventout', 'stateassignment'] self.valid_children['structure'] = [ 'childinstance', 'eventconnection', 'foreach', 'multiinstantiate' ] self.tag_parse_table = dict() self.tag_parse_table['behavior'] = self.parse_behavior self.tag_parse_table['child'] = self.parse_child self.tag_parse_table['childinstance'] = self.parse_child_instance self.tag_parse_table['children'] = self.parse_children self.tag_parse_table['component'] = self.parse_component self.tag_parse_table['componentref'] = self.parse_component_ref self.tag_parse_table['componenttype'] = self.parse_component_type self.tag_parse_table['defaultrun'] = self.parse_default_run self.tag_parse_table['derivedvariable'] = self.parse_derived_variable self.tag_parse_table['dimension'] = self.parse_dimension self.tag_parse_table['eventconnection'] = self.parse_event_connection self.tag_parse_table['eventout'] = self.parse_event_out self.tag_parse_table['eventport'] = self.parse_event_port self.tag_parse_table['exposure'] = self.parse_exposure self.tag_parse_table['fixed'] = self.parse_fixed self.tag_parse_table['foreach'] = self.parse_foreach 
self.tag_parse_table['include'] = self.parse_include self.tag_parse_table['link'] = self.parse_link self.tag_parse_table['multiinstantiate'] = \ self.parse_multi_instantiate self.tag_parse_table['oncondition'] = self.parse_on_condition self.tag_parse_table['onevent'] = self.parse_on_event self.tag_parse_table['onstart'] = self.parse_on_start self.tag_parse_table['parameter'] = self.parse_parameter self.tag_parse_table['path'] = self.parse_path self.tag_parse_table['record'] = self.parse_record self.tag_parse_table['requirement'] = self.parse_requirement self.tag_parse_table['run'] = self.parse_run self.tag_parse_table['show'] = self.parse_show self.tag_parse_table['stateassignment'] = self.parse_state_assignment self.tag_parse_table['statevariable'] = self.parse_state_variable self.tag_parse_table['structure'] = self.parse_structure self.tag_parse_table['text'] = self.parse_text self.tag_parse_table['timederivative'] = self.parse_time_derivative self.tag_parse_table['unit'] = self.parse_unit def counter(): count = 1 while True: yield count count = count + 1 self.id_counter = counter() """ Counter genertor for generating unique ids.