def test_flattenExternalModelDefinition():
    """Flattening removes all ExternalModelDefinitions from the DFBA example.

    Reads the DFBA model with external model definitions, converts the
    EMDs into local ModelDefinitions and checks the resulting document.
    """
    sbml_path = data.DFBA_EMD_SBML
    print(sbml_path)

    # the resource must be readable and contain a model
    doc = sbmlio.read_sbml(sbml_path)
    assert doc is not None
    assert doc.getModel() is not None
    print(doc)
    print(doc.getModel().getId())

    # flatten the external model definitions
    doc_no_emd = flattenExternalModelDefinitions(doc, validate=True)
    assert doc_no_emd is not None

    # no ExternalModelDefinitions remain, all ModelDefinitions survive
    comp_doc_no_emd = doc_no_emd.getPlugin("comp")
    assert comp_doc_no_emd.getNumExternalModelDefinitions() == 0
    assert comp_doc_no_emd.getNumModelDefinitions() == 3

    # check model consistency
    Nall, Nerr, Nwarn = validation.check_doc(doc_no_emd)
def check_model_dict(d):
    """Build an SBML model from the dict *d* and assert it validates without errors."""
    core_model = CoreModel.from_dict(model_dict=d)
    core_model.create_sbml()
    assert core_model.doc is not None
    # unit consistency checks disabled; only hard errors must be absent
    Nall, Nerr, Nwar = check_doc(core_model.doc, units_consistency=False)
    assert Nerr == 0
def test_biomodel_merge():
    """Merge the first Biomodels into one comp model, validate and flatten it.

    Models are read from the manipulation data directory, merged into a
    single comp document, validated, flattened, and the flat model is
    written to the output directory.
    """
    manipulation_dir = os.path.join(data_dir, 'manipulation')

    # dictionary of ids & paths of models which should be combined
    # here we just bring together the first Biomodels
    model_ids = ["BIOMD000000000{}".format(k) for k in range(1, 5)]
    model_paths = {
        mid: os.path.join(manipulation_dir, "{}.xml".format(mid))
        for mid in model_ids
    }
    print(model_paths)

    # merge model
    out_dir = os.path.join(manipulation_dir, 'output')
    # makedirs with exist_ok avoids the check-then-create race of
    # os.path.exists + os.mkdir
    os.makedirs(out_dir, exist_ok=True)
    print('out_dir:', out_dir)
    doc = manipulation.merge_models(model_paths, out_dir=out_dir, validate=False)
    assert doc is not None

    # merged document must be error free
    Nall, Nerr, Nwarn = validation.check_doc(doc, ucheck=False)
    assert Nerr == 0
    assert Nwarn == 0
    assert Nall == 0

    # flatten the model
    doc_flat = comp.flattenSBMLDocument(doc)
    assert doc_flat is not None
    libsbml.writeSBMLToFile(doc_flat, os.path.join(out_dir, "merged_flat.xml"))

    Nall, Nerr, Nwarn = validation.check_doc(doc_flat, ucheck=False)
    assert Nerr == 0
    # 74 is the known warning count of the flattened model
    assert Nwarn in [0, 74]
    assert Nall in [0, 74]
# dictionary of ids & paths of models which should be combined
# here we just bring together the first Biomodels
model_ids = ["BIOMD000000000{}".format(k) for k in range(1, 5)]
model_paths = dict(
    (mid, os.path.join(merge_dir, "{}.xml".format(mid))) for mid in model_ids
)
pprint(model_paths)

# create merged model
output_dir = os.path.join(merge_dir, 'output')
doc = manipulation.merge_models(model_paths, out_dir=output_dir, validate=False)

# validate the merged document (unit consistency checks disabled)
Nall, Nerr, Nwarn = validation.check_doc(doc, units_consistency=False)
assert Nerr == 0
assert Nwarn == 0
assert Nall == 0

# write the merged model
print(libsbml.writeSBMLToString(doc))
libsbml.writeSBMLToFile(doc, os.path.join(output_dir, "merged.xml"))

# flatten the merged model and write the flat version
doc_flat = comp.flattenSBMLDocument(doc)
Nall, Nerr, Nwarn = validation.check_doc(doc_flat, units_consistency=False)
libsbml.writeSBMLToFile(doc_flat, os.path.join(output_dir, "merged_flat.xml"))


# In[ ]:
# success probability of Geometric-1 up_mean_geo1 = up.createUncertParameter() # type: libsbml.UncertParameter up_mean_geo1.setType(libsbml.DISTRIB_UNCERTTYPE_EXTERNALPARAMETER) up_mean_geo1.setName("success probability of Geometric 1") up_mean_geo1.setValue(0.4) up_mean_geo1.setDefinitionURL("http://www.probonto.org/ontology#PROB_k0000789") return doc if __name__ == "__main__": functions = [ # distrib_normal, # distrib_all, uncertainty, ] for f_creator in functions: name = f_creator.__name__ print(name) # distrib_example1() doc = f_creator() sbml = libsbml.writeSBMLToString(doc) print("-" * 80) print(sbml) print("-" * 80) sbml_path = "./{}.xml".format(name) libsbml.writeSBMLToFile(doc, sbml_path) validation.check_doc(doc)
def flattenExternalModelDefinitions(doc, validate=False):
    """ Converts all ExternalModelDefinitions to ModelDefinitions.

    I.e. the definition of models in external files are read
    and directly included in the top model. The resulting
    comp model consists than only of a single file.

    The model refs in the submodel do not change in the process,
    so no need to update the submodels.

    :param doc: SBMLDocument
    :param validate: run document validation after the conversion
    :return: SBMLDocument with ExternalModelDefinitions replaced
    """
    logging.debug('* flattenExternalModelDefinitions')

    # FIXME: handle multiple levels of hierarchies. Recursively to handle the ExternalModelDefinitions of submodels
    warnings.warn("flattenExternalModelDefinitions does not work recursively!")
    warnings.warn(
        "flattenExternalModelDefinitions: THIS DOES NOT WORK - ONLY USE IF YOU KNOW WHAT YOU ARE DOING"
    )

    comp_doc = doc.getPlugin("comp")
    if comp_doc is None:
        logging.warning("Model is not a comp model, no ExternalModelDefinitions")
        return doc

    emd_list = comp_doc.getListOfExternalModelDefinitions()
    if (emd_list is None) or (len(emd_list) == 0):
        # no ExternalModelDefinitions
        logging.warning("Model does not contain any ExternalModelDefinitions")
        return doc

    emd_ids = []
    for emd in emd_list:
        logging.debug(emd)
        emd_ids.append(emd.getId())

        # get the model definition from the referenced document
        ref_model = emd.getReferencedModel()
        ref_doc = ref_model.getSBMLDocument()

        # enable every package of the referenced document on the main
        # SBMLDocument so the copied ModelDefinition stays valid
        for k in range(ref_doc.getNumPlugins()):
            plugin = ref_doc.getPlugin(k)
            doc.enablePackage(plugin.getURI(), plugin.getPrefix(), True)

        # add the referenced model as a local ModelDefinition
        md = libsbml.ModelDefinition(ref_model)
        comp_doc.addModelDefinition(md)

    # remove the emds afterwards (never while iterating the list)
    for emd_id in emd_ids:
        # remove the emd from the model
        comp_doc.removeExternalModelDefinition(emd_id)

    # validate
    if validate:
        validation.check_doc(doc)
    return doc
def flattenExternalModelDefinitions(doc, validate=False):
    """ Converts all ExternalModelDefinitions to ModelDefinitions.

    I.e. the definition of models in external files are read
    and directly included in the top model. The resulting
    comp model consists than only of a single file.

    The model refs in the submodel do not change in the process,
    so no need to update the submodels.

    :param doc: SBMLDocument
    :return: SBMLDocument with ExternalModelDefinitions replaced
    """
    logging.debug('* flattenExternalModelDefinitions')

    # FIXME: handle multiple levels of hierarchies. Recursively to handle the ExternalModelDefinitions of submodels
    warnings.warn("flattenExternalModelDefinitions does not work recursively!")
    warnings.warn("flattenExternalModelDefinitions: THIS DOES NOT WORK - ONLY USE IF YOU KNOW WHAT YOU ARE DOING")

    comp_doc = doc.getPlugin("comp")
    if comp_doc is None:
        logging.warning("Model is not a comp model, no ExternalModelDefinitions")
        return doc

    emd_list = comp_doc.getListOfExternalModelDefinitions()
    if (emd_list is None) or (len(emd_list) == 0):
        # no ExternalModelDefinitions
        logging.warning("Model does not contain any ExternalModelDefinitions")
        return doc

    model = doc.getModel()
    comp_model = model.getPlugin("comp")

    removed_ids = []
    for external_def in emd_list:
        logging.debug(external_def)
        removed_ids.append(external_def.getId())

        # resolve the model referenced by this external definition
        referenced_model = external_def.getReferencedModel()
        referenced_doc = referenced_model.getSBMLDocument()
        for idx in range(referenced_doc.getNumPlugins()):
            plugin = referenced_doc.getPlugin(idx)
            # enable the package on the main SBMLDocument
            uri = plugin.getURI()
            prefix = plugin.getPrefix()
            name = plugin.getPackageName()
            doc.enablePackage(uri, prefix, True)

        # copy the referenced model in as a local ModelDefinition
        model_def = libsbml.ModelDefinition(referenced_model)
        comp_doc.addModelDefinition(model_def)

    # remove the external definitions afterwards
    for removed_id in removed_ids:
        # remove the emd from the model
        comp_doc.removeExternalModelDefinition(removed_id)

    # validate
    if validate:
        validation.check_doc(doc)
    return doc
up_mean_geo1 = up.createUncertParameter() # type: libsbml.UncertParameter up_mean_geo1.setType(libsbml.DISTRIB_UNCERTTYPE_EXTERNALPARAMETER) up_mean_geo1.setName("success probability of Geometric 1") up_mean_geo1.setValue(0.4) up_mean_geo1.setDefinitionURL( "http://www.probonto.org/ontology#PROB_k0000789") return doc if __name__ == "__main__": functions = [ # distrib_normal, # distrib_all, uncertainty, ] for f_creator in functions: name = f_creator.__name__ print(name) # distrib_example1() doc = f_creator() sbml = libsbml.writeSBMLToString(doc) print("-" * 80) print(sbml) print("-" * 80) sbml_path = "./{}.xml".format(name) libsbml.writeSBMLToFile(doc, sbml_path) validation.check_doc(doc)
length=UNIT_m, area=UNIT_m2, volume=UNIT_KIND_LITRE), 'units': [UNIT_hr, UNIT_m, UNIT_m2, UNIT_mM], 'parameters': [Parameter(sid="p1", value=0.0, unit=UNIT_mM)], 'assignments': [ InitialAssignment('p1', 'normal(0 mM, 1 mM)'), ] } # create model and print SBML core_model = CoreModel.from_dict(model_dict=model_dict) print_xml(core_model.get_sbml()) # validate model check_doc(core_model.doc, units_consistency=False) # ### Using a normal distribution # In this example, the initial value of y is set as a draw from the normal distribution `normal(z,10)`: # In[4]: model_dict = { 'mid': 'normal', 'packages': ['distrib'], 'parameters': [ Parameter('y', value=1.0), Parameter('z', value=1.0), ], 'assignments': [ InitialAssignment('y', 'normal(z, 10)'),