def test_biomodel_merge(tmp_path: Path) -> None:
    """Merge the first four BioModels, validate, flatten and write the result."""
    merge_dir = DATA_DIR / "manipulation" / "merge"

    # ids & paths of the models which should be combined
    # (here we just bring together the first BioModels)
    model_paths = {
        f"BIOMD000000000{k}": merge_dir / f"BIOMD000000000{k}.xml"
        for k in range(1, 5)
    }

    # merge the models into a fresh output directory
    out_dir = tmp_path / "output"
    out_dir.mkdir()
    doc = merge.merge_models(model_paths, output_dir=out_dir, validate=False)
    assert doc is not None

    # merged document must be free of validation issues
    vresults = validation.validate_doc(doc, units_consistency=False)
    assert vresults.error_count == 0
    assert vresults.warning_count == 0
    assert vresults.all_count == 0

    # flatten the comp model
    doc_flat = comp.flatten_sbml_doc(doc)
    assert doc_flat is not None

    # flattening may introduce a fixed number of warnings (74), but no errors
    vresults = validation.validate_doc(doc_flat, units_consistency=False)
    assert vresults.error_count == 0
    assert vresults.warning_count in (0, 74)
    assert vresults.all_count in (0, 74)

    # serialize the flattened model
    merged_sbml_path = out_dir / "merged_flat.xml"
    write_sbml(doc_flat, filepath=merged_sbml_path)
    assert merged_sbml_path.exists()
def test_biomodel_merge(tmp_path):
    """Test model merging.

    Uses the pytest 'tmp_path' fixture for the output directory.

    :param tmp_path: pytest fixture providing a fresh temporary directory
    :return: None
    """
    merge_dir = DATA_DIR / "manipulation" / "merge"

    # dictionary of ids & paths of models which should be combined
    # here we just bring together the first Biomodels
    model_ids = ["BIOMD000000000{}".format(k) for k in range(1, 5)]
    model_paths = dict(
        zip(model_ids, [merge_dir / f"{mid}.xml" for mid in model_ids]))

    # merge model
    out_dir = tmp_path / "output"
    out_dir.mkdir()
    # FIX: merge_models takes 'output_dir', not 'out_dir' (was a TypeError)
    doc = merge.merge_models(model_paths, output_dir=out_dir, validate=False)
    assert doc is not None

    # merged document must be free of validation issues
    Nall, Nerr, Nwarn = validation.validate_doc(doc, units_consistency=False)
    assert Nerr == 0
    assert Nwarn == 0
    assert Nall == 0

    # flatten the model
    doc_flat = comp.flatten_sbml_doc(doc)
    assert doc_flat is not None

    # flattening may introduce a fixed number of warnings (74), but no errors
    Nall, Nerr, Nwarn = validation.validate_doc(doc_flat, units_consistency=False)
    assert Nerr == 0
    assert Nwarn in [0, 74]
    assert Nall in [0, 74]

    # serialize the flattened model
    merged_sbml_path = out_dir / "merged_flat.xml"
    write_sbml(doc_flat, filepath=merged_sbml_path)
    assert merged_sbml_path.exists()
def merge_models(
    model_paths: Dict[str, Path],
    output_dir: Path = None,
    merged_id: str = "merged",
    validate: bool = True,
) -> libsbml.SBMLDocument:
    """Merge models in model path.

    All models must exist in the same subfolder.
    Relative paths are set in the merged models.
    Output directory must exist.

    :param model_paths: dict of model ids to absolute paths of the models
    :param output_dir: existing directory in which the merged model is created
    :param merged_id: model id of the merged model
    :param validate: boolean flag to validate input models and merged model
    :return: merged libsbml.SBMLDocument
    """
    if output_dir is None:
        # FIX: fail early with a clear message instead of os.chdir('None')
        raise IOError("'output_dir' is required for merging models")

    # necessary to convert models to SBML L3V1
    cur_dir = os.getcwd()
    os.chdir(str(output_dir))
    try:
        base_dir = None
        for model_id, path in model_paths.items():
            # FIX: existence check was inverted (error was logged when the
            # path DID exist); a missing input is fatal, so raise.
            if not path.exists():
                logging.error(f"Path for SBML file does not exist: {path}")
                raise IOError(f"Path for SBML file does not exist: {path}")

            # get base dir of all model files from first file
            if base_dir is None:
                base_dir = path.parent
            else:
                new_dir = path.parent
                # FIX: comparison was inverted ('if not new_dir != base_dir'
                # raised exactly when the directories WERE equal)
                if new_dir != base_dir:
                    raise IOError(f"All SBML files for merging must be in same "
                                  f"directory: {new_dir} != {base_dir}")

            # convert to L3V1
            path_L3: Path = output_dir / f"{model_id}_L3.xml"  # type: ignore
            # FIX: read the source model 'path', not the not-yet-written
            # L3 target 'path_L3'
            doc = read_sbml(path)
            if doc.getLevel() < SBML_LEVEL:
                doc.setLevelAndVersion(SBML_LEVEL, SBML_VERSION)
            write_sbml(doc, path_L3)
            # replacing values while iterating items() is safe (keys unchanged)
            model_paths[model_id] = path_L3

        if validate is True:
            # FIX: iterate the paths (dict values); iterating the dict itself
            # yielded the model ids (keys), not the files
            for path in model_paths.values():  # type: ignore
                validate_sbml(source=path, name=str(path))

        # create comp model
        merged_doc = create_merged_doc(
            model_paths, merged_id=merged_id)  # type: libsbml.SBMLDocument

        # write merged doc
        f_out = os.path.join(output_dir, "{}.xml".format(merged_id))  # type: ignore
        libsbml.writeSBMLToFile(merged_doc, f_out)

        if validate is True:
            # FIX: validate the merged file; previously the leftover loop
            # variable 'path' (last input model) was validated again
            validate_sbml(f_out, name=str(f_out))
    finally:
        # FIX: always restore the working directory, even on error
        os.chdir(cur_dir)

    return merged_doc
def merge_models(
    model_paths: Dict[str, Path],
    output_dir: Path,
    merged_id: str = "merged",
    flatten: bool = True,
    validate: bool = True,
    validate_input: bool = True,
    units_consistency: bool = False,
    modeling_practice: bool = False,
    sbml_level: int = 3,
    sbml_version: int = 1,
) -> libsbml.SBMLDocument:
    """Merge SBML models.

    Merges SBML models given in `model_paths` in the `output_dir`.
    Models are provided as dictionary
    {
        'model1_id': model1_path,
        'model2_id': model2_path,
        ...
    }
    The model ids are used as ids for the ExternalModelDefinitions.
    Relative paths are set in the merged models.

    The created model is either in SBML L3V1 (default) or SBML L3V2.

    :param model_paths: absolute paths to models
    :param output_dir: output directory for merged model
    :param merged_id: model id of the merged model
    :param flatten: flattens the merged model
    :param validate: boolean flag to validate the merged model
    :param validate_input: boolean flag to validate the input models
    :param units_consistency: boolean flag to check units consistency
    :param modeling_practice: boolean flag to check modeling practise
    :param sbml_level: SBML Level of the merged model in [3]
    :param sbml_version: SBML Version of the merged model in [1, 2]
    :return: SBMLDocument of the merged models
    """
    # necessary to convert models to SBML L3V1
    if isinstance(output_dir, str):
        logger.warning(
            f"'output_dir' should be a Path but: '{type(output_dir)}'")
        output_dir = Path(output_dir)
    if not output_dir.exists():
        raise IOError(f"'output_dir' does not exist: {output_dir}")

    validate_kwargs: Dict[str, bool] = {
        "units_consistency": units_consistency,
        "modeling_practice": modeling_practice,
    }

    for model_id, path in model_paths.items():
        # FIX: coerce str -> Path *before* calling path.exists(); a str path
        # previously raised AttributeError instead of the intended IOError
        if isinstance(path, str):
            path = Path(path)
        if not path.exists():
            raise IOError(f"Path for SBML file does not exist: {path}")

        # convert each input model to the requested level/version
        path_L3: Path = output_dir / f"{model_id}_L3.xml"
        doc = read_sbml(path)
        doc.setLevelAndVersion(sbml_level, sbml_version)
        write_sbml(doc, path_L3)
        # replacing values while iterating items() is safe (keys unchanged)
        model_paths[model_id] = path_L3

        if validate_input:
            validate_sbml(
                source=path_L3,
                name=str(path),
                **validate_kwargs,
            )

    # create comp model; chdir so relative paths are set in the merged model
    cur_dir = os.getcwd()
    os.chdir(str(output_dir))
    try:
        merged_doc: libsbml.SBMLDocument = _create_merged_doc(
            model_paths, merged_id=merged_id)
    finally:
        # FIX: restore the working directory even if merging raises
        os.chdir(cur_dir)

    # write merged doc
    merged_path = output_dir / f"{merged_id}.xml"
    write_sbml(merged_doc, filepath=merged_path)
    if validate:
        validate_sbml(merged_path, name=str(merged_path), **validate_kwargs)

    if flatten:
        flat_path = output_dir / f"{merged_id}_flat.xml"
        flatten_sbml(sbml_path=merged_path, sbml_flat_path=flat_path)
        if validate:
            validate_sbml(flat_path, name=str(flat_path), **validate_kwargs)

    return merged_doc
def flatten_sbml_doc(doc: libsbml.SBMLDocument, output_path: Path = None,
                     leave_ports: bool = True) -> libsbml.SBMLDocument:
    """Flatten SBMLDocument in place via the libsbml comp flattening converter.

    Validation should be performed before the flattening and is not part
    of the flattening routine. If an output path is provided the flattened
    document is additionally written to that path.

    :param doc: SBMLDocument to flatten (modified in place by doc.convert)
    :param output_path: Path to write flattened SBMLDocument to (optional)
    :param leave_ports: flag to keep comp ports in the flattened model
    :raises ValueError: if the conversion does not succeed
    :return: the (flattened) SBMLDocument
    """
    # Pre-existing read errors are only logged, not fatal: the converter
    # itself decides below whether flattening succeeds.
    error_count = doc.getNumErrors()
    if error_count > 0:
        if doc.getError(0).getErrorId() == libsbml.XMLFileUnreadable:
            # Handle case of unreadable file here.
            logger.error("SBML error in doc: libsbml.XMLFileUnreadable")
        elif doc.getError(0).getErrorId() == libsbml.XMLFileOperationError:
            # Handle case of other file error here.
            logger.error("SBML error in doc: libsbml.XMLFileOperationError")
        else:
            # Handle other error cases here.
            logger.error("SBML errors in doc, see SBMLDocument error log.")

    # converter options
    # NOTE(review): bare attribute access below is a no-op; presumably kept
    # as a pointer to the converter class being configured — confirm intent.
    libsbml.CompFlatteningConverter
    props = libsbml.ConversionProperties()
    props.addOption("flatten comp", True)  # Invokes CompFlatteningConverter
    props.addOption("leave_ports", leave_ports)  # Indicates whether to leave ports
    # "none": do not abort on unflattenable constructs; result is still
    # checked against LIBSBML_OPERATION_SUCCESS below
    props.addOption("abortIfUnflattenable", "none")

    # flatten (timed for the log report)
    current = time.perf_counter()
    result = doc.convert(props)
    flattened_status = result == libsbml.LIBSBML_OPERATION_SUCCESS

    # assemble a boxed, colorized status report for the log
    lines = [
        "",
        "-" * 120,
        str(doc),
        "{:<25}: {}".format("flattened", str(flattened_status).upper()),
        "{:<25}: {:.3f}".format("flatten time (ms)",
                                time.perf_counter() - current),
        "-" * 120,
    ]
    info = bcolors.BOLD + "\n".join(lines) + bcolors.ENDC

    if flattened_status:
        logger.info(bcolors.OKGREEN + info + bcolors.ENDC)
    else:
        # failed conversion is fatal for the caller
        logger.error(bcolors.FAIL + info + bcolors.ENDC)
        raise ValueError(
            "SBML could not be flattend due to errors in the SBMLDocument.")

    # optionally serialize the flattened document
    if output_path is not None:
        write_sbml(doc, filepath=output_path)
        logger.info(f"Flattened model created: '{output_path}'")

    return doc
def create_model(
    modules: Union[Iterable[str], Dict],
    output_dir: Path = None,
    tmp: bool = False,
    filename: str = None,
    mid: str = None,
    suffix: str = None,
    annotations: Path = None,
    create_report: bool = True,
    validate: bool = True,
    log_errors: bool = True,
    units_consistency: bool = True,
    modeling_practice: bool = True,
    internal_consistency: bool = True,
    sbml_level: int = SBML_LEVEL,
    sbml_version: int = SBML_VERSION,
) -> FactoryResult:
    """Create SBML model from module information.

    This is the entry point for creating models. The model information
    is provided as a list of importable python modules (or an already
    assembled model dictionary). If no filename is provided the filename
    is created from the id and suffix. Additional model annotations can
    be provided.

    :param modules: iterable of strings of python modules or model dict
    :param output_dir: directory in which to create SBML file
    :param tmp: boolean flag to create files in a temporary directory
        (for testing); the directory is removed again before returning
    :param filename: filename to write to with suffix,
        if not provided mid and suffix are used
    :param mid: model id to use for filename
    :param suffix: suffix for SBML filename
    :param annotations: Path to annotations file
    :param create_report: boolean switch to create SBML report
    :param validate: validates the SBML file
    :param log_errors: boolean flag to log errors
    :param units_consistency: boolean flag to check units consistency
    :param modeling_practice: boolean flag to check modeling practise
    :param internal_consistency: boolean flag to check internal consistency
    :param sbml_level: set SBML level for model generation
    :param sbml_version: set SBML version for model generation
    :raises TypeError: if neither 'output_dir' nor 'tmp' is provided
    :return: FactoryResult
    """
    # either an explicit output directory or a temporary one is required
    if output_dir is None and tmp is False:
        raise TypeError(
            "create_model() missing 1 required argument: 'output_dir'")

    # preprocess: log which modules are being processed
    logger.info(bcolors.OKBLUE + "\n\n" + "-" * 120 + "\n" + str(modules)
                + "\n" + "-" * 120 + bcolors.ENDC)
    # a dict is taken as the final model dictionary; anything else is
    # treated as an iterable of module names to collect information from
    if isinstance(modules, dict):
        model_dict = modules
    else:
        model_dict = Preprocess.dict_from_modules(
            modules,
            keys=CoreModel._keys)  # type: ignore

    core_model = CoreModel.from_dict(model_dict=model_dict)
    logger.debug(core_model.get_info())
    core_model.create_sbml(sbml_level=sbml_level, sbml_version=sbml_version)

    if not filename:
        # create filename from model id and optional suffix
        if mid is None:
            mid = core_model.model.getId()
        if suffix is None:
            suffix = ""
        filename = f"{mid}{suffix}.xml"

    if tmp:
        # NOTE(review): tmp branch yields a str sbml_path (os.path.join),
        # the else branch a Path — callers appear to handle both; confirm.
        output_dir = tempfile.mkdtemp()  # type: ignore
        sbml_path = os.path.join(output_dir, filename)  # type: ignore
    else:
        # tolerate str output_dir but warn; create the directory if missing
        if isinstance(output_dir, str):
            output_dir = Path(output_dir)
            logger.warning(f"'output_dir' should be a Path: {output_dir}")
        if not output_dir.exists():  # type: ignore
            logger.warning(
                f"'output_dir' does not exist and is created: {output_dir}")
            output_dir.mkdir(parents=True)  # type: ignore
        sbml_path = output_dir / filename  # type: ignore

    # write sbml
    if core_model.doc is None:
        core_model.create_sbml()
    # try/finally guarantees the temporary directory is cleaned up even
    # if writing, annotation or report creation raises
    try:
        write_sbml(
            doc=core_model.doc,
            filepath=sbml_path,  # type: ignore
            validate=validate,
            log_errors=log_errors,
            units_consistency=units_consistency,
            modeling_practice=modeling_practice,
            internal_consistency=internal_consistency,
        )

        # annotate
        if annotations is not None:
            # overwrite the normal file
            annotator.annotate_sbml(
                source=sbml_path, annotations_path=annotations,
                filepath=sbml_path  # type: ignore
            )

        # create report
        if create_report:
            # file is already validated, no validation on report needed
            sbmlreport.create_report(
                sbml_path=sbml_path, output_dir=output_dir,
                validate=False  # type: ignore
            )
    finally:
        if tmp:
            shutil.rmtree(str(output_dir))

    return FactoryResult(
        model_dict=model_dict,
        core_model=core_model,
        sbml_path=sbml_path,  # type: ignore
    )
def test_modelcreator_1(tmpdir):
    """Test complex model creation.

    If this test fails the respective notebook must be updated:
    :return:
    """
    UNIT_TIME = "s"
    UNIT_VOLUME = "m3"
    UNIT_LENGTH = "m"
    UNIT_AREA = "m2"
    UNIT_AMOUNT = "itm"
    UNIT_FLUX = "itm_per_s"

    model_dict = {
        "packages": ["fbc"],
        "mid": "example_model",
        "model_units": ModelUnits(
            time=UNIT_TIME,
            extent=UNIT_AMOUNT,
            substance=UNIT_AMOUNT,
            length=UNIT_LENGTH,
            # FIX: area/volume were swapped (area was 'm3', volume was 'm2')
            area=UNIT_AREA,
            volume=UNIT_VOLUME,
        ),
        "units": {
            # using predefined units
            UNIT_s,
            UNIT_kg,
            UNIT_m,
            UNIT_m2,
            UNIT_m3,
            UNIT_mM,
            UNIT_per_s,
            # defining some additional units
            Unit("itm", [(UNIT_KIND_ITEM, 1.0)]),
            Unit("itm_per_s",
                 [(UNIT_KIND_ITEM, 1.0), (UNIT_KIND_SECOND, -1.0)]),
            Unit("itm_per_m3",
                 [(UNIT_KIND_ITEM, 1.0), (UNIT_KIND_METRE, -3.0)]),
        },
        "compartments": [
            Compartment(
                sid="extern",
                name="external compartment",
                value=1.0,
                unit=UNIT_VOLUME,
                constant=True,
                spatialDimensions=3,
            ),
            Compartment(
                sid="cell",
                name="cell",
                value=1.0,
                unit=UNIT_VOLUME,
                constant=True,
                spatialDimensions=3,
            ),
            Compartment(
                sid="membrane",
                name="membrane",
                value=1.0,
                unit=UNIT_AREA,
                constant=True,
                spatialDimensions=2,
            ),
        ],
        "species": [
            # exchange species
            Species(
                sid="A",
                name="A",
                initialAmount=0,
                substanceUnit=UNIT_AMOUNT,
                hasOnlySubstanceUnits=True,
                compartment="extern",
                sboTerm=SBO_SIMPLE_CHEMICAL,
            ),
            Species(
                sid="C",
                name="C",
                initialAmount=0,
                substanceUnit=UNIT_AMOUNT,
                hasOnlySubstanceUnits=True,
                compartment="extern",
                sboTerm=SBO_SIMPLE_CHEMICAL,
            ),
            # internal species
            Species(
                sid="B1",
                name="B1",
                initialAmount=0,
                substanceUnit=UNIT_AMOUNT,
                hasOnlySubstanceUnits=True,
                compartment="cell",
                sboTerm=SBO_SIMPLE_CHEMICAL,
            ),
            Species(
                sid="B2",
                name="B2",
                initialAmount=0,
                substanceUnit=UNIT_AMOUNT,
                hasOnlySubstanceUnits=True,
                compartment="cell",
                sboTerm=SBO_SIMPLE_CHEMICAL,
            ),
        ],
        "parameters": [
            Parameter(
                sid="ub_R1",
                value=1.0,
                unit=UNIT_FLUX,
                constant=True,
                sboTerm=SBO_FLUX_BOUND,
            ),
            Parameter(
                sid="zero",
                value=0.0,
                unit=UNIT_FLUX,
                constant=True,
                sboTerm=SBO_FLUX_BOUND,
            ),
            Parameter(
                sid="ub_default",
                value=1000,
                unit=UNIT_FLUX,
                constant=True,
                sboTerm=SBO_FLUX_BOUND,
            ),
        ],
        "reactions": [
            # metabolic reactions
            Reaction(
                sid="R1",
                name="A import (R1)",
                equation="A <-> B1",
                fast=False,
                reversible=True,
                compartment="membrane",
                lowerFluxBound="zero",
                upperFluxBound="ub_R1",
            ),
            Reaction(
                sid="R2",
                name="B1 <-> B2 (R2)",
                equation="B1 <-> B2",
                fast=False,
                reversible=True,
                compartment="cell",
                lowerFluxBound="zero",
                upperFluxBound="ub_default",
            ),
            Reaction(
                sid="R3",
                name="B2 export (R3)",
                # FIX: equation used B1, contradicting the reaction name and
                # leaving B2 without a consumer; R3 exports B2 as named
                equation="B2 <-> C",
                fast=False,
                reversible=True,
                compartment="membrane",
                lowerFluxBound="zero",
                upperFluxBound="ub_default",
            ),
            # exchange reactions
            ExchangeReaction(species_id="A"),
            ExchangeReaction(species_id="B1"),
        ],
        "objectives": [
            Objective(
                sid="R3_maximize",
                objectiveType="maximize",
                fluxObjectives={"R3": 1.0},
                active=True,
            )
        ],
    }

    # create SBMLDocument
    core_model = CoreModel.from_dict(model_dict)
    doc = core_model.create_sbml()

    # write SBML file; FIX: assert on the result so failures are visible
    sbml_str = write_sbml(doc=doc, validate=True)
    assert sbml_str is not None
def create_model(
    modules: List[str],
    output_dir: Path,
    filename: str = None,
    mid: str = None,
    suffix: str = None,
    annotations=None,
    create_report: bool = True,
    validate: bool = True,
    log_errors: bool = True,
    units_consistency: bool = True,
    modeling_practice: bool = True,
    internal_consistency: bool = True,
):
    """Create SBML model from module information.

    This is the entry point for creating models. The model information
    is provided as a list of importable python modules. If no filename
    is provided the filename is created from the id and suffix.
    Additional model annotations can be provided.

    :param modules: iterable of strings of python modules
    :param output_dir: directory in which to create SBML file
        (created, with a warning, if it does not exist)
    :param filename: filename to write to with suffix,
        if not provided mid and suffix are used
    :param mid: model id to use for filename
    :param suffix: suffix for SBML filename
    :param annotations: Path to annotations file applied to the SBML
    :param create_report: boolean switch to create SBML report
    :param validate: validates the SBML file
    :param log_errors: boolean flag to log errors during validation
    :param units_consistency: boolean flag to check units consistency
    :param modeling_practice: boolean flag to check modeling practise
    :param internal_consistency: boolean flag to check internal consistency
    :return: [model_dict, core_model, sbml_path] list
    """
    # preprocess: log which modules are being processed
    logger.info(
        bcolors.OKBLUE + "\n\n" + "-" * 120 + "\n" + str(modules)
        + "\n" + "-" * 120 + bcolors.ENDC
    )
    model_dict = Preprocess.dict_from_modules(modules)

    # create SBML model from the collected module information
    core_model = CoreModel.from_dict(model_dict=model_dict)
    logger.debug(core_model.get_info())
    core_model.create_sbml()

    # write file: tolerate str output_dir but warn; create it if missing
    if isinstance(output_dir, str):
        output_dir = Path(output_dir)
        logger.warning(f"'output_dir' should be a Path: {output_dir}")
    if not output_dir.exists():
        logger.warning(
            f"'output_dir' does not exist and is created: {output_dir}")
        output_dir.mkdir(parents=True)

    if not filename:
        # create filename from model id and optional suffix
        if mid is None:
            mid = core_model.model.getId()
        if suffix is None:
            suffix = ""
        filename = f"{mid}{suffix}.xml"

    # write sbml (create_sbml is retried here in case it produced no doc)
    sbml_path = output_dir / filename
    if core_model.doc is None:
        core_model.create_sbml()
    write_sbml(
        doc=core_model.doc,
        filepath=sbml_path,
        validate=validate,
        log_errors=log_errors,
        units_consistency=units_consistency,
        modeling_practice=modeling_practice,
        internal_consistency=internal_consistency,
    )

    # annotate
    if annotations is not None:
        # overwrite the normal file
        annotator.annotate_sbml(
            source=sbml_path, annotations_path=annotations, filepath=sbml_path
        )

    # create report
    if create_report:
        # file is already validated, no validation on report needed
        sbmlreport.create_report(
            sbml_path=sbml_path, output_dir=output_dir, validate=False
        )

    return [model_dict, core_model, sbml_path]