def test_get_observable_df():
    """Test observables.get_observable_df.

    Checks file-based loading (with and without the mandatory
    observableId column), DataFrame pass-through, and None handling.
    """
    # Table lacking the mandatory OBSERVABLE_ID column
    observable_df = pd.DataFrame(data={
        OBSERVABLE_NAME: ['observable name 1'],
        OBSERVABLE_FORMULA: ['observable_1'],
        NOISE_FORMULA: [1],
    })

    # Use a TemporaryDirectory so the files are cleaned up afterwards;
    # NamedTemporaryFile(delete=False) leaked the temp files and is not
    # portable to Windows when the file is re-opened by name.
    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = Path(temp_dir) / "observables.tsv"

        # without id: loading must fail on the missing index column
        observable_df.to_csv(file_name, sep='\t', index=False)
        with pytest.raises(KeyError):
            petab.get_observable_df(file_name)

        # with id: loading succeeds and OBSERVABLE_ID becomes the index
        observable_df[OBSERVABLE_ID] = ['observable_1']
        observable_df.to_csv(file_name, sep='\t', index=False)
        df = petab.get_observable_df(file_name)
        assert (df == observable_df.set_index(OBSERVABLE_ID)).all().all()

    # test other arguments: DataFrame is passed through, None yields None
    assert (petab.get_observable_df(observable_df) == observable_df) \
        .all().all()
    assert petab.get_observable_df(None) is None
def test_write_observable_df():
    """Test observables.write_observable_df via a write/read round trip."""
    observable_df = pd.DataFrame(data={
        OBSERVABLE_ID: ['observable_1'],
        OBSERVABLE_NAME: ['observable name 1'],
        OBSERVABLE_FORMULA: ['observable_1'],
        NOISE_FORMULA: [1],
    }).set_index(OBSERVABLE_ID)

    # Write to a fresh path inside a TemporaryDirectory. The previous
    # NamedTemporaryFile approach re-opened the still-open file by name,
    # which fails on Windows (the open handle locks the file).
    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = Path(temp_dir) / "observables.tsv"
        petab.write_observable_df(observable_df, file_name)
        re_df = petab.get_observable_df(file_name)
        assert (observable_df == re_df).all().all()
def test_write_observable_df():
    """Round-trip an observables table through write_observable_df /
    get_observable_df and check it survives unchanged."""
    table = {
        OBSERVABLE_ID: ['observable_1'],
        OBSERVABLE_NAME: ['observable name 1'],
        OBSERVABLE_FORMULA: ['observable_1'],
        NOISE_FORMULA: [1],
    }
    expected = pd.DataFrame(data=table).set_index(OBSERVABLE_ID)

    with tempfile.TemporaryDirectory() as tmpdir:
        target = Path(tmpdir) / "observables.tsv"
        petab.write_observable_df(expected, target)
        round_tripped = petab.get_observable_df(target)

    assert (expected == round_tripped).all().all()
PARAMETER_NAME: parameter_dict0['name'], PARAMETER_SCALE: LIN, NOMINAL_VALUE: 1, ESTIMATE: 0, } else: raise NotImplementedError(parameter_dict0['id']) parameter_dicts.append(parameter_dict) ## Noise parameter_dicts.append({ PARAMETER_ID: noise, PARAMETER_NAME: noise, PARAMETER_SCALE: LOG10, LOWER_BOUND: '1e-12', UPPER_BOUND: '1e3', NOMINAL_VALUE: 0.1, ESTIMATE: 1, }) condition_df = petab.get_condition_df( pd.DataFrame({CONDITION_ID: [condition_id]})) observable_df = petab.get_observable_df(pd.DataFrame(observable_dicts)) measurement_df = petab.get_measurement_df(pd.DataFrame(measurement_dicts)) parameter_df = petab.get_parameter_df(pd.DataFrame(parameter_dicts)) petab.write_condition_df(condition_df, 'output/petab/conditions.tsv') petab.write_observable_df(observable_df, 'output/petab/observables.tsv') petab.write_measurement_df(measurement_df, 'output/petab/measurements.tsv') petab.write_parameter_df(parameter_df, 'output/petab/parameters.tsv') shutil.copy('input/petab_problem.yaml', 'output/petab/petab_problem.yaml')
def import_model(sbml_model: Union[str, 'libsbml.Model'],
                 condition_table: Optional[Union[str, pd.DataFrame]] = None,
                 observable_table: Optional[Union[str, pd.DataFrame]] = None,
                 measurement_table: Optional[Union[str, pd.DataFrame]] = None,
                 model_name: Optional[str] = None,
                 model_output_dir: Optional[str] = None,
                 verbose: Optional[Union[bool, int]] = True,
                 allow_reinit_fixpar_initcond: bool = True,
                 **kwargs) -> None:
    """
    Create AMICI model from PEtab problem

    :param sbml_model:
        PEtab SBML model or SBML file name.

    :param condition_table:
        PEtab condition table. If provided, parameters from there will be
        turned into AMICI constant parameters (i.e. parameters w.r.t. which
        no sensitivities will be computed).

    :param observable_table:
        PEtab observable table.

    :param measurement_table:
        PEtab measurement table.
        NOTE(review): accepted but not referenced anywhere in this function
        body — confirm whether it is intentionally unused.

    :param model_name:
        Name of the generated model. If model file name was provided,
        this defaults to the file name without extension, otherwise
        the SBML model ID will be used.

    :param model_output_dir:
        Directory to write the model code to. Will be created if doesn't
        exist. Defaults to current directory.

    :param verbose:
        Print/log extra information.

    :param allow_reinit_fixpar_initcond:
        See :class:`amici.ode_export.ODEExporter`. Must be enabled if
        initial states are to be reset after preequilibration.

    :param kwargs:
        Additional keyword arguments to be passed to
        :meth:`amici.sbml_import.SbmlImporter.sbml2amici`.
    """
    set_log_level(logger, verbose)

    logger.info(f"Importing model ...")

    # Get PEtab tables
    observable_df = petab.get_observable_df(observable_table)
    # to determine fixed parameters
    condition_df = petab.get_condition_df(condition_table)

    if observable_df is None:
        raise NotImplementedError("PEtab import without observables table "
                                  "is currently not supported.")

    # Model name from SBML ID or filename
    if model_name is None:
        if isinstance(sbml_model, libsbml.Model):
            model_name = sbml_model.getId()
        else:
            # file name without directory and extension
            model_name = os.path.splitext(os.path.split(sbml_model)[-1])[0]

    if model_output_dir is None:
        model_output_dir = os.path.join(os.getcwd(), model_name)

    logger.info(f"Model name is '{model_name}'. "
                f"Writing model code to '{model_output_dir}'.")

    # Load model
    if isinstance(sbml_model, str):
        # from file
        sbml_reader = libsbml.SBMLReader()
        sbml_doc = sbml_reader.readSBMLFromFile(sbml_model)
        sbml_model = sbml_doc.getModel()
    else:
        # Create a copy, because it will be modified by SbmlImporter
        sbml_doc = sbml_model.getSBMLDocument().clone()
        sbml_model = sbml_doc.getModel()

    show_model_info(sbml_model)

    sbml_importer = amici.SbmlImporter(sbml_model)
    # work on the importer's (copied) model from here on
    sbml_model = sbml_importer.sbml

    if observable_df is not None:
        # observables: id -> {name, formula}; sigmas: id -> sigma formula
        observables, noise_distrs, sigmas = \
            get_observation_model(observable_df)

    logger.info(f'Observables: {len(observables)}')
    logger.info(f'Sigmas: {len(sigmas)}')

    if not len(sigmas) == len(observables):
        raise AssertionError(
            f'Number of provided observables ({len(observables)}) and sigmas '
            f'({len(sigmas)}) do not match.')

    # TODO: adding extra output parameters is currently not supported,
    # so we add any output parameters to the SBML model.
    # this should be changed to something more elegant
    # <BeginWorkAround>
    # Collect every symbol appearing in observable or sigma formulas that
    # the SBML model does not already define, and add it as a global
    # parameter so the formulas are valid within the model.
    formulas = chain((val['formula'] for val in observables.values()),
                     sigmas.values())
    output_parameters = OrderedDict()
    for formula in formulas:
        # we want reproducible parameter ordering upon repeated import
        free_syms = sorted(sp.sympify(formula).free_symbols,
                           key=lambda symbol: symbol.name)
        for free_sym in free_syms:
            sym = str(free_sym)
            if sbml_model.getElementBySId(sym) is None:
                output_parameters[sym] = None
    logger.debug(f"Adding output parameters to model: {output_parameters}")
    for par in output_parameters.keys():
        petab.add_global_parameter(sbml_model, par)
    # <EndWorkAround>

    # TODO: to parameterize initial states or compartment sizes, we currently
    # need initial assignments. if they occur in the condition table, we
    # create a new parameter initial_${startOrCompartmentID}.
    # feels dirty and should be changed (see also #924)
    # <BeginWorkAround>
    # Condition-table columns that refer to species (initial states) or
    # compartments (initial sizes) of the model.
    initial_states = [col for col in condition_df
                      if sbml_model.getSpecies(col) is not None]
    initial_sizes = [col for col in condition_df
                     if sbml_model.getCompartment(col) is not None]
    fixed_parameters = []
    if len(initial_states) or len(initial_sizes):
        # add preequilibration indicator variable
        # NOTE: would only be required if we actually have preequilibration
        # adding it anyways. can be optimized-out later
        if sbml_model.getParameter(PREEQ_INDICATOR_ID) is not None:
            raise AssertionError("Model already has a parameter with ID "
                                 f"{PREEQ_INDICATOR_ID}. Cannot handle "
                                 "species and compartments in condition table "
                                 "then.")
        indicator = sbml_model.createParameter()
        indicator.setId(PREEQ_INDICATOR_ID)
        indicator.setName(PREEQ_INDICATOR_ID)
        # Can only reset parameters after preequilibration if they are fixed.
        fixed_parameters.append(PREEQ_INDICATOR_ID)

    for assignee_id in initial_sizes + initial_states:
        # one parameter for the preequilibration value, one for simulation
        init_par_id_preeq = f"initial_{assignee_id}_preeq"
        init_par_id_sim = f"initial_{assignee_id}_sim"
        for init_par_id in [init_par_id_preeq, init_par_id_sim]:
            # guard against clashing with any existing SBML entity
            if sbml_model.getElementBySId(init_par_id) is not None:
                raise ValueError(
                    "Cannot create parameter for initial assignment "
                    f"for {assignee_id} because an entity named "
                    f"{init_par_id} exists already in the model.")
            init_par = sbml_model.createParameter()
            init_par.setId(init_par_id)
            init_par.setName(init_par_id)
        assignment = sbml_model.createInitialAssignment()
        assignment.setSymbol(assignee_id)
        # select the preeq or sim value via the 0/1 indicator parameter
        formula = f'{PREEQ_INDICATOR_ID} * {init_par_id_preeq} '\
                  f'+ (1 - {PREEQ_INDICATOR_ID}) * {init_par_id_sim}'
        math_ast = libsbml.parseL3Formula(formula)
        assignment.setMath(math_ast)
    # <EndWorkAround>

    # parameters overridden by the condition table become AMICI constants
    fixed_parameters.extend(
        get_fixed_parameters(sbml_model=sbml_model,
                             condition_df=condition_df))

    logger.debug(f"Fixed parameters are {fixed_parameters}")
    logger.info(f"Overall fixed parameters: {len(fixed_parameters)}")
    logger.info("Variable parameters: "
                + str(len(sbml_model.getListOfParameters())
                      - len(fixed_parameters)))

    # Create Python module from SBML model
    sbml_importer.sbml2amici(
        model_name=model_name,
        output_dir=model_output_dir,
        observables=observables,
        constant_parameters=fixed_parameters,
        sigmas=sigmas,
        allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond,
        noise_distributions=noise_distrs,
        verbose=verbose,
        **kwargs)
# Build a PEtab parameter table for the enzyme kinetics example problem
# from its SBML model and condition/observable/measurement tables.
import petab

# NOTE(review): the returned parameter DataFrame is discarded here —
# presumably it was meant to be assigned or written to a file; confirm.
petab.petab_create_parameter_df(
    sbml_model=petab.get_sbml_model(
        "petab/enzymeKinetics/model_enzymeKinetics.xml"),
    condition_df=petab.get_condition_df(
        "petab/enzymeKinetics/experimentalCondition_enzymeKinetics.tsv"),
    observable_df=petab.get_observable_df(
        "petab/enzymeKinetics/observables_enzymeKinetics.tsv"),
    measurement_df=petab.get_measurement_df(
        "petab/enzymeKinetics/measurementData_enzymeKinetics.tsv"))

# Commented-out variant for the Boehm_JProteomeRes2014 benchmark problem:
#
# import petab
# petab.petab_create_parameter_df(sbml_model = petab.get_sbml_model("petab/Boehm_JProteomeRes2014/model_Boehm_JProteomeRes2014.xml"),
#                                 condition_df = petab.get_condition_df("petab/Boehm_JProteomeRes2014/experimentalCondition_Boehm_JProteomeRes2014.tsv"),
#                                 observable_df = petab.get_observable_df("petab/Boehm_JProteomeRes2014/observables_Boehm_JProteomeRes2014.tsv"),
#                                 measurement_df = petab.get_measurement_df("petab/Boehm_JProteomeRes2014/measurementData_Boehm_JProteomeRes2014.tsv"))