def import_from_files(data_file_path, condition_file_path,
                      visualization_file_path, simulation_file_path,
                      dataset_id_list, sim_cond_id_list, sim_cond_num_list,
                      observable_id_list, observable_num_list, plotted_noise):
    """Import data for plotting from PEtab files.

    Helper for plotting data and simulations. For documentation, see the
    main function plot_data_and_simulation().
    """
    # Measurement data and experimental conditions are always required.
    exp_data = petab.get_measurement_df(data_file_path)
    exp_conditions = petab.get_condition_df(condition_file_path)

    if visualization_file_path != '':
        # A visualization specification file was given: read it as TSV.
        vis_spec = pd.read_csv(visualization_file_path, sep="\t",
                               index_col=None)
    else:
        # No file given: derive a default specification from the
        # simulation conditions.
        vis_spec = get_default_vis_specs(exp_data,
                                         exp_conditions,
                                         dataset_id_list,
                                         sim_cond_id_list,
                                         sim_cond_num_list,
                                         observable_id_list,
                                         observable_num_list,
                                         plotted_noise)

    # Simulation results are optional; default to None if no file given.
    sim_data = None
    if simulation_file_path != '':
        sim_data = pd.read_csv(simulation_file_path, sep="\t",
                               index_col=None)

    return exp_data, exp_conditions, vis_spec, sim_data
def get_condition_df():
    """Build the test condition table and normalize it via petab.

    The table has one row per condition ID and a single fixed
    quantity column ``q_``.
    """
    raw_conditions = pd.DataFrame({
        'conditionId': [TIMECOURSE_ID, 'q_pos', 'q_zero', 'q_neg'],
        'q_': [None, 1, 0, -1],
    })
    return petab.get_condition_df(raw_conditions)
def get_fixed_parameters(condition_file_name, sbml_model,
                         constant_species_to_parameters=True):
    """Determine, set and return fixed model parameters.

    Parameters specified in `condition_file_name` are turned into constants.
    Only global SBML parameters are considered. Local parameters are ignored.

    Species which are marked constant within the SBML model will be turned
    into constant parameters *within* the given `sbml_model`.
    """
    condition_df = petab.get_condition_df(condition_file_name)
    print(f'Condition table: {condition_df.shape}')

    # Column names are model parameter names that should be made constant,
    # except for the condition name and any overridden (string-valued)
    # parameters. (Overridden ones could potentially still be made constant,
    # but leaving them might increase model reusability.)
    fixed_parameters = [
        col for col in condition_df.columns
        if col != 'conditionName' and condition_df[col].dtype != 'O'
    ]

    # Duplicate columns would silently fix the same parameter twice.
    assert len(fixed_parameters) == len(set(fixed_parameters))

    if constant_species_to_parameters:
        # Turn species which are marked constant in the SBML model into
        # parameters.
        constant_species = petab.constant_species_to_parameters(sbml_model)

        print("Constant species converted to parameters",
              len(constant_species))
        print("Non-constant species",
              len(sbml_model.getListOfSpecies()))

        # ... and append them to the list of fixed parameters
        fixed_parameters.extend(
            species for species in constant_species
            if species not in fixed_parameters)

    # Ensure mentioned parameters exist in the model; remove additional
    # ones from the list (iterate over a copy while mutating).
    for candidate in list(fixed_parameters):
        # check global parameters
        if not sbml_model.getParameter(candidate) \
                and not sbml_model.getSpecies(candidate):
            print(f"{Fore.YELLOW}Parameter or species '{candidate}' "
                  "provided in condition table but not present in model.")
            fixed_parameters.remove(candidate)

    return fixed_parameters
def import_from_files(
        data_file_path: str,
        condition_file_path: str,
        simulation_file_path: str,
        dataset_id_list: List[IdsList],
        sim_cond_id_list: List[IdsList],
        sim_cond_num_list: List[NumList],
        observable_id_list: List[IdsList],
        observable_num_list: List[NumList],
        plotted_noise: str,
        visualization_file_path: str = None
) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """
    Helper function for plotting data and simulations, which imports data
    from PEtab files.

    If `visualization_file_path` is not provided, the visualization
    specification DataFrame will be generated automatically.

    For documentation, see main function plot_data_and_simulation()

    Returns:
        A tuple of experimental data, experimental conditions,
        visualization specification and simulation data DataFrames.
    """
    warnings.warn("This function will be removed in future releases. ",
                  DeprecationWarning)

    # import measurement data and experimental condition
    exp_data = petab.get_measurement_df(data_file_path)
    exp_conditions = petab.get_condition_df(condition_file_path)

    # import visualization specification, if file was specified
    if visualization_file_path:
        vis_spec = petab.get_visualization_df(visualization_file_path)
    else:
        # create them based on simulation conditions
        vis_spec, exp_data = get_default_vis_specs(exp_data,
                                                   exp_conditions,
                                                   dataset_id_list,
                                                   sim_cond_id_list,
                                                   sim_cond_num_list,
                                                   observable_id_list,
                                                   observable_num_list,
                                                   plotted_noise)

    # import simulation file, if one was specified.
    # FIX: use a truthiness check (consistent with the visualization
    # handling above) so that None and '' are treated alike instead of
    # passing None through to petab.get_simulation_df.
    if simulation_file_path:
        sim_data = petab.get_simulation_df(simulation_file_path)
    else:
        sim_data = None

    return exp_data, exp_conditions, vis_spec, sim_data
def test_get_condition_df():
    """Test conditions.get_condition_df."""
    # A condition table without the mandatory ID column must be rejected.
    df_no_ids = pd.DataFrame({
        CONDITION_NAME: ['Condition 1', 'Condition 2'],
        'fixedParameter1': [1.0, 2.0],
    })
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as fh:
        file_name = fh.name
        df_no_ids.to_csv(fh, sep='\t', index=False)
    with pytest.raises(KeyError):
        petab.get_condition_df(file_name)
    os.remove(file_name)

    # With IDs, a round trip through a file must preserve the table.
    df_with_ids = pd.DataFrame({
        CONDITION_ID: ['condition1', 'condition2'],
        CONDITION_NAME: ['', 'Condition 2'],
        'fixedParameter1': [1.0, 2.0],
    })
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as fh:
        file_name = fh.name
        df_with_ids.to_csv(fh, sep='\t', index=False)
    round_tripped = petab.get_condition_df(file_name).replace(np.nan, '')
    assert (round_tripped == df_with_ids.set_index(CONDITION_ID)).all().all()
    os.remove(file_name)

    # test other arguments: DataFrame pass-through and None
    assert (petab.get_condition_df(df_with_ids) == df_with_ids).all().all()
    assert petab.get_condition_df(None) is None
def test_write_condition_df():
    """Test conditions.write_condition_df.

    Writes a condition table to disk and checks that reading it back
    yields an identical DataFrame.
    """
    import os

    condition_df = pd.DataFrame(
        data={
            CONDITION_ID: ['condition1', 'condition2'],
            CONDITION_NAME: ['Condition 1', 'Condition 2'],
            'fixedParameter1': [1.0, 2.0]
        }).set_index(CONDITION_ID)

    # FIX: write into a temporary *directory* instead of re-opening a
    # NamedTemporaryFile by name: re-opening a still-open file fails on
    # Windows, and with delete=True the open handle kept the path in use
    # while petab wrote to it.
    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = os.path.join(temp_dir, "conditions.tsv")
        petab.write_condition_df(condition_df, file_name)
        re_df = petab.get_condition_df(file_name)
        assert (condition_df == re_df).all().all()
def test_write_condition_df():
    """Test conditions.write_condition_df."""
    expected = pd.DataFrame({
        CONDITION_ID: ['condition1', 'condition2'],
        CONDITION_NAME: ['Condition 1', 'Condition 2'],
        'fixedParameter1': [1.0, 2.0],
    }).set_index(CONDITION_ID)

    # Round-trip through a file inside a scratch directory.
    with tempfile.TemporaryDirectory() as temp_dir:
        target = Path(temp_dir) / "conditions.tsv"
        petab.write_condition_df(expected, target)
        actual = petab.get_condition_df(target)
        assert (expected == actual).all().all()
def import_directory_of_componentwise_files(
    directory: TYPE_PATH,
    timecourse_id: str = None,
) -> Tuple[Timecourse, pd.DataFrame]:
    """Import a timecourse from a directory of componentwise files.

    Each file in `directory` is read as the regimen of one component; the
    regimens are merged into a sequence of conditions and a single
    timecourse.

    Parameters
    ----------
    directory:
        Directory containing one regimen file per component.
    timecourse_id:
        ID for the created timecourse. Defaults to
        ``timecourse_<directory name>``.

    Returns
    -------
    A tuple of the timecourse DataFrame and the condition DataFrame.
    """
    directory = Path(directory)
    if timecourse_id is None:
        timecourse_id = f'timecourse_{directory.parts[-1]}'

    regimens = Regimens(
        {Regimen.from_path(path) for path in Path(directory).iterdir()})
    conditions_with_times = regimens.as_conditions()
    for index, (time, condition) in enumerate(conditions_with_times.items()):
        conditions_with_times[time] = Condition(
            pd.Series(
                data=condition,
                name=f'{timecourse_id}_condition_{index}',
            ))
    unique_conditions, condition_sequence = \
        deduplicate_conditions(list(conditions_with_times.values()))

    timecourse_df = pd.DataFrame(data={
        TIMECOURSE_ID: [f'{timecourse_id}'],
        TIMECOURSE: [TIMECOURSE_ITEM_DELIMETER.join([
            f'{timepoint}{TIME_CONDITION_DELIMETER}{condition_id}'
            for timepoint, condition_id in
            zip(conditions_with_times, condition_sequence)
        ])],
    })
    timecourse_df = get_timecourse_df(timecourse_df)
    if len(timecourse_df) != 1:
        # FIX: added the missing space between the two concatenated
        # sentence fragments of the error message.
        raise ValueError(
            'Something went wrong with importing the componentwise '
            'timecourse. Multiple timecourses were created.')

    # TODO duplicated from "to_petab_files"...
    condition_df = pd.DataFrame(data=[{
        **{
            CONDITION_ID: condition.id,
            CONDITION_NAME: condition.name,
        },
        **dict(condition),
    } for condition in unique_conditions.values()])
    # Drop the name column if no condition has a name.
    # FIX: the previous check `set(...) is None` was always False (a set is
    # never None), so the column was never dropped; additionally `drop` was
    # neither column-wise nor assigned back.
    if condition_df[CONDITION_NAME].isna().all():
        condition_df = condition_df.drop(columns=[CONDITION_NAME])
    condition_df = petab.get_condition_df(condition_df)

    return timecourse_df, condition_df
def to_petab_dataframes(timecourse: Timecourse) -> Dict[str, pd.DataFrame]:
    """Convert a timecourse to PEtab dataframes.

    Parameters
    ----------
    timecourse:
        The timecourse to convert.

    Returns
    -------
    A dictionary, where the keys are `'.C.CONDITION'` and `.C.TIMECOURSE`,
    and the values are the corresponding dataframes.
    """
    # FIX: column names must be the *values* of the PEtab constants.
    # The previous `dict(CONDITION_ID=..., CONDITION_NAME=...)` created
    # literal 'CONDITION_ID'/'CONDITION_NAME' columns, so the later lookup
    # `condition_df[CONDITION_NAME]` could not find them.
    condition_df = pd.DataFrame(data=[
        {
            CONDITION_ID: condition.id,
            CONDITION_NAME: condition.name,
            **dict(condition),
        }
        for condition in timecourse.values()
    ])
    # Drop the name column if no condition has a name.
    # FIX: `set(...) is None` was always False, and `drop` was neither
    # column-wise nor assigned back.
    if condition_df[CONDITION_NAME].isna().all():
        condition_df = condition_df.drop(columns=[CONDITION_NAME])
    condition_df = petab.get_condition_df(condition_df)

    timecourse_df = pd.DataFrame(data=[
        {
            TIMECOURSE_ID: timecourse.id,
            TIMECOURSE: TIMECOURSE_ITEM_DELIMETER.join([
                f'{timepoint}{TIME_CONDITION_DELIMETER}{condition.id}'
                for timepoint, condition in timecourse.items()
            ]),
        }
    ])
    # FIX: the timecourse table built above has no name column, so the old
    # unconditional `timecourse_df[TIMECOURSE_NAME]` lookup raised KeyError;
    # only drop the column when it exists and is entirely empty.
    if (TIMECOURSE_NAME in timecourse_df.columns
            and timecourse_df[TIMECOURSE_NAME].isna().all()):
        timecourse_df = timecourse_df.drop(columns=[TIMECOURSE_NAME])
    timecourse_df = get_timecourse_df(timecourse_df)

    return {
        CONDITION: condition_df,
        TIMECOURSE: timecourse_df,
    }
PARAMETER_NAME: parameter_dict0['name'], PARAMETER_SCALE: LIN, NOMINAL_VALUE: 1, ESTIMATE: 0, } else: raise NotImplementedError(parameter_dict0['id']) parameter_dicts.append(parameter_dict) ## Noise parameter_dicts.append({ PARAMETER_ID: noise, PARAMETER_NAME: noise, PARAMETER_SCALE: LOG10, LOWER_BOUND: '1e-12', UPPER_BOUND: '1e3', NOMINAL_VALUE: 0.1, ESTIMATE: 1, }) condition_df = petab.get_condition_df( pd.DataFrame({CONDITION_ID: [condition_id]})) observable_df = petab.get_observable_df(pd.DataFrame(observable_dicts)) measurement_df = petab.get_measurement_df(pd.DataFrame(measurement_dicts)) parameter_df = petab.get_parameter_df(pd.DataFrame(parameter_dicts)) petab.write_condition_df(condition_df, 'output/petab/conditions.tsv') petab.write_observable_df(observable_df, 'output/petab/observables.tsv') petab.write_measurement_df(measurement_df, 'output/petab/measurements.tsv') petab.write_parameter_df(parameter_df, 'output/petab/parameters.tsv') shutil.copy('input/petab_problem.yaml', 'output/petab/petab_problem.yaml')
def import_model(sbml_model: Union[str, 'libsbml.Model'],
                 condition_table: Optional[Union[str, pd.DataFrame]] = None,
                 observable_table: Optional[Union[str, pd.DataFrame]] = None,
                 measurement_table: Optional[Union[str, pd.DataFrame]] = None,
                 model_name: Optional[str] = None,
                 model_output_dir: Optional[str] = None,
                 verbose: Optional[Union[bool, int]] = True,
                 allow_reinit_fixpar_initcond: bool = True,
                 **kwargs) -> None:
    """
    Create AMICI model from PEtab problem

    :param sbml_model:
        PEtab SBML model or SBML file name.

    :param condition_table:
        PEtab condition table. If provided, parameters from there will be
        turned into AMICI constant parameters (i.e. parameters w.r.t. which
        no sensitivities will be computed).

    :param observable_table:
        PEtab observable table.

    :param measurement_table:
        PEtab measurement table.

    :param model_name:
        Name of the generated model. If model file name was provided,
        this defaults to the file name without extension, otherwise
        the SBML model ID will be used.

    :param model_output_dir:
        Directory to write the model code to. Will be created if doesn't
        exist. Defaults to current directory.

    :param verbose:
        Print/log extra information.

    :param allow_reinit_fixpar_initcond:
        See :class:`amici.ode_export.ODEExporter`. Must be enabled if
        initial states are to be reset after preequilibration.

    :param kwargs:
        Additional keyword arguments to be passed to
        :meth:`amici.sbml_import.SbmlImporter.sbml2amici`.
    """
    set_log_level(logger, verbose)

    logger.info(f"Importing model ...")

    # Get PEtab tables
    observable_df = petab.get_observable_df(observable_table)
    # to determine fixed parameters
    condition_df = petab.get_condition_df(condition_table)

    # The observation model below requires observables; fail early.
    if observable_df is None:
        raise NotImplementedError("PEtab import without observables table "
                                  "is currently not supported.")

    # Model name from SBML ID or filename
    if model_name is None:
        if isinstance(sbml_model, libsbml.Model):
            model_name = sbml_model.getId()
        else:
            model_name = os.path.splitext(os.path.split(sbml_model)[-1])[0]

    if model_output_dir is None:
        model_output_dir = os.path.join(os.getcwd(), model_name)

    logger.info(f"Model name is '{model_name}'. "
                f"Writing model code to '{model_output_dir}'.")

    # Load model
    if isinstance(sbml_model, str):
        # from file
        sbml_reader = libsbml.SBMLReader()
        sbml_doc = sbml_reader.readSBMLFromFile(sbml_model)
        sbml_model = sbml_doc.getModel()
    else:
        # Create a copy, because it will be modified by SbmlImporter
        sbml_doc = sbml_model.getSBMLDocument().clone()
        sbml_model = sbml_doc.getModel()

    show_model_info(sbml_model)

    sbml_importer = amici.SbmlImporter(sbml_model)
    # work with the importer's own model reference from here on
    sbml_model = sbml_importer.sbml

    if observable_df is not None:
        observables, noise_distrs, sigmas = \
            get_observation_model(observable_df)

    logger.info(f'Observables: {len(observables)}')
    logger.info(f'Sigmas: {len(sigmas)}')

    # Every observable needs exactly one sigma expression.
    if not len(sigmas) == len(observables):
        raise AssertionError(
            f'Number of provided observables ({len(observables)}) and sigmas '
            f'({len(sigmas)}) do not match.')

    # TODO: adding extra output parameters is currently not supported,
    # so we add any output parameters to the SBML model.
    # this should be changed to something more elegant
    # <BeginWorkAround>
    formulas = chain((val['formula'] for val in observables.values()),
                     sigmas.values())
    output_parameters = OrderedDict()
    for formula in formulas:
        # we want reproducible parameter ordering upon repeated import
        free_syms = sorted(sp.sympify(formula).free_symbols,
                           key=lambda symbol: symbol.name)
        for free_sym in free_syms:
            sym = str(free_sym)
            # only symbols unknown to the SBML model are new output
            # parameters
            if sbml_model.getElementBySId(sym) is None:
                output_parameters[sym] = None
    logger.debug(f"Adding output parameters to model: {output_parameters}")
    for par in output_parameters.keys():
        petab.add_global_parameter(sbml_model, par)
    # <EndWorkAround>

    # TODO: to parameterize initial states or compartment sizes, we currently
    # need initial assignments. if they occur in the condition table, we
    # create a new parameter initial_${startOrCompartmentID}.
    # feels dirty and should be changed (see also #924)
    # <BeginWorkAround>
    initial_states = [col for col in condition_df
                      if sbml_model.getSpecies(col) is not None]
    initial_sizes = [col for col in condition_df
                     if sbml_model.getCompartment(col) is not None]
    fixed_parameters = []
    if len(initial_states) or len(initial_sizes):
        # add preequilibration indicator variable
        # NOTE: would only be required if we actually have preequilibration
        # adding it anyways. can be optimized-out later
        if sbml_model.getParameter(PREEQ_INDICATOR_ID) is not None:
            raise AssertionError("Model already has a parameter with ID "
                                 f"{PREEQ_INDICATOR_ID}. Cannot handle "
                                 "species and compartments in condition table "
                                 "then.")
        indicator = sbml_model.createParameter()
        indicator.setId(PREEQ_INDICATOR_ID)
        indicator.setName(PREEQ_INDICATOR_ID)
        # Can only reset parameters after preequilibration if they are fixed.
        fixed_parameters.append(PREEQ_INDICATOR_ID)

    for assignee_id in initial_sizes + initial_states:
        init_par_id_preeq = f"initial_{assignee_id}_preeq"
        init_par_id_sim = f"initial_{assignee_id}_sim"
        for init_par_id in [init_par_id_preeq, init_par_id_sim]:
            if sbml_model.getElementBySId(init_par_id) is not None:
                raise ValueError(
                    "Cannot create parameter for initial assignment "
                    f"for {assignee_id} because an entity named "
                    f"{init_par_id} exists already in the model.")
            init_par = sbml_model.createParameter()
            init_par.setId(init_par_id)
            init_par.setName(init_par_id)
        assignment = sbml_model.createInitialAssignment()
        assignment.setSymbol(assignee_id)
        # selects the preeq or sim value via the indicator parameter
        formula = f'{PREEQ_INDICATOR_ID} * {init_par_id_preeq} '\
            f'+ (1 - {PREEQ_INDICATOR_ID}) * {init_par_id_sim}'
        math_ast = libsbml.parseL3Formula(formula)
        assignment.setMath(math_ast)
    # <EndWorkAround>

    fixed_parameters.extend(
        get_fixed_parameters(sbml_model=sbml_model,
                             condition_df=condition_df))

    logger.debug(f"Fixed parameters are {fixed_parameters}")
    logger.info(f"Overall fixed parameters: {len(fixed_parameters)}")
    logger.info("Variable parameters: "
                + str(len(sbml_model.getListOfParameters())
                      - len(fixed_parameters)))

    # Create Python module from SBML model
    sbml_importer.sbml2amici(
        model_name=model_name,
        output_dir=model_output_dir,
        observables=observables,
        constant_parameters=fixed_parameters,
        sigmas=sigmas,
        allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond,
        noise_distributions=noise_distrs,
        verbose=verbose,
        **kwargs)
import petab

# Create a parameter table for the enzyme-kinetics PEtab problem from its
# SBML model and the condition/observable/measurement tables.
petab.petab_create_parameter_df(
    sbml_model=petab.get_sbml_model(
        "petab/enzymeKinetics/model_enzymeKinetics.xml"),
    condition_df=petab.get_condition_df(
        "petab/enzymeKinetics/experimentalCondition_enzymeKinetics.tsv"),
    observable_df=petab.get_observable_df(
        "petab/enzymeKinetics/observables_enzymeKinetics.tsv"),
    measurement_df=petab.get_measurement_df(
        "petab/enzymeKinetics/measurementData_enzymeKinetics.tsv"))

# Equivalent call for the Boehm_JProteomeRes2014 problem, kept for reference:
# import petab
# petab.petab_create_parameter_df(sbml_model = petab.get_sbml_model("petab/Boehm_JProteomeRes2014/model_Boehm_JProteomeRes2014.xml"),
#                                 condition_df = petab.get_condition_df("petab/Boehm_JProteomeRes2014/experimentalCondition_Boehm_JProteomeRes2014.tsv"),
#                                 observable_df = petab.get_observable_df("petab/Boehm_JProteomeRes2014/observables_Boehm_JProteomeRes2014.tsv"),
#                                 measurement_df = petab.get_measurement_df("petab/Boehm_JProteomeRes2014/measurementData_Boehm_JProteomeRes2014.tsv"))