# Module-level imports this snippet relies on (the helper functions called
# below, e.g. parse_cli_args, create_module and create_data_tables, are
# defined elsewhere in the same script; the `petab.C as ptc` alias is an
# assumption based on how the constants are used).
import importlib
import os
import subprocess
import sys

import pandas as pd
import petab
import petab.C as ptc
import yaml


def main():
    args = parse_cli_args()

    script_path = os.path.split(os.path.abspath(__file__))[0]
    model_name = 'model_steadystate_scaled'
    sbml_file_name = "model_steadystate_scaled.sbml"
    measurement_file_name = 'example_data.tsv'
    condition_file_name = 'example_data_fixed.tsv'
    parameter_file_name = 'example_data_parameter.tsv'
    observable_file_name = 'model_steadystate_observables.tsv'
    yaml_file_name = 'model_steadystate.yaml'
    yaml_file_name_test = 'model_steadystate_test.yaml'

    print(f'{__file__} running in {os.getcwd()}')
    print(f'Processing model {sbml_file_name}')

    # Create SBML model from scratch
    cmd = f'bash -c "{script_path}/createSteadystateExampleSBML.py ' \
          f'> {sbml_file_name}"'
    print(cmd)
    out = subprocess.check_output(cmd, shell=True)
    print(out.decode('utf-8'))
    print()

    print_model_info(sbml_file_name)
    print()

    # create condition table
    condition_df = pd.DataFrame(data={
        ptc.CONDITION_ID: ["condition_0", "condition_1",
                           "condition_2", "condition_3"],
        "k0": [1, 1.1, 1.2, 1.3],
    })
    condition_df.set_index([ptc.CONDITION_ID], inplace=True)

    # create observables
    observable_df = pd.DataFrame(data={
        ptc.OBSERVABLE_ID: [
            "obs_x1", "obs_x2", "obs_x3",
            "obs_x1_scaled", "obs_x2_offsetted", "obs_x1withsigma",
        ],
        ptc.OBSERVABLE_FORMULA: [
            "x1", "x2", "x3",
            "observableParameter1_obs_x1_scaled * x1",
            "observableParameter1_obs_x2_offsetted + x2",
            "x1",
        ],
        ptc.NOISE_FORMULA: [
            "noiseParameter1_obs_x1",
            "noiseParameter1_obs_x2",
            "noiseParameter1_obs_x3",
            "noiseParameter1_obs_x1_scaled",
            "noiseParameter1_obs_x2_offsetted",
            "noiseParameter1_obs_x1withsigma",
        ],
    })
    observable_df.set_index([ptc.OBSERVABLE_ID], inplace=True)

    create_module(sbml_model_file=sbml_file_name, model_name=model_name,
                  model_output_dir=args.model_output_dir,
                  observable_df=observable_df, condition_df=condition_df)

    # load model
    sys.path.insert(0, args.model_output_dir)
    model_module = importlib.import_module(model_name)

    print()
    print("--- Creating data ---")

    measurement_df, true_parameters, expected_llh = create_data_tables(
        model=model_module.getModel(), condition_df=condition_df)

    # assemble PEtab problem
    pp = petab.Problem.from_files(sbml_file=sbml_file_name)
    pp.observable_df = observable_df
    pp.measurement_df = measurement_df
    pp.condition_df = condition_df
    create_parameter_table(problem=pp, nominal_parameters=true_parameters)

    # check for valid PEtab
    petab.lint_problem(pp)

    # save remaining tables
    pp.to_files(measurement_file=measurement_file_name,
                condition_file=condition_file_name,
                observable_file=observable_file_name,
                parameter_file=parameter_file_name)

    # create PEtab yaml file
    config = {
        'format_version': petab.__format_version__,
        'parameter_file': parameter_file_name,
        'problems': [
            {
                ptc.SBML_FILES: [sbml_file_name],
                ptc.CONDITION_FILES: [condition_file_name],
                ptc.MEASUREMENT_FILES: [measurement_file_name],
                ptc.OBSERVABLE_FILES: [observable_file_name],
            },
        ]
    }
    petab.validate(config)  # , path_prefix=model_dir)

    with open(yaml_file_name, 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # create training data
    generate_hdf5_file(yaml_file=yaml_file_name,
                       model_output_dir=args.model_output_dir,
                       hdf5_file_name=args.hdf5_file_name,
                       model_name=model_name)

    create_test_data(measurement_file_name, parameter_file_name, config,
                     yaml_file_name_test, args.model_output_dir, model_name,
                     args.hdf5_file_name)

    save_expected_results(args.hdf5_file_name, true_parameters, expected_llh)

    write_starting_points(args.hdf5_file_name, true_parameters)
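
# The script is presumably run directly via a standard entry-point guard.
# The guard is not part of the excerpt above and is shown here only as an
# assumption about how main() is invoked:
if __name__ == '__main__':
    main()
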
def write_problem(
        test_id: int,
        parameter_df: pd.DataFrame,
        condition_dfs: Union[List[pd.DataFrame], pd.DataFrame],
        observable_dfs: Union[List[pd.DataFrame], pd.DataFrame],
        measurement_dfs: Union[List[pd.DataFrame], pd.DataFrame],
        sbml_files: Union[List[str], str] = None,
) -> None:
    """Write problem to files.

    Parameters
    ----------
    test_id: Identifier of the test.
    parameter_df: PEtab parameter table.
    condition_dfs: PEtab condition tables.
    observable_dfs: PEtab observable tables.
    measurement_dfs: PEtab measurement tables.
    sbml_files: PEtab SBML files. If None, then the default
        petabtests.DEFAULT_MODEL_FILE is used.
    """
    print(f"Writing case {test_id}...")

    # convenience
    if isinstance(condition_dfs, pd.DataFrame):
        condition_dfs = [condition_dfs]
    if isinstance(observable_dfs, pd.DataFrame):
        observable_dfs = [observable_dfs]
    if isinstance(measurement_dfs, pd.DataFrame):
        measurement_dfs = [measurement_dfs]
    if isinstance(sbml_files, str):
        sbml_files = [sbml_files]

    # id to string
    id_str = test_id_str(test_id)
    dir_ = os.path.join(CASES_DIR, id_str)

    # petab yaml
    config = {
        FORMAT_VERSION: petab.__format_version__,
        PROBLEMS: [
            {
                SBML_FILES: [],
                CONDITION_FILES: [],
                MEASUREMENT_FILES: [],
                OBSERVABLE_FILES: [],
            },
        ]
    }

    # copy models
    if sbml_files is None:
        sbml_files = [DEFAULT_MODEL_FILE]
    copied_sbml_files = []
    for i_sbml, sbml_file in enumerate(sbml_files):
        if len(sbml_files) == 1:
            copied_sbml_file = '_model.xml'
        else:
            copied_sbml_file = f'_model{i_sbml}.xml'
        copyfile(os.path.join(dir_, sbml_file),
                 os.path.join(dir_, copied_sbml_file))
        copied_sbml_files.append(copied_sbml_file)
    config[PROBLEMS][0][SBML_FILES] = copied_sbml_files

    # write parameters
    parameters_file = '_parameters.tsv'
    petab.write_parameter_df(parameter_df,
                             os.path.join(dir_, parameters_file))
    config[PARAMETER_FILE] = parameters_file

    # write conditions
    _write_dfs_to_files(id_str, 'conditions', petab.write_condition_df,
                        condition_dfs, config[PROBLEMS][0][CONDITION_FILES])

    # write observables
    _write_dfs_to_files(id_str, 'observables', petab.write_observable_df,
                        observable_dfs, config[PROBLEMS][0][OBSERVABLE_FILES])

    # write measurements
    _write_dfs_to_files(id_str, 'measurements', petab.write_measurement_df,
                        measurement_dfs, config[PROBLEMS][0][MEASUREMENT_FILES])

    # validate petab yaml
    petab.validate(config, path_prefix=dir_)

    # write yaml
    yaml_file = problem_yaml_name(test_id)
    with open(os.path.join(dir_, yaml_file), 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # validate written PEtab files
    problem = petab.Problem.from_yaml(os.path.join(dir_, yaml_file))
    petab.lint_problem(problem)
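
# A hypothetical invocation of the write_problem() variant above. The table
# contents, ids, and the test id are illustrative only; the call assumes the
# case directory for test 1 exists and already contains petabtests' default
# model file, since sbml_files is left at its default.
parameter_df = pd.DataFrame({
    'parameterId': ['k1'],
    'parameterScale': ['lin'],
    'lowerBound': [0.1],
    'upperBound': [10.0],
    'nominalValue': [1.0],
    'estimate': [1],
}).set_index('parameterId')

condition_df = pd.DataFrame({'conditionId': ['c0']}).set_index('conditionId')

observable_df = pd.DataFrame({
    'observableId': ['obs_a'],
    'observableFormula': ['A'],
    'noiseFormula': [1.0],
}).set_index('observableId')

measurement_df = pd.DataFrame({
    'observableId': ['obs_a'],
    'simulationConditionId': ['c0'],
    'time': [10.0],
    'measurement': [0.7],
})

write_problem(test_id=1,
              parameter_df=parameter_df,
              condition_dfs=condition_df,
              observable_dfs=observable_df,
              measurement_dfs=measurement_df)
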
def write_problem(test_id: int,
                  parameter_df: pd.DataFrame,
                  condition_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  observable_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  measurement_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  model_files: Union[List[str], str],
                  format_: str = 'sbml') -> None:
    """Write problem to files.

    Parameters
    ----------
    test_id: Identifier of the test.
    parameter_df: PEtab parameter table.
    condition_dfs: PEtab condition tables.
    observable_dfs: PEtab observable tables.
    measurement_dfs: PEtab measurement tables.
    model_files: PEtab SBML/PySB files.
    format_: Model format (SBML/PySB).
    """
    print(f"Writing case {test_id} {format_} ...")

    # convenience
    if isinstance(condition_dfs, pd.DataFrame):
        condition_dfs = [condition_dfs]
    if isinstance(observable_dfs, pd.DataFrame):
        observable_dfs = [observable_dfs]
    if isinstance(measurement_dfs, pd.DataFrame):
        measurement_dfs = [measurement_dfs]
    if isinstance(model_files, str):
        model_files = [model_files]

    # id to string
    dir_ = case_dir(test_id, format_)

    # petab yaml
    config = {
        FORMAT_VERSION: petab.__format_version__,
        PROBLEMS: [
            {
                SBML_FILES: [],
                CONDITION_FILES: [],
                MEASUREMENT_FILES: [],
                OBSERVABLE_FILES: [],
            },
        ]
    }

    if format_ == 'sbml':
        suffix = '.xml'
    else:
        suffix = '.py'

    # copy models
    copied_model_files = []
    for i_sbml, model_file in enumerate(model_files):
        if len(model_files) == 1:
            copied_model_file = f'_model{suffix}'
        else:
            copied_model_file = f'_model{i_sbml}{suffix}'
        copyfile(os.path.join(dir_, model_file),
                 os.path.join(dir_, copied_model_file))
        copied_model_files.append(copied_model_file)
    config[PROBLEMS][0][SBML_FILES] = copied_model_files

    # write parameters
    parameters_file = '_parameters.tsv'
    petab.write_parameter_df(parameter_df,
                             os.path.join(dir_, parameters_file))
    config[PARAMETER_FILE] = parameters_file

    # write conditions
    _write_dfs_to_files(dir_, 'conditions', petab.write_condition_df,
                        condition_dfs, config[PROBLEMS][0][CONDITION_FILES])

    # write observables
    _write_dfs_to_files(dir_, 'observables', petab.write_observable_df,
                        observable_dfs, config[PROBLEMS][0][OBSERVABLE_FILES])

    # write measurements
    _write_dfs_to_files(dir_, 'measurements', petab.write_measurement_df,
                        measurement_dfs, config[PROBLEMS][0][MEASUREMENT_FILES])

    # validate petab yaml
    petab.validate(config, path_prefix=dir_)

    # write yaml
    yaml_file = problem_yaml_name(test_id)
    with open(os.path.join(dir_, yaml_file), 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # validate written PEtab files
    problem = petab.Problem.from_yaml(os.path.join(dir_, yaml_file))
    petab.lint_problem(problem)
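
# _write_dfs_to_files is not shown in this excerpt. Based on how it is called
# in the variant above (directory, table name, petab writer, list of tables,
# list from the YAML config), a minimal sketch of such a helper could look
# like the following; the exact file-naming scheme is an assumption, not the
# package's actual code.
def _write_dfs_to_files(dir_: str, name: str, writer, dfs, config_list) -> None:
    """Write each DataFrame via `writer` into `dir_` and record the resulting
    file name in the corresponding list of the YAML config."""
    for i_df, df in enumerate(dfs):
        if len(dfs) == 1:
            fname = f'_{name}.tsv'
        else:
            fname = f'_{name}{i_df}.tsv'
        writer(df, os.path.join(dir_, fname))
        config_list.append(fname)
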