def import_from_files(data_file_path, condition_file_path,
                      visualization_file_path, simulation_file_path,
                      dataset_id_list, sim_cond_id_list, sim_cond_num_list,
                      observable_id_list, observable_num_list, plotted_noise):
    """
    Helper function for plotting data and simulations, which imports data
    from PEtab files.

    For documentation, see main function plot_data_and_simulation()
    """

    # import measurement data and experimental condition
    exp_data = petab.get_measurement_df(data_file_path)
    exp_conditions = petab.get_condition_df(condition_file_path)

    # import visualization specification, if file was specified
    if visualization_file_path != '':
        vis_spec = pd.read_csv(visualization_file_path,
                               sep="\t",
                               index_col=None)
    else:
        # create them based on simulation conditions
        vis_spec = get_default_vis_specs(exp_data, exp_conditions,
                                         dataset_id_list, sim_cond_id_list,
                                         sim_cond_num_list, observable_id_list,
                                         observable_num_list, plotted_noise)

    # import simulation file, if file was specified
    if simulation_file_path != '':
        sim_data = pd.read_csv(simulation_file_path, sep="\t", index_col=None)
    else:
        sim_data = None

    return exp_data, exp_conditions, vis_spec, sim_data
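A minimal usage sketch for the helper above. All file names are hypothetical placeholders; because a visualization file is passed, the selector lists are unused and can be left as None (see plot_data_and_simulation() for their semantics).

# Hypothetical file names; adjust to the PEtab problem at hand.
exp_data, exp_conditions, vis_spec, sim_data = import_from_files(
    data_file_path='measurementData.tsv',
    condition_file_path='experimentalCondition.tsv',
    visualization_file_path='visualizationSpecification.tsv',
    simulation_file_path='',  # empty string: no simulations, sim_data is None
    dataset_id_list=None,
    sim_cond_id_list=None,
    sim_cond_num_list=None,
    observable_id_list=None,
    observable_num_list=None,
    plotted_noise='MeanAndSD')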
Example #2
def get_measurement_df():
    T = np.linspace(0, 100, 1001)
    X = [1 for _ in T]
    measurement_df = pd.DataFrame(
        data={
            'observableId': 'observable_x_',
            'simulationConditionId': TIMECOURSE_ID,
            'time': [f'{t:.1f}' for t in T],
            'measurement': X,
        })
    return petab.get_measurement_df(measurement_df)
Example #3
def test_get_measurement_df():
    """Test measurements.get_measurement_df."""
    measurement_df = pd.DataFrame(
        data={
            OBSERVABLE_ID: ['obs1', 'obs2'],
            OBSERVABLE_PARAMETERS: ['', 'p1;p2'],
            NOISE_PARAMETERS: ['p3;p4', 'p5']
        })

    with tempfile.NamedTemporaryFile(mode='w', delete=False) as fh:
        file_name = fh.name
        measurement_df.to_csv(fh, sep='\t', index=False)

    df = petab.get_measurement_df(file_name).replace(np.nan, '')
    assert (df == measurement_df).all().all()

    # test other arguments
    assert (petab.get_measurement_df(measurement_df) == measurement_df) \
        .all().all()
    assert petab.get_measurement_df(None) is None
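As the test above shows, petab.get_measurement_df accepts either a path to a tab-separated measurement table, an already-loaded DataFrame (returned unchanged apart from basic validation), or None. A short sketch; the column values are made up:

import pandas as pd
import petab

# In-memory DataFrame: passed through essentially unchanged
measurement_df = pd.DataFrame({
    'observableId': ['obs1'],
    'simulationConditionId': ['c0'],
    'time': [1.0],
    'measurement': [0.1],
})
assert petab.get_measurement_df(measurement_df).equals(measurement_df)

# None is passed through, which is convenient for optional tables
assert petab.get_measurement_df(None) is None

# From a TSV file (placeholder path):
# df = petab.get_measurement_df('measurements.tsv')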
def import_from_files(
        data_file_path: str,
        condition_file_path: str,
        simulation_file_path: str,
        dataset_id_list: List[IdsList],
        sim_cond_id_list: List[IdsList],
        sim_cond_num_list: List[NumList],
        observable_id_list: List[IdsList],
        observable_num_list: List[NumList],
        plotted_noise: str,
        visualization_file_path: Optional[str] = None
) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """
    Helper function for plotting data and simulations, which imports data
    from PEtab files. If `visualization_file_path` is not provided, the
    visualization specification DataFrame will be generated automatically.

    For documentation, see main function plot_data_and_simulation()

    Returns:
        A tuple of experimental data, experimental conditions,
        visualization specification and simulation data DataFrames.
    """
    warnings.warn("This function will be removed in future releases. ",
                  DeprecationWarning)

    # import measurement data and experimental condition
    exp_data = petab.get_measurement_df(data_file_path)
    exp_conditions = petab.get_condition_df(condition_file_path)

    # import visualization specification, if file was specified
    if visualization_file_path:
        vis_spec = petab.get_visualization_df(visualization_file_path)
    else:
        # create them based on simulation conditions
        vis_spec, exp_data = get_default_vis_specs(exp_data,
                                                   exp_conditions,
                                                   dataset_id_list,
                                                   sim_cond_id_list,
                                                   sim_cond_num_list,
                                                   observable_id_list,
                                                   observable_num_list,
                                                   plotted_noise)

    # import simulation file, if file was specified
    if simulation_file_path != '':
        sim_data = petab.get_simulation_df(simulation_file_path)
    else:
        sim_data = None

    return exp_data, exp_conditions, vis_spec, sim_data
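A usage sketch for this (deprecated) variant. Paths are placeholders; when visualization_file_path is omitted, a default visualization specification is generated from the selector arguments, which are left as None here (assuming the same defaults as plot_data_and_simulation()).

exp_data, exp_conditions, vis_spec, sim_data = import_from_files(
    data_file_path='measurementData.tsv',
    condition_file_path='experimentalCondition.tsv',
    simulation_file_path='simulatedData.tsv',
    dataset_id_list=None,
    sim_cond_id_list=None,
    sim_cond_num_list=None,
    observable_id_list=None,
    observable_num_list=None,
    plotted_noise='MeanAndSD')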
Example #5
def test_write_measurement_df():
    """Test measurements.get_measurement_df."""
    measurement_df = pd.DataFrame(
        data={
            OBSERVABLE_ID: ['obs1', 'obs2'],
            OBSERVABLE_PARAMETERS: ['', 'p1;p2'],
            NOISE_PARAMETERS: ['p3;p4', 'p5']
        })

    with tempfile.NamedTemporaryFile(mode='w', delete=True) as fh:
        file_name = fh.name
        petab.write_measurement_df(measurement_df, file_name)
        re_df = petab.get_measurement_df(file_name).replace(np.nan, '')
        assert (measurement_df == re_df).all().all()
def test_write_measurement_df():
    """Test measurements.get_measurement_df."""
    measurement_df = pd.DataFrame(
        data={
            OBSERVABLE_ID: ['obs1', 'obs2'],
            OBSERVABLE_PARAMETERS: ['', 'p1;p2'],
            NOISE_PARAMETERS: ['p3;p4', 'p5']
        })

    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = Path(temp_dir) / "parameters.tsv"
        petab.write_measurement_df(measurement_df, file_name)
        re_df = petab.get_measurement_df(file_name).replace(np.nan, '')
        assert (measurement_df == re_df).all().all()
Example #7
def create_test_data(measurement_file_name, parameter_file_name, yaml_config,
                     yaml_file_name_test, model_output_dir, model_name,
                     hdf5_file_name):
    """Create some synthetic data to emulate a test set"""

    test_measurement_file_name = \
        "-testset".join(os.path.splitext(measurement_file_name))
    test_parameter_file_name = \
        "-testset".join(os.path.splitext(parameter_file_name))

    # measurements
    df = petab.get_measurement_df(measurement_file_name)
    df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] = \
        df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] \
        * 2.0
    df.loc[~df.observableParameters.isnull(), 'observableParameters'] = \
        df.loc[~df.observableParameters.isnull(), 'observableParameters'] \
        + "_test"
    petab.write_measurement_df(df, test_measurement_file_name)

    # parameters
    df = petab.get_parameter_df(parameter_file_name)
    df.rename(index={
        'scaling_x1_common': 'scaling_x1_common_test',
        'offset_x2_batch_0': 'offset_x2_batch_0_test',
        'offset_x2_batch_1': 'offset_x2_batch_1_test'
    },
              inplace=True)
    petab.write_parameter_df(df, test_parameter_file_name)

    # yaml
    yaml_config[ptc.PARAMETER_FILE] = test_parameter_file_name
    yaml_config[ptc.PROBLEMS][0][ptc.MEASUREMENT_FILES][0] = \
        test_measurement_file_name
    with open(yaml_file_name_test, 'w') as outfile:
        yaml.dump(yaml_config, outfile, default_flow_style=False)

    generate_hdf5_file(yaml_file=yaml_file_name_test,
                       model_output_dir=model_output_dir,
                       hdf5_file_name="-testset".join(
                           os.path.splitext(hdf5_file_name)),
                       model_name=model_name)
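A hypothetical call of create_test_data; all file and directory names are placeholders, and yaml_config is assumed to be the already-parsed PEtab YAML of the training problem.

import yaml

with open('petab_problem.yaml') as f:  # placeholder YAML of the training set
    yaml_config = yaml.safe_load(f)

create_test_data(
    measurement_file_name='measurementData.tsv',
    parameter_file_name='parameters.tsv',
    yaml_config=yaml_config,
    yaml_file_name_test='petab_problem-testset.yaml',
    model_output_dir='amici_models/model_steadystate_scaled',
    model_name='model_steadystate_scaled',
    hdf5_file_name='data.h5')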
def petab_yaml_path(yaml2sbml_model_string):
    petab_path = Path('output') / TEST_ID
    petab_path.mkdir(parents=True, exist_ok=True)

    petab_yaml_filename = 'petab.yaml'
    measurement_filename = 'measurements.tsv'

    with NamedTemporaryFile('w') as yaml2sbml_file:
        yaml2sbml_file.write(yaml2sbml_model_string)
        yaml2sbml_file.flush()

        yaml2sbml.yaml2petab(
            yaml_dir=yaml2sbml_file.name,
            output_dir=str(petab_path),
            sbml_name=TEST_ID,
            petab_yaml_name=petab_yaml_filename,
            measurement_table_name=measurement_filename,
        )

    # Dummy measurements
    # FIXME replace with expected values...
    # FIXME use timecourse ID as simulation condition ID
    T = np.linspace(0, 100, 1001)
    X = [1 for _ in T]
    measurement_df = pd.DataFrame(
        data={
            'observableId': 'observable_x_',
            'simulationConditionId': 'condition1',
            'time': [f'{t:.1f}' for t in T],
            'measurement': X,
        })
    measurement_df = petab.get_measurement_df(measurement_df)
    petab.write_measurement_df(
        measurement_df,
        str(petab_path / measurement_filename),
    )

    return petab_path / petab_yaml_filename
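Assuming petab_yaml_path is registered as a pytest fixture, a hypothetical test can load the generated problem and lint it (petab.Problem.from_yaml and petab.lint_problem are standard petab API; lint_problem returns True if errors were found).

import petab

def test_generated_problem_is_valid(petab_yaml_path):
    # Load the PEtab problem written by the fixture and run the PEtab linter.
    problem = petab.Problem.from_yaml(str(petab_yaml_path))
    assert not petab.lint_problem(problem)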
            PARAMETER_NAME: parameter_dict0['name'],
            PARAMETER_SCALE: LIN,
            NOMINAL_VALUE: 1,
            ESTIMATE: 0,
        }
    else:
        raise NotImplementedError(parameter_dict0['id'])
    parameter_dicts.append(parameter_dict)
## Noise
parameter_dicts.append({
    PARAMETER_ID: noise,
    PARAMETER_NAME: noise,
    PARAMETER_SCALE: LOG10,
    LOWER_BOUND: '1e-12',
    UPPER_BOUND: '1e3',
    NOMINAL_VALUE: 0.1,
    ESTIMATE: 1,
})

condition_df = petab.get_condition_df(
    pd.DataFrame({CONDITION_ID: [condition_id]}))
observable_df = petab.get_observable_df(pd.DataFrame(observable_dicts))
measurement_df = petab.get_measurement_df(pd.DataFrame(measurement_dicts))
parameter_df = petab.get_parameter_df(pd.DataFrame(parameter_dicts))

petab.write_condition_df(condition_df, 'output/petab/conditions.tsv')
petab.write_observable_df(observable_df, 'output/petab/observables.tsv')
petab.write_measurement_df(measurement_df, 'output/petab/measurements.tsv')
petab.write_parameter_df(parameter_df, 'output/petab/parameters.tsv')
shutil.copy('input/petab_problem.yaml', 'output/petab/petab_problem.yaml')
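A quick round-trip check, reading one of the just-written tables back; this only assumes that the write_*/get_* pairs are inverse operations, as demonstrated in the write/read tests above.

# Reading the written table back should reproduce the measurement table.
reread_df = petab.get_measurement_df('output/petab/measurements.tsv')
assert reread_df.shape == measurement_df.shape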
Example #10
def import_model(sbml_file: str,
                 condition_file: str,
                 measurement_file: str = None,
                 model_name: str = None,
                 model_output_dir: str = None,
                 verbose: bool = True,
                 allow_reinit_fixpar_initcond: bool = False,
                 **kwargs):
    """Import AMICI model"""

    if model_name is None:
        model_name = os.path.splitext(os.path.split(sbml_file)[-1])[0]

    if model_output_dir is None:
        model_output_dir = os.path.join(os.getcwd(), model_name)

    if verbose:
        print(f"{Fore.GREEN}Importing model '{sbml_file}' using fixed "
              f"parameters file '{condition_file}'")
        print(f"{Fore.GREEN}Model name is '{model_name}' Writing model code "
              f"to '{model_output_dir}'")

    sbml_importer = amici.SbmlImporter(sbml_file)
    sbml_model = sbml_importer.sbml

    show_model_info(sbml_model)

    observables = petab.get_observables(sbml_importer.sbml, remove=True)

    sigmas = petab.get_sigmas(sbml_importer.sbml, remove=True)

    measurement_df = petab.get_measurement_df(measurement_file)

    noise_distrs = petab_noise_distributions_to_amici(
        petab.get_noise_distributions(measurement_df))

    # Replace observable IDs in the sigma formulas by the corresponding
    # observable formulas
    import sympy as sp
    for observable_id, formula in sigmas.items():
        repl = sp.sympify(formula).subs(observable_id,
                                        observables[observable_id]['formula'])
        sigmas[observable_id] = str(repl)

    if verbose:
        print('Observables', len(observables))
        print('Sigmas', len(sigmas))

    if len(sigmas) != len(observables):
        raise AssertionError(
            f'Number of provided observables ({len(observables)}) and sigmas '
            f'({len(sigmas)}) do not match.')

    fixed_parameters = get_fixed_parameters(condition_file, sbml_model)

    if verbose:
        print("Overall fixed parameters", len(fixed_parameters))
        print("Non-constant global parameters",
              len(sbml_model.getListOfParameters()) - len(fixed_parameters))

    # Create Python module from SBML model
    start = time.time()
    sbml_importer.sbml2amici(
        model_name,
        output_dir=model_output_dir,
        observables=observables,
        constantParameters=fixed_parameters,
        sigmas=sigmas,
        allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond,
        noise_distributions=noise_distrs,
        **kwargs)
    end = time.time()

    if verbose:
        print(f"{Fore.GREEN}Model imported successfully in "
              f"{round(end - start, 2)}s")
Example #11
def main():
    args = parse_cli_args()

    script_path = os.path.split(os.path.abspath(__file__))[0]
    model_name = 'model_steadystate_scaled'

    print(f'{__file__} running in {os.getcwd()}')
    print(f'Processing model {args.sbml_file_name}')

    # Create sbml model from scratch
    cmd = f'bash -c "{script_path}/createSteadystateExampleSBML.py > {args.sbml_file_name}"'
    print(cmd)
    out = subprocess.check_output(cmd, shell=True)
    print(out.decode('utf-8'))
    print()

    print_model_info(args.sbml_file_name)
    print()

    fixed_parameters, observables = create_module(args.sbml_file_name,
                                                  model_name,
                                                  args.model_output_dir)

    print('Observables:', observables)
    print('Fixed parameters', fixed_parameters)
    print()

    # load model
    sys.path.insert(0, args.model_output_dir)
    model_module = importlib.import_module(model_name)

    print()
    print("--- Creating data ---")
    true_parameters, expected_llh = create_data_tables(
        model=model_module.getModel(),
        measurement_file=args.measurement_file_name,
        fixed_parameter_file=args.condition_file_name,
        fixed_parameters=fixed_parameters)

    # check for valid PEtab
    pp = petab.Problem.from_files(sbml_file=args.sbml_file_name,
                                  condition_file=args.condition_file_name,
                                  measurement_file=args.measurement_file_name)

    create_parameter_table(problem=pp,
                           parameter_file=args.parameter_file_name,
                           nominal_parameters=true_parameters)

    petab.lint_problem(pp)

    # create training data
    generate_hdf5_file(sbml_file_name=args.sbml_file_name,
                       model_output_dir=args.model_output_dir,
                       measurement_file_name=args.measurement_file_name,
                       condition_file_name=args.condition_file_name,
                       hdf5_file_name=args.hdf5_file_name,
                       parameter_file_name=args.parameter_file_name,
                       model_name=model_name)

    # create test data
    args.test_measurement_file_name = \
        "-testset".join(os.path.splitext(args.measurement_file_name))
    args.test_parameter_file_name = \
        "-testset".join(os.path.splitext(args.parameter_file_name))
    df = petab.get_measurement_df(args.measurement_file_name)
    df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] = \
        df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] \
        * 2.0
    df.loc[~df.observableParameters.isnull(), 'observableParameters'] = \
        df.loc[~df.observableParameters.isnull(), 'observableParameters'] \
        + "_test"
    df.to_csv(args.test_measurement_file_name, sep='\t', index=False)
    df = petab.get_parameter_df(args.parameter_file_name)
    df.rename(index={
        'scaling_x1_common': 'scaling_x1_common_test',
        'offset_x2_batch-0': 'offset_x2_batch-0_test',
        'offset_x2_batch-1': 'offset_x2_batch-1_test'
    },
              inplace=True)
    df.to_csv(args.test_parameter_file_name, sep='\t')
    generate_hdf5_file(sbml_file_name=args.sbml_file_name,
                       model_output_dir=args.model_output_dir,
                       measurement_file_name=args.test_measurement_file_name,
                       condition_file_name=args.condition_file_name,
                       hdf5_file_name="-testset".join(
                           os.path.splitext(args.hdf5_file_name)),
                       parameter_file_name=args.test_parameter_file_name,
                       model_name=model_name)

    save_expected_results(args.hdf5_file_name, true_parameters, expected_llh)

    write_starting_points(args.hdf5_file_name, true_parameters)
Example #12
import petab
parameter_df = petab.create_parameter_df(
    sbml_model=petab.get_sbml_model(
        "petab/enzymeKinetics/model_enzymeKinetics.xml"),
    condition_df=petab.get_condition_df(
        "petab/enzymeKinetics/experimentalCondition_enzymeKinetics.tsv"),
    observable_df=petab.get_observable_df(
        "petab/enzymeKinetics/observables_enzymeKinetics.tsv"),
    measurement_df=petab.get_measurement_df(
        "petab/enzymeKinetics/measurementData_enzymeKinetics.tsv"))
#
# import petab
# petab.create_parameter_df(
#     sbml_model=petab.get_sbml_model("petab/Boehm_JProteomeRes2014/model_Boehm_JProteomeRes2014.xml"),
#     condition_df=petab.get_condition_df("petab/Boehm_JProteomeRes2014/experimentalCondition_Boehm_JProteomeRes2014.tsv"),
#     observable_df=petab.get_observable_df("petab/Boehm_JProteomeRes2014/observables_Boehm_JProteomeRes2014.tsv"),
#     measurement_df=petab.get_measurement_df("petab/Boehm_JProteomeRes2014/measurementData_Boehm_JProteomeRes2014.tsv"))
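The parameter table created above would typically be written back to a TSV with petab.write_parameter_df; the output path below is a placeholder.

# Placeholder output path; the table is stored tab-separated with
# parameterId as the index column.
petab.write_parameter_df(
    parameter_df,
    "petab/enzymeKinetics/parameters_enzymeKinetics.tsv")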