Example #1
def main():
    """
    Command line interface to import a model in the PEtab
    (https://github.com/ICB-DCM/PEtab/) format into AMICI.
    """
    args = parse_cli_args()

    if args.yaml_file_name:
        pp = petab.Problem.from_yaml(args.yaml_file_name)
    else:
        pp = petab.Problem.from_files(
            sbml_file=args.sbml_file_name,
            condition_file=args.condition_file_name,
            measurement_file=args.measurement_file_name,
            parameter_file=args.parameter_file_name,
            observable_files=args.observable_file_name)

    # First check for valid PEtab
    petab.lint_problem(pp)

    import_model(model_name=args.model_name,
                 sbml_model=pp.sbml_model,
                 condition_table=pp.condition_df,
                 observable_table=pp.observable_df,
                 measurement_table=pp.measurement_df,
                 model_output_dir=args.model_output_dir,
                 compile=args.compile,
                 verbose=args.verbose)
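
Example #1 relies on a parse_cli_args helper that is not shown. A minimal sketch of such a helper, using argparse with purely illustrative option names, might look as follows:

import argparse

def parse_cli_args():
    """Hypothetical CLI parser; all option names are illustrative."""
    parser = argparse.ArgumentParser(
        description='Import a PEtab problem into AMICI.')
    parser.add_argument('-y', '--yaml', dest='yaml_file_name',
                        help='PEtab YAML problem file')
    parser.add_argument('-s', '--sbml', dest='sbml_file_name')
    parser.add_argument('-c', '--conditions', dest='condition_file_name')
    parser.add_argument('-m', '--measurements', dest='measurement_file_name')
    parser.add_argument('-p', '--parameters', dest='parameter_file_name')
    parser.add_argument('-b', '--observables', dest='observable_file_name')
    parser.add_argument('-n', '--model-name', dest='model_name')
    parser.add_argument('-o', '--output-dir', dest='model_output_dir')
    parser.add_argument('--no-compile', dest='compile', action='store_false')
    parser.add_argument('-v', '--verbose', action='store_true')
    return parser.parse_args()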
Example #2
def main():
    args = parse_cli_args()

    init_colorama(autoreset=True)

    # First check for valid PEtab
    pp = petab.Problem.from_files(sbml_file=args.sbml_file_name,
                                  condition_file=args.condition_file_name,
                                  measurement_file=args.measurement_file_name,
                                  parameter_file=args.parameter_file_name)
    petab.lint_problem(pp)

    import_model(args.sbml_file_name,
                 args.condition_file_name,
                 args.measurement_file_name,
                 model_output_dir=args.model_output_dir,
                 compile=args.compile,
                 verbose=True)
Example #3
def run_import(model_name):
    git_dir = os.path.join(os.curdir, 'CS_Signalling_ERBB_RAS_AKT')
    if not os.path.exists(git_dir):
        subprocess.run([
            'git', 'clone', '--depth', '1',
            'https://github.com/ICB-DCM/CS_Signalling_ERBB_RAS_AKT'
        ])
    os.chdir(os.path.join(os.curdir, 'CS_Signalling_ERBB_RAS_AKT'))

    pp = petab.Problem.from_yaml(
        'FroehlichKes2018/PEtab/FroehlichKes2018.yaml'
    )
    petab.lint_problem(pp)
    import_model(model_name=model_name,
                 sbml_model=pp.sbml_model,
                 condition_table=pp.condition_df,
                 observable_table=pp.observable_df,
                 measurement_table=pp.measurement_df,
                 compile=False,
                 verbose=True)
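
A single call then performs the clone, lint, and import steps; the model name is chosen freely, for example:

run_import('CS_Signalling_ERBB_RAS_AKT_petab')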
Example #4
    def __init__(self,
                 petab_problem: petab.Problem,
                 amici_model: amici.Model,
                 verbose=1):
        """
        fileNameSBML: filename of model SBML file (PEtab-style)
        fileMeasurements: filename of measurement file
        fileFixedParameters: filename with AMICI fixed parameter vectors for
            all conditions referred to in the measurement file
        fileParameters: PEtab parameter table filename
        """
        self.condition_ids = None
        self.f: Optional[h5py.File] = None
        self.num_condition_vectors: int = 0
        self.unique_timepoints = None
        self.parameter_mapping: Optional[petab.ParMappingDict] = None
        self.parameter_scale_mapping: Optional[petab.ScaleMappingDict] = None
        self.optimization_parameter_name_to_index: Dict[str, int] = {}
        self.nk: int = 0
        self.condition_map = None
        self.observable_ids = None
        self.ny: int = 0

        self.verbose = verbose
        self.petab_problem: petab.Problem = petab_problem
        self.amici_model: amici.Model = amici_model

        # ensure we have valid inputs
        petab.lint_problem(self.petab_problem)

        # index for no reference/preequilibration condition
        self.NO_PREEQ_CONDITION_IDX: int = NO_PREEQ_CONDITION_IDX

        # value for unmapped model parameter in opt<->sim mapping
        self.UNMAPPED_PARAMETER: int = UNMAPPED_PARAMETER

        # hdf5 dataset compression
        self.compression = "gzip"
Example #5
def main():
    condition_file_template = 'exp_table.tsv'
    measurement_file_template = 'cost_func_newFormat_original.txt'
    sbml_file = '../PEtab/CS_Signalling_ERBB_RAS_AKT_petab.xml'
    condition_file = '../PEtab/conditions_petab.tsv'
    measurement_file = '../PEtab/measurements_petab.tsv'
    parameter_file = '../PEtab/parameters_petab.tsv'

    sbml_reader = libsbml.SBMLReader()
    sbml_document = sbml_reader.readSBML(sbml_file)
    sbml_model = sbml_document.getModel()

    update_condition_table(condition_file_template, condition_file, sbml_model)
    update_measurement_table(measurement_file_template, measurement_file)
    create_parameter_table(sbml_file, condition_file, measurement_file,
                           parameter_file)

    # check for valid PEtab
    pp = petab.Problem.from_files(
        sbml_file=sbml_file,
        condition_file=condition_file,
        measurement_file=measurement_file,
        parameter_file=parameter_file)
    petab.lint_problem(pp)
Example #6
def update_ccle_mclp():
    condition_file_template = 'exp_table.tsv'
    measurement_file_template = 'measurements_Training_dataset1-2.tsv'
    sbml_file = 'CS_Signalling_ERBB_RAS_AKT_CCLE_MCLP_petab.xml'
    condition_file = 'conditions_petab.tsv'
    measurement_file = 'measurements_CCLE_MCLP_petab.tsv'
    parameter_file = 'parameters_CCLE_MCLP_petab.tsv'

    sbml_reader = libsbml.SBMLReader()
    sbml_document = sbml_reader.readSBML(sbml_file)
    sbml_model = sbml_document.getModel()

    #update_condition_table(condition_file_template, condition_file, sbml_model)
    update_measurement_table(measurement_file_template, measurement_file)
    create_parameter_table(sbml_file, condition_file, measurement_file,
                           parameter_file)

    # check for valid PEtab
    pp = petab.Problem.from_files(
        sbml_file=sbml_file,
        condition_file=condition_file,
        measurement_file=measurement_file,
        parameter_file=parameter_file)
    petab.lint_problem(pp)
Example #7
    def __init__(
        self,
        petab_problem: 'petab.Problem',
        output_folder: str = None,
        model_name: str = None,
        validate_petab: bool = True,
    ):
        """Initialize importer.

        Parameters
        ----------
        petab_problem:
            Managing access to the model and data.
        output_folder:
            Folder to contain the amici model. Defaults to
            './amici_models/{model_name}'.
        model_name:
            Name of the model, which will in particular be the name of the
            compiled model python module.
        validate_petab:
            Flag indicating if the PEtab problem shall be validated.
        """
        self.petab_problem = petab_problem

        if validate_petab:
            if petab.lint_problem(petab_problem):
                raise ValueError("Invalid PEtab problem.")

        if output_folder is None:
            output_folder = _find_output_folder_name(
                self.petab_problem,
                model_name=model_name,
            )
        self.output_folder = output_folder

        if model_name is None:
            model_name = _find_model_name(self.output_folder)
        self.model_name = model_name
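
A minimal usage sketch, assuming the surrounding class is called PetabImporter and that a problem YAML file is available (the file name is hypothetical):

petab_problem = petab.Problem.from_yaml('my_problem.yaml')
importer = PetabImporter(petab_problem)  # lints, then derives folder and name
print(importer.model_name, importer.output_folder)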
Example #8
def main():
    args = parse_cli_args()

    script_path = os.path.split(os.path.abspath(__file__))[0]
    model_name = 'model_steadystate_scaled'

    print(f'{__file__} running in {os.getcwd()}')
    print(f'Processing model {args.sbml_file_name}')

    # Create sbml model from scratch
    cmd = f'bash -c "{script_path}/createSteadystateExampleSBML.py '\
          f'> {args.sbml_file_name}"'
    print(cmd)
    out = subprocess.check_output(cmd, shell=True)
    print(out.decode('utf-8'))
    print()

    print_model_info(args.sbml_file_name)
    print()

    fixed_parameters, observables = create_module(args.sbml_file_name,
                                                  model_name,
                                                  args.model_output_dir)

    print('Observables:', observables)
    print('Fixed parameters', fixed_parameters)
    print()

    # load model
    sys.path.insert(0, args.model_output_dir)
    model_module = importlib.import_module(model_name)

    print()
    print("--- Creating data ---")
    true_parameters, expected_llh = create_data_tables(
        model=model_module.getModel(),
        measurement_file=args.measurement_file_name,
        fixed_parameter_file=args.condition_file_name,
        fixed_parameters=fixed_parameters)

    # check for valid PEtab
    pp = petab.Problem.from_files(sbml_file=args.sbml_file_name,
                                  condition_file=args.condition_file_name,
                                  measurement_file=args.measurement_file_name)

    create_parameter_table(problem=pp,
                           parameter_file=args.parameter_file_name,
                           nominal_parameters=true_parameters)

    petab.lint_problem(pp)

    # create training data
    generate_hdf5_file(sbml_file_name=args.sbml_file_name,
                       model_output_dir=args.model_output_dir,
                       measurement_file_name=args.measurement_file_name,
                       condition_file_name=args.condition_file_name,
                       hdf5_file_name=args.hdf5_file_name,
                       parameter_file_name=args.parameter_file_name,
                       model_name=model_name)

    # create test data
    args.test_measurement_file_name = \
        "-testset".join(os.path.splitext(args.measurement_file_name))
    args.test_parameter_file_name = \
        "-testset".join(os.path.splitext(args.parameter_file_name))
    df = petab.get_measurement_df(args.measurement_file_name)
    df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] = \
        df.loc[df.observableParameters == 'scaling_x1_common', 'measurement'] \
        * 2.0
    df.loc[~df.observableParameters.isnull(), 'observableParameters'] = \
        df.loc[~df.observableParameters.isnull(), 'observableParameters'] \
        + "_test"
    df.to_csv(args.test_measurement_file_name, sep='\t', index=False)
    df = petab.get_parameter_df(args.parameter_file_name)
    df.rename(index={
        'scaling_x1_common': 'scaling_x1_common_test',
        'offset_x2_batch-0': 'offset_x2_batch-0_test',
        'offset_x2_batch-1': 'offset_x2_batch-1_test'
    }, inplace=True)
    df.to_csv(args.test_parameter_file_name, sep='\t')
    generate_hdf5_file(sbml_file_name=args.sbml_file_name,
                       model_output_dir=args.model_output_dir,
                       measurement_file_name=args.test_measurement_file_name,
                       condition_file_name=args.condition_file_name,
                       hdf5_file_name="-testset".join(
                           os.path.splitext(args.hdf5_file_name)),
                       parameter_file_name=args.test_parameter_file_name,
                       model_name=model_name)

    save_expected_results(args.hdf5_file_name, true_parameters, expected_llh)

    write_starting_points(args.hdf5_file_name, true_parameters)
Example #9
    def __init__(self,
                 n_hidden: int,
                 datafiles: Tuple[str, str, str],
                 pathway_name: str,
                 par_modulation_scale: float = 1 / 2):
        """
        loads the mechanistic model as theano operator with loss as output and
        decoder output as input

        :param datafiles:
            tuple of paths to measurements, conditions and observables files

        :param pathway_name:
            name of pathway to use for model

        :param n_hidden:
            number of nodes in the hidden layer of the encoder

        :param par_modulation_scale:
            currently this parameter only influences the strength of l2
            regularization on the inflate layer (the respective gaussian
            prior has its standard deviation defined based on the value of
            this parameter). For bounded inflate functions, this parameter
            is also intended to rescale the inputs accordingly.

        """
        self.data_name = '__'.join(
            os.path.splitext(os.path.basename(
                datafiles[0]))[0].split('__')[:-1])
        self.pathway_name = pathway_name

        self.par_modulation_scale = par_modulation_scale
        self.petab_importer = load_petab(datafiles, 'pw_' + pathway_name,
                                         par_modulation_scale)

        full_measurements = self.petab_importer.petab_problem.measurement_df
        filter_observables(self.petab_importer.petab_problem)
        petab.lint_problem(self.petab_importer.petab_problem)

        self.pypesto_subproblem = self.petab_importer.create_problem()

        # extract sample names, ordering of those is important since samples
        # must match when reshaping the inflated matrix
        samples = []
        for name in self.pypesto_subproblem.x_names:
            if not name.startswith(MODEL_FEATURE_PREFIX):
                continue

            sample = name.split('__')[-1]
            if sample not in samples:
                samples.append(sample)

        # select measurements taken at time 0 of the preequilibration
        # condition and pivot them into a samples x observables matrix
        is_input = full_measurements.apply(
            lambda x: (x[petab.SIMULATION_CONDITION_ID]
                       == x[petab.PREEQUILIBRATION_CONDITION_ID])
                      & (x[petab.TIME] == 0.0),
            axis=1)
        input_data = full_measurements.loc[is_input, :].pivot_table(
            index=petab.SIMULATION_CONDITION_ID,
            columns=petab.OBSERVABLE_ID,
            values=petab.MEASUREMENT,
            aggfunc=np.nanmean).loc[samples, :]
        # remove missing values
        input_data.dropna(axis='columns', how='any', inplace=True)

        self.n_visible = input_data.shape[1]
        self.n_samples = input_data.shape[0]
        self.n_model_inputs = int(
            sum(
                name.startswith(MODEL_FEATURE_PREFIX)
                for name in self.pypesto_subproblem.x_names) / self.n_samples)
        self.n_kin_params = \
            self.pypesto_subproblem.dim - self.n_model_inputs * self.n_samples

        # zero center input data, this is equivalent to estimating biases
        # for linear autoencoders
        # https://link.springer.com/article/10.1007/BF00332918
        # https://arxiv.org/pdf/1901.08168.pdf
        input_data -= input_data.mean()

        self.sample_names = list(input_data.index)
        super().__init__(input_data=input_data.values,
                         n_hidden=n_hidden,
                         n_params=self.n_model_inputs)

        # set tolerances
        self.pypesto_subproblem.objective._objectives[0].amici_solver\
            .setAbsoluteTolerance(1e-12)
        self.pypesto_subproblem.objective._objectives[0].amici_solver\
            .setRelativeTolerance(1e-10)
        self.pypesto_subproblem.objective._objectives[0].amici_solver\
            .setAbsoluteToleranceSteadyState(1e-10)
        self.pypesto_subproblem.objective._objectives[0].amici_solver\
            .setRelativeToleranceSteadyState(1e-8)
        #self.pypesto_subproblem.objective._objectives[0].amici_solver\
        #    .setSensitivityMethod(amici.SensitivityMethod.adjoint)

        # define model theano op
        self.loss = TheanoLogProbability(self.pypesto_subproblem)

        # these are the kinetic parameters that are shared across all samples
        self.kin_pars = tt.specify_shape(tt.vector('kinetic_parameters'),
                                         (self.n_kin_params, ))

        self.x_names = self.x_names + [
            name for ix, name in enumerate(self.pypesto_subproblem.x_names)
            if not name.startswith(MODEL_FEATURE_PREFIX)
            and ix in self.pypesto_subproblem.x_free_indices
        ]

        # assemble input to model theano op
        encoded_pars = self.encode_params(self.encoder_pars)
        self.model_pars = tt.concatenate([
            self.kin_pars,
            tt.reshape(encoded_pars, (self.n_model_inputs * self.n_samples, ))
        ], axis=0)
Example #10
def write_problem(
    test_id: int,
    parameter_df: pd.DataFrame,
    condition_dfs: Union[List[pd.DataFrame], pd.DataFrame],
    observable_dfs: Union[List[pd.DataFrame], pd.DataFrame],
    measurement_dfs: Union[List[pd.DataFrame], pd.DataFrame],
    sbml_files: Union[List[str], str] = None,
) -> None:
    """Write problem to files.

    Parameters
    ----------
    test_id: Identifier of the test.
    parameter_df: PEtab parameter table.
    condition_dfs: PEtab condition tables.
    observable_dfs: PEtab observable tables.
    measurement_dfs: PEtab measurement tables.
    sbml_files: PEtab SBML files. If None, then the default
        petabtests.DEFAULT_MODEL_FILE is used.
    """
    print(f"Writing case {test_id}...")
    # convenience
    if isinstance(condition_dfs, pd.DataFrame):
        condition_dfs = [condition_dfs]
    if isinstance(observable_dfs, pd.DataFrame):
        observable_dfs = [observable_dfs]
    if isinstance(measurement_dfs, pd.DataFrame):
        measurement_dfs = [measurement_dfs]
    if isinstance(sbml_files, str):
        sbml_files = [sbml_files]

    # id to string
    id_str = test_id_str(test_id)
    dir_ = os.path.join(CASES_DIR, id_str)

    # petab yaml
    config = {
        FORMAT_VERSION: petab.__format_version__,
        PROBLEMS: [
            {
                SBML_FILES: [],
                CONDITION_FILES: [],
                MEASUREMENT_FILES: [],
                OBSERVABLE_FILES: [],
            },
        ]
    }

    # copy models
    if sbml_files is None:
        sbml_files = [DEFAULT_MODEL_FILE]
    copied_sbml_files = []
    for i_sbml, sbml_file in enumerate(sbml_files):
        if len(sbml_files) == 1:
            copied_sbml_file = '_model.xml'
        else:
            copied_sbml_file = f'_model{i_sbml}.xml'
        copyfile(os.path.join(dir_, sbml_file),
                 os.path.join(dir_, copied_sbml_file))
        copied_sbml_files.append(copied_sbml_file)
    config[PROBLEMS][0][SBML_FILES] = copied_sbml_files

    # write parameters
    parameters_file = '_parameters.tsv'
    petab.write_parameter_df(parameter_df, os.path.join(dir_, parameters_file))
    config[PARAMETER_FILE] = parameters_file

    # write conditions
    _write_dfs_to_files(id_str, 'conditions', petab.write_condition_df,
                        condition_dfs, config[PROBLEMS][0][CONDITION_FILES])

    # write observables
    _write_dfs_to_files(id_str, 'observables', petab.write_observable_df,
                        observable_dfs, config[PROBLEMS][0][OBSERVABLE_FILES])

    # write measurements
    _write_dfs_to_files(id_str, 'measurements', petab.write_measurement_df,
                        measurement_dfs,
                        config[PROBLEMS][0][MEASUREMENT_FILES])

    # validate petab yaml
    petab.validate(config, path_prefix=dir_)

    # write yaml
    yaml_file = problem_yaml_name(test_id)
    with open(os.path.join(dir_, yaml_file), 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # validate written PEtab files
    problem = petab.Problem.from_yaml(os.path.join(dir_, yaml_file))
    petab.lint_problem(problem)
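
The isinstance block at the top of write_problem implements a small convenience pattern: callers may pass either a single table or a list of tables. Factored out, the idea is just the following (helper name hypothetical):

from typing import List, Union

import pandas as pd

def as_df_list(
        dfs: Union[List[pd.DataFrame], pd.DataFrame]) -> List[pd.DataFrame]:
    # Wrap a lone DataFrame in a one-element list; pass lists through.
    return [dfs] if isinstance(dfs, pd.DataFrame) else dfs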
Example #11
def test_flatten_timepoint_specific_output_overrides():
    """Test flatten_timepoint_specific_output_overrides"""
    observable_df = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1'],
        OBSERVABLE_FORMULA: [
            'observableParameter1_obs1 + observableParameter2_obs1'],
        NOISE_FORMULA: ['noiseParameter1_obs1']
    })
    observable_df.set_index(OBSERVABLE_ID, inplace=True)

    observable_df_expected = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1_1', 'obs1_2', 'obs1_3'],
        OBSERVABLE_FORMULA: [
            'observableParameter1_obs1_1 + observableParameter2_obs1_1',
            'observableParameter1_obs1_2 + observableParameter2_obs1_2',
            'observableParameter1_obs1_3 + observableParameter2_obs1_3'],
        NOISE_FORMULA: ['noiseParameter1_obs1_1',
                        'noiseParameter1_obs1_2',
                        'noiseParameter1_obs1_3']
    })
    observable_df_expected.set_index(OBSERVABLE_ID, inplace=True)

    # Measurement table with timepoint-specific overrides
    measurement_df = pd.DataFrame(data={
        OBSERVABLE_ID:
            ['obs1', 'obs1', 'obs1', 'obs1'],
        SIMULATION_CONDITION_ID:
            ['condition1', 'condition1', 'condition1', 'condition1'],
        PREEQUILIBRATION_CONDITION_ID:
            ['', '', '', ''],
        TIME:
            [1.0, 1.0, 2.0, 2.0],
        MEASUREMENT:
            [np.nan] * 4,
        OBSERVABLE_PARAMETERS:
            ['obsParOverride1;1.0', 'obsParOverride2;1.0',
             'obsParOverride2;1.0', 'obsParOverride2;1.0'],
        NOISE_PARAMETERS:
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2']
    })

    measurement_df_expected = pd.DataFrame(data={
        OBSERVABLE_ID:
            ['obs1_1', 'obs1_2', 'obs1_3', 'obs1_3'],
        SIMULATION_CONDITION_ID:
            ['condition1', 'condition1', 'condition1', 'condition1'],
        PREEQUILIBRATION_CONDITION_ID:
            ['', '', '', ''],
        TIME:
            [1.0, 1.0, 2.0, 2.0],
        MEASUREMENT:
            [np.nan] * 4,
        OBSERVABLE_PARAMETERS:
            ['obsParOverride1;1.0', 'obsParOverride2;1.0',
             'obsParOverride2;1.0', 'obsParOverride2;1.0'],
        NOISE_PARAMETERS:
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2']
    })

    problem = petab.Problem(measurement_df=measurement_df,
                            observable_df=observable_df)

    assert petab.lint_problem(problem) is False

    # Ensure having timepoint-specific overrides
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        measurement_df) is True

    petab.flatten_timepoint_specific_output_overrides(problem)

    # Timepoint-specific overrides should be gone now
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        problem.measurement_df) is False

    assert problem.observable_df.equals(observable_df_expected) is True
    assert problem.measurement_df.equals(measurement_df_expected) is True

    assert petab.lint_problem(problem) is False
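
Outside of tests, the same calls can flatten a loaded problem before simulation. A minimal sketch, using only functions shown above (the YAML file name is hypothetical):

problem = petab.Problem.from_yaml('problem.yaml')
if petab.lint.measurement_table_has_timepoint_specific_mappings(
        problem.measurement_df):
    petab.flatten_timepoint_specific_output_overrides(problem)
# lint_problem returns True when errors are found (cf. Example #7)
assert petab.lint_problem(problem) is False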
Example #12
def test_flatten_timepoint_specific_output_overrides(minimal_sbml_model):
    document, model = minimal_sbml_model
    petab.sbml.add_global_parameter(
        sbml_model=model, parameter_id='observableParameter1_obs1')
    petab.sbml.add_model_output_with_sigma(
        sbml_model=model, observable_id='obs1',
        observable_formula='observableParameter1_obs1')

    # Measurement table with timepoint-specific overrides
    measurement_df = pd.DataFrame(data={
        'observableId':
            ['obs1', 'obs1', 'obs1', 'obs1'],
        'simulationConditionId':
            ['condition1', 'condition1', 'condition1', 'condition1'],
        'preequilibrationConditionId':
            ['', '', '', ''],
        'time':
            [1.0, 1.0, 2.0, 2.0],
        'measurement':
            [np.nan] * 4,
        'observableParameters':
            ['obsParOverride1', 'obsParOverride2',
             'obsParOverride2', 'obsParOverride2'],
        'noiseParameters':
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2']
    })

    measurement_df_expected = pd.DataFrame(data={
        'observableId':
            ['obs1_1', 'obs1_2', 'obs1_3', 'obs1_3'],
        'simulationConditionId':
            ['condition1', 'condition1', 'condition1', 'condition1'],
        'preequilibrationConditionId':
            ['', '', '', ''],
        'time':
            [1.0, 1.0, 2.0, 2.0],
        'measurement':
            [np.nan] * 4,
        'observableParameters':
            ['obsParOverride1', 'obsParOverride2',
             'obsParOverride2', 'obsParOverride2'],
        'noiseParameters':
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2']
    })

    problem = petab.Problem(sbml_model=model,
                            measurement_df=measurement_df)

    assert petab.lint_problem(problem) is False

    # Ensure having timepoint-specific overrides
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        measurement_df) is True

    petab.flatten_timepoint_specific_output_overrides(problem)

    # Timepoint-specific overrides should be gone now
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        problem.measurement_df) is False

    assert problem.measurement_df.equals(measurement_df_expected) is True

    assert petab.lint_problem(problem) is False
Example #13
def main():
    args = parse_cli_args()

    script_path = os.path.split(os.path.abspath(__file__))[0]
    model_name = 'model_steadystate_scaled'
    sbml_file_name = "model_steadystate_scaled.sbml"
    measurement_file_name = 'example_data.tsv'
    condition_file_name = 'example_data_fixed.tsv'
    parameter_file_name = 'example_data_parameter.tsv'
    observable_file_name = 'model_steadystate_observables.tsv'
    yaml_file_name = 'model_steadystate.yaml'
    yaml_file_name_test = 'model_steadystate_test.yaml'

    print(f'{__file__} running in {os.getcwd()}')
    print(f'Processing model {sbml_file_name}')

    # Create sbml model from scratch
    cmd = f'bash -c "{script_path}/createSteadystateExampleSBML.py '\
          f'> {sbml_file_name}"'
    print(cmd)
    out = subprocess.check_output(cmd, shell=True)
    print(out.decode('utf-8'))
    print()

    print_model_info(sbml_file_name)
    print()

    # create condition table
    condition_df = pd.DataFrame(
        data={
            ptc.CONDITION_ID:
            ["condition_0", "condition_1", "condition_2", "condition_3"],
            "k0": [1, 1.1, 1.2, 1.3]
        })
    condition_df.set_index([ptc.CONDITION_ID], inplace=True)

    # create observables
    observable_df = pd.DataFrame(
        data={
            ptc.OBSERVABLE_ID: [
                "obs_x1", "obs_x2", "obs_x3", "obs_x1_scaled",
                "obs_x2_offsetted", "obs_x1withsigma"
            ],
            ptc.OBSERVABLE_FORMULA: [
                "x1", "x2", "x3", "observableParameter1_obs_x1_scaled * x1",
                "observableParameter1_obs_x2_offsetted + x2", "x1"
            ],
            ptc.NOISE_FORMULA: [
                "noiseParameter1_obs_x1", "noiseParameter1_obs_x2",
                "noiseParameter1_obs_x3", "noiseParameter1_obs_x1_scaled",
                "noiseParameter1_obs_x2_offsetted",
                "noiseParameter1_obs_x1withsigma"
            ],
        })
    observable_df.set_index([ptc.OBSERVABLE_ID], inplace=True)

    create_module(sbml_model_file=sbml_file_name,
                  model_name=model_name,
                  model_output_dir=args.model_output_dir,
                  observable_df=observable_df,
                  condition_df=condition_df)

    # load model
    sys.path.insert(0, args.model_output_dir)
    model_module = importlib.import_module(model_name)

    print()
    print("--- Creating data ---")

    measurement_df, true_parameters, expected_llh = create_data_tables(
        model=model_module.getModel(), condition_df=condition_df)

    # assemble PEtab problem
    pp = petab.Problem.from_files(sbml_file=sbml_file_name)
    pp.observable_df = observable_df
    pp.measurement_df = measurement_df
    pp.condition_df = condition_df
    create_parameter_table(problem=pp, nominal_parameters=true_parameters)

    # check for valid PEtab
    petab.lint_problem(pp)

    # Save remaining tables
    pp.to_files(measurement_file=measurement_file_name,
                condition_file=condition_file_name,
                observable_file=observable_file_name,
                parameter_file=parameter_file_name)

    # Create PEtab yaml file
    config = {
        'format_version': petab.__format_version__,
        'parameter_file': parameter_file_name,
        'problems': [
            {
                ptc.SBML_FILES: [sbml_file_name],
                ptc.CONDITION_FILES: [condition_file_name],
                ptc.MEASUREMENT_FILES: [measurement_file_name],
                ptc.OBSERVABLE_FILES: [observable_file_name],
            },
        ]
    }
    petab.validate(config)  #, path_prefix=model_dir)
    with open(yaml_file_name, 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # create training data
    generate_hdf5_file(yaml_file=yaml_file_name,
                       model_output_dir=args.model_output_dir,
                       hdf5_file_name=args.hdf5_file_name,
                       model_name=model_name)

    create_test_data(measurement_file_name, parameter_file_name, config,
                     yaml_file_name_test, args.model_output_dir, model_name,
                     args.hdf5_file_name)

    save_expected_results(args.hdf5_file_name, true_parameters, expected_llh)

    write_starting_points(args.hdf5_file_name, true_parameters)
Example #14
parameter_df = pd.DataFrame(
    data={
        PARAMETER_ID: ['k1', 'k2'],
        PARAMETER_SCALE: [LOG] * 2,
        LOWER_BOUND: [1e-5] * 2,
        UPPER_BOUND: [1e5] * 2,
        NOMINAL_VALUE: [k1, k2],
        ESTIMATE: [1, 1],
    }).set_index(PARAMETER_ID)

petab.write_condition_df(condition_df, "conditions.tsv")
petab.write_measurement_df(measurement_df, "measurements.tsv")
petab.write_observable_df(observable_df, "observables.tsv")
petab.write_parameter_df(parameter_df, "parameters.tsv")

yaml_config = {
    FORMAT_VERSION: 1,
    PARAMETER_FILE: "parameters.tsv",
    PROBLEMS: [{
        SBML_FILES: ["model_conversion_reaction.xml"],
        CONDITION_FILES: ["conditions.tsv"],
        MEASUREMENT_FILES: ["measurements.tsv"],
        OBSERVABLE_FILES: ["observables.tsv"]
    }]
}
petab.write_yaml(yaml_config, "conversion_reaction.yaml")

# validate written PEtab files
problem = petab.Problem.from_yaml("conversion_reaction.yaml")
petab.lint_problem(problem)
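
As Example #7 demonstrates, petab.lint_problem returns a truthy value when problems are found, so the final validation can be made fail-fast:

if petab.lint_problem(problem):
    raise ValueError("conversion_reaction.yaml is not a valid PEtab problem")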
Example #15
def write_problem(test_id: int,
                  parameter_df: pd.DataFrame,
                  condition_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  observable_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  measurement_dfs: Union[List[pd.DataFrame], pd.DataFrame],
                  model_files: Union[List[str], str],
                  format_: str = 'sbml') -> None:
    """Write problem to files.

    Parameters
    ----------
    test_id: Identifier of the test.
    parameter_df: PEtab parameter table.
    condition_dfs: PEtab condition tables.
    observable_dfs: PEtab observable tables.
    measurement_dfs: PEtab measurement tables.
    model_files: PEtab SBML/PySB files.
    format_: Model format (SBML/PySB).
    """
    print(f"Writing case {test_id} {format_} ...")
    # convenience
    if isinstance(condition_dfs, pd.DataFrame):
        condition_dfs = [condition_dfs]
    if isinstance(observable_dfs, pd.DataFrame):
        observable_dfs = [observable_dfs]
    if isinstance(measurement_dfs, pd.DataFrame):
        measurement_dfs = [measurement_dfs]
    if isinstance(model_files, str):
        model_files = [model_files]

    # id to string
    dir_ = case_dir(test_id, format_)

    # petab yaml
    config = {
        FORMAT_VERSION: petab.__format_version__,
        PROBLEMS: [
            {
                SBML_FILES: [],
                CONDITION_FILES: [],
                MEASUREMENT_FILES: [],
                OBSERVABLE_FILES: [],
            },
        ]
    }

    if format_ == 'sbml':
        suffix = '.xml'
    else:
        suffix = '.py'

    # copy models
    copied_model_files = []
    for i_sbml, model_file in enumerate(model_files):
        if len(model_files) == 1:
            copied_model_file = f'_model{suffix}'
        else:
            copied_model_file = f'_model{i_sbml}{suffix}'
        copyfile(os.path.join(dir_, model_file),
                 os.path.join(dir_, copied_model_file))
        copied_model_files.append(copied_model_file)
    config[PROBLEMS][0][SBML_FILES] = copied_model_files

    # write parameters
    parameters_file = '_parameters.tsv'
    petab.write_parameter_df(parameter_df, os.path.join(dir_, parameters_file))
    config[PARAMETER_FILE] = parameters_file

    # write conditions
    _write_dfs_to_files(dir_, 'conditions', petab.write_condition_df,
                        condition_dfs, config[PROBLEMS][0][CONDITION_FILES])

    # write observables
    _write_dfs_to_files(dir_, 'observables', petab.write_observable_df,
                        observable_dfs, config[PROBLEMS][0][OBSERVABLE_FILES])

    # write measurements
    _write_dfs_to_files(dir_, 'measurements', petab.write_measurement_df,
                        measurement_dfs,
                        config[PROBLEMS][0][MEASUREMENT_FILES])

    # validate petab yaml
    petab.validate(config, path_prefix=dir_)

    # write yaml
    yaml_file = problem_yaml_name(test_id)
    with open(os.path.join(dir_, yaml_file), 'w') as outfile:
        yaml.dump(config, outfile, default_flow_style=False)

    # validate written PEtab files
    problem = petab.Problem.from_yaml(os.path.join(dir_, yaml_file))
    petab.lint_problem(problem)
Example #16
def main():
    arg = sys.argv[1]

    if arg == 'compilation':
        git_dir = os.path.join(os.curdir, 'CS_Signalling_ERBB_RAS_AKT')
        if not os.path.exists(git_dir):
            subprocess.run([
                'git', 'clone', '--depth', '1',
                'https://github.com/ICB-DCM/CS_Signalling_ERBB_RAS_AKT'
            ])
        os.chdir(os.path.join(os.curdir, 'CS_Signalling_ERBB_RAS_AKT'))

        pp = petab.Problem.from_yaml(
            'FroehlichKes2018/PEtab/FroehlichKes2018.yaml')
        petab.lint_problem(pp)
        os.chdir(os.path.dirname(os.path.abspath(os.curdir)))
        import_model(model_name='CS_Signalling_ERBB_RAS_AKT_petab',
                     sbml_model=pp.sbml_model,
                     condition_table=pp.condition_df,
                     observable_table=pp.observable_df,
                     measurement_table=pp.measurement_df,
                     compile=False,
                     verbose=True)
        os.chdir(os.path.join(os.curdir, 'CS_Signalling_ERBB_RAS_AKT_petab'))

        subprocess.run(['python', 'setup.py', 'install'])

        return
    else:
        import CS_Signalling_ERBB_RAS_AKT_petab as model_module
        model = model_module.getModel()
        solver = model.getSolver()
        # TODO
        edata = amici.ExpData(model)
        edata.setTimepoints([1e8])
        edata.setObservedData([1.0])
        edata.setObservedDataStdDev([1.0])

    if arg == 'forward_simulation':
        solver.setSensitivityMethod(amici.SensitivityMethod.none)
        solver.setSensitivityOrder(amici.SensitivityOrder.none)
    elif arg == 'forward_sensitivities':
        model.setParameterList(list(range(100)))
        solver.setSensitivityMethod(amici.SensitivityMethod.forward)
        solver.setSensitivityOrder(amici.SensitivityOrder.first)
    elif arg == 'adjoint_sensitivities':
        solver.setSensitivityMethod(amici.SensitivityMethod.adjoint)
        solver.setSensitivityOrder(amici.SensitivityOrder.first)
    elif arg == 'forward_simulation_non_optimal_parameters':
        tmpPar = model.getParameters()
        model.setParameters([0.1 for _ in tmpPar])
        solver.setSensitivityMethod(amici.SensitivityMethod.none)
        solver.setSensitivityOrder(amici.SensitivityOrder.none)
    elif arg == 'adjoint_sensitivities_non_optimal_parameters':
        tmpPar = model.getParameters()
        model.setParameters([0.1 for _ in tmpPar])
        solver.setSensitivityMethod(amici.SensitivityMethod.adjoint)
        solver.setSensitivityOrder(amici.SensitivityOrder.first)
    elif arg == 'forward_steadystate_sensitivities_non_optimal_parameters':
        tmpPar = model.getParameters()
        model.setParameters([0.1 for _ in tmpPar])
        solver.setSensitivityMethod(amici.SensitivityMethod.forward)
        solver.setSensitivityOrder(amici.SensitivityOrder.first)
        edata.setTimepoints([float('inf')])
    elif arg == 'adjoint_steadystate_sensitivities_non_optimal_parameters':
        tmpPar = model.getParameters()
        model.setParameters([0.1 for _ in tmpPar])
        solver.setSensitivityMethod(amici.SensitivityMethod.adjoint)
        solver.setSensitivityOrder(amici.SensitivityOrder.first)
        edata.setTimepoints([float('inf')])
    else:
        print("Unknown argument:", arg)
        sys.exit(1)
    rdata = amici.runAmiciSimulation(model, solver, edata)

    diagnostics = [
        'numsteps', 'numstepsB', 'numrhsevals', 'numrhsevalsB',
        'numerrtestfails', 'numerrtestfailsB', 'numnonlinsolvconvfails',
        'numnonlinsolvconvfailsB', 'preeq_cpu_time', 'preeq_cpu_timeB',
        'cpu_time', 'cpu_timeB', 'posteq_cpu_time', 'posteq_cpu_timeB'
    ]
    for d in diagnostics:
        print(d, rdata[d])
    assert rdata['status'] == amici.AMICI_SUCCESS
Example #17
def test_flatten_timepoint_specific_output_overrides_special_cases():
    """Test flatten_timepoint_specific_output_overrides
    for special cases:
    * no preequilibration
    * no observable parameters
    """
    observable_df = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1'],
        OBSERVABLE_FORMULA: ['species1'],
        NOISE_FORMULA: ['noiseParameter1_obs1']
    })
    observable_df.set_index(OBSERVABLE_ID, inplace=True)

    observable_df_expected = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1__noiseParOverride1__condition1',
                        'obs1__noiseParOverride2__condition1'],
        OBSERVABLE_FORMULA: [
            'species1',
            'species1'],
        NOISE_FORMULA: ['noiseParameter1_obs1__noiseParOverride1__condition1',
                        'noiseParameter1_obs1__noiseParOverride2__condition1']
    })
    observable_df_expected.set_index(OBSERVABLE_ID, inplace=True)

    # Measurement table with timepoint-specific overrides
    measurement_df = pd.DataFrame(data={
        OBSERVABLE_ID:
            ['obs1', 'obs1', 'obs1', 'obs1'],
        SIMULATION_CONDITION_ID:
            ['condition1', 'condition1', 'condition1', 'condition1'],
        TIME:
            [1.0, 1.0, 2.0, 2.0],
        MEASUREMENT:
            [.1] * 4,
        NOISE_PARAMETERS:
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2'],
    })

    measurement_df_expected = pd.DataFrame(data={
        OBSERVABLE_ID:
            ['obs1__noiseParOverride1__condition1',
             'obs1__noiseParOverride1__condition1',
             'obs1__noiseParOverride2__condition1',
             'obs1__noiseParOverride2__condition1'],
        SIMULATION_CONDITION_ID:
            ['condition1', 'condition1', 'condition1', 'condition1'],
        TIME:
            [1.0, 1.0, 2.0, 2.0],
        MEASUREMENT:
            [.1] * 4,
        NOISE_PARAMETERS:
            ['noiseParOverride1', 'noiseParOverride1',
             'noiseParOverride2', 'noiseParOverride2'],
    })

    problem = petab.Problem(measurement_df=measurement_df,
                            observable_df=observable_df)

    assert petab.lint_problem(problem) is False

    # Ensure having timepoint-specific overrides
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        measurement_df) is True

    petab.flatten_timepoint_specific_output_overrides(problem)

    # Timepoint-specific overrides should be gone now
    assert petab.lint.measurement_table_has_timepoint_specific_mappings(
        problem.measurement_df) is False

    assert problem.observable_df.equals(observable_df_expected) is True
    assert problem.measurement_df.equals(measurement_df_expected) is True

    assert petab.lint_problem(problem) is False