Example #1
0
def create_parameter_table(problem: petab.Problem, nominal_parameters):
    """Create the PEtab parameter table and attach it to ``problem``.

    Builds the parameter table from the problem's model, condition,
    observable and measurement tables, marks the parameters that are
    computed analytically by hierarchical optimization, fills in nominal
    values, and adapts scale and bounds for offset parameters, which may
    be negative.

    Arguments:
        problem: PEtab problem; ``problem.parameter_df`` is set in place.
        nominal_parameters: Mapping from parameter ID to nominal value.
    """

    df = petab.create_parameter_df(problem.sbml_model,
                                   problem.condition_df,
                                   problem.observable_df,
                                   problem.measurement_df,
                                   include_optional=True,
                                   lower_bound=1e-3,
                                   upper_bound=1e5)

    # Flag the parameters handled by hierarchical optimization
    # (scalings, offsets, sigmas) so the optimizer can exclude them.
    df['hierarchicalOptimization'] = 0
    df.loc['scaling_x1_common', 'hierarchicalOptimization'] = 1
    df.loc['offset_x2_batch_0', 'hierarchicalOptimization'] = 1
    df.loc['offset_x2_batch_1', 'hierarchicalOptimization'] = 1
    df.loc['x1withsigma_sigma', 'hierarchicalOptimization'] = 1

    for pid, val in nominal_parameters.items():
        if pid in df.index:
            df.loc[pid, ptc.NOMINAL_VALUE] = val
            df.loc[pid, ptc.PARAMETER_SCALE] = ptc.LOG10
            df.loc[pid, ptc.ESTIMATE] = 1
        elif pid.startswith('noiseParameter') \
                or pid.startswith('observableParameter'):
            # Placeholder parameters are overridden via the measurement
            # table; they are not free parameters of the problem.
            continue
        else:
            print("extra parameter", pid, val)

    # Offsets can be negative: adapt scaling and bounds. Log10 scale and
    # the default positive lower bound (1e-3) would exclude negative
    # values, so switch to linear scale and mirror the upper bound.
    offsets = df.index.str.startswith('offset_')
    df.loc[offsets, ptc.PARAMETER_SCALE] = ptc.LIN
    df.loc[offsets, ptc.LOWER_BOUND] = -1e5

    problem.parameter_df = df
Example #2
0
def test_create_parameter_df(condition_df_2_conditions):
    """Test petab.create_parameter_df.

    Builds a small SBML model with two observables whose noise/observable
    parameters are overridden in the measurement table, and checks the
    resulting parameter-table index order and nominal values.
    """
    document = libsbml.SBMLDocument(3, 1)
    model = document.createModel()
    model.setTimeUnits("second")
    model.setExtentUnits("mole")
    model.setSubstanceUnits('mole')

    s = model.createSpecies()
    s.setId('x1')

    petab.sbml.add_global_parameter(model,
                                    parameter_id='fixedParameter1',
                                    parameter_name='FixedParameter1',
                                    value=2.0)

    petab.sbml.add_global_parameter(model,
                                    parameter_id='p0',
                                    parameter_name='Parameter 0',
                                    value=3.0)

    petab.sbml.add_model_output_with_sigma(sbml_model=model,
                                           observable_id='obs1',
                                           observable_name='Observable 1',
                                           observable_formula='x1')

    # Use the fully qualified petab.sbml helper, consistent with the
    # obs1 call above (previously called via the petab top-level alias).
    petab.sbml.add_model_output_with_sigma(sbml_model=model,
                                           observable_id='obs2',
                                           observable_name='Observable 2',
                                           observable_formula='2*x1')

    measurement_df = pd.DataFrame(
        data={
            'observableId': ['obs1', 'obs2'],
            'observableParameters': ['', 'p1;p2'],
            'noiseParameters': ['p3;p4', 'p5']
        })

    parameter_df = petab.create_parameter_df(model, condition_df_2_conditions,
                                             measurement_df)

    # first model parameters, then row by row noise and sigma overrides
    expected = ['p0', 'p3', 'p4', 'p1', 'p2', 'p5']
    actual = parameter_df.index.values.tolist()
    assert actual == expected

    assert parameter_df.loc['p0', 'nominalValue'] == 3.0
Example #3
0
def test_create_parameter_df(
        minimal_sbml_model,  # pylint: disable=W0621
        condition_df_2_conditions):  # pylint: disable=W0621
    """Test petab.create_parameter_df."""
    _, sbml_model = minimal_sbml_model
    species = sbml_model.createSpecies()
    species.setId('x1')

    petab.sbml.add_global_parameter(
        sbml_model,
        parameter_id='fixedParameter1',
        parameter_name='FixedParameter1',
        value=2.0)
    petab.sbml.add_global_parameter(
        sbml_model,
        parameter_id='p0',
        parameter_name='Parameter 0',
        value=3.0)

    observable_df = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1', 'obs2'],
        OBSERVABLE_FORMULA: ['x1', '2*x1']
    }).set_index(OBSERVABLE_ID)

    # Add assignment rule target which should be ignored
    petab.add_global_parameter(sbml_model=sbml_model,
                               parameter_id='assignment_target')
    petab.create_assigment_rule(sbml_model=sbml_model,
                                assignee_id='assignment_target',
                                formula='1.0')

    measurement_df = pd.DataFrame(data={
        OBSERVABLE_ID: ['obs1', 'obs2'],
        OBSERVABLE_PARAMETERS: ['', 'p1;p2'],
        NOISE_PARAMETERS: ['p3;p4', 'p5']
    })

    def _parameter_ids():
        # Build the parameter table for the current inputs and return
        # its index as a plain list.
        df = petab.create_parameter_df(
            sbml_model,
            condition_df_2_conditions,
            observable_df,
            measurement_df)
        return df.index.values.tolist()

    # first model parameters, then row by row noise and sigma overrides
    assert _parameter_ids() == ['p3', 'p4', 'p1', 'p2', 'p5']

    # test with condition parameter override:
    condition_df_2_conditions.loc['condition2', 'fixedParameter1'] \
        = 'overrider'
    assert _parameter_ids() == ['p3', 'p4', 'p1', 'p2', 'p5', 'overrider']

    # test with optional parameters
    parameter_df = petab.create_parameter_df(
        sbml_model,
        condition_df_2_conditions,
        observable_df,
        measurement_df,
        include_optional=True)
    assert parameter_df.index.values.tolist() == \
        ['p0', 'p3', 'p4', 'p1', 'p2', 'p5', 'overrider']
    assert parameter_df.loc['p0', NOMINAL_VALUE] == 3.0