Example no. 1
0
    def test_no_condition_specific(self, condition_df_2_conditions):
        """Trivial case: no condition-specific parameters, so every
        simulation parameter maps onto itself for both conditions."""
        condition_df = condition_df_2_conditions

        # One measurement per condition, with no overrides anywhere.
        measurement_df = pd.DataFrame({
            'observableId': ['obs1', 'obs2'],
            'simulationConditionId': ['condition1', 'condition2'],
            'preequilibrationConditionId': ['', ''],
            'observableParameters': ['', ''],
            'noiseParameters': ['', ''],
        })

        simulation_parameter_ids = [
            'dynamicParameter1', 'dynamicParameter2', 'dynamicParameter3',
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            par_sim_ids=simulation_parameter_ids)

        # Identity mapping expected for each of the two conditions.
        expected = [list(simulation_parameter_ids),
                    list(simulation_parameter_ids)]

        assert actual == expected
Example no. 2
0
    def test_parameterized_condition_table(minimal_sbml_model):
        """A condition-table column may override a model parameter with
        either an estimated parameter or a numeric constant."""
        # Three conditions override 'dynamicParameter1' with two estimated
        # parameters and one numeric value.
        condition_df = pd.DataFrame({
            CONDITION_ID: ['condition1', 'condition2', 'condition3'],
            CONDITION_NAME: ['', 'Condition 2', ''],
            'dynamicParameter1':
                ['dynamicOverride1_1', 'dynamicOverride1_2', 0],
        }).set_index(CONDITION_ID)

        measurement_df = pd.DataFrame({
            SIMULATION_CONDITION_ID:
                ['condition1', 'condition2', 'condition3'],
            OBSERVABLE_ID: ['obs1', 'obs2', 'obs1'],
            OBSERVABLE_PARAMETERS: '',
            NOISE_PARAMETERS: '',
        })

        parameter_df = pd.DataFrame({
            PARAMETER_ID: ['dynamicOverride1_1', 'dynamicOverride1_2'],
            PARAMETER_NAME: ['', '...'],
            ESTIMATE: [1, 1],
        }).set_index(PARAMETER_ID)

        document, model = minimal_sbml_model
        model.createParameter().setId('dynamicParameter1')
        assert petab.get_model_parameters(model) == ['dynamicParameter1']

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            sbml_model=model)

        # One (preeq map, sim map, preeq scale map, sim scale map) tuple
        # per condition; the scale is always linear here.
        expected = [
            ({}, {'dynamicParameter1': override},
             {}, {'dynamicParameter1': LIN})
            for override in ('dynamicOverride1_1', 'dynamicOverride1_2', 0)
        ]

        assert actual == expected
    def test_all_override(condition_df_2_conditions, minimal_sbml_model):
        """Every observable-parameter placeholder is overridden in the
        measurement table; dynamic parameters map onto themselves."""
        condition_df = condition_df_2_conditions

        _, sbml_model = minimal_sbml_model
        for parameter_id in ('dynamicParameter1',
                             'dynamicParameter2',
                             'observableParameter1_obs1',
                             'observableParameter2_obs1',
                             'observableParameter1_obs2'):
            add_global_parameter(sbml_model, parameter_id)

        measurement_df = pd.DataFrame({
            'observableId': ['obs1', 'obs2', 'obs1', 'obs2'],
            'simulationConditionId': ['condition1', 'condition1',
                                      'condition2', 'condition2'],
            'preequilibrationConditionId': ['', '', '', ''],
            'observableParameters': [
                'obs1par1override;obs1par2cond1override',
                'obs2par1cond1override',
                'obs1par1override;obs1par2cond2override',
                'obs2par1cond2override',
            ],
            'noiseParameters': ['', '', '', ''],
        })

        # One (preequilibration map, simulation map) pair per condition.
        expected = [
            ({}, {
                'dynamicParameter1': 'dynamicParameter1',
                'dynamicParameter2': 'dynamicParameter2',
                'observableParameter1_obs1': 'obs1par1override',
                'observableParameter2_obs1': 'obs1par2cond1override',
                'observableParameter1_obs2': 'obs2par1cond1override',
            }),
            ({}, {
                'dynamicParameter1': 'dynamicParameter1',
                'dynamicParameter2': 'dynamicParameter2',
                'observableParameter1_obs1': 'obs1par1override',
                'observableParameter2_obs1': 'obs1par2cond2override',
                'observableParameter1_obs2': 'obs2par1cond2override',
            }),
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            sbml_model=sbml_model)

        assert actual == expected
    def test_parameterized_condition_table(minimal_sbml_model):
        """A condition-table column may override a model parameter with
        estimated parameters or a numeric constant."""
        condition_df = pd.DataFrame({
            'conditionId': ['condition1', 'condition2', 'condition3'],
            'conditionName': ['', 'Condition 2', ''],
            'dynamicParameter1':
                ['dynamicOverride1_1', 'dynamicOverride1_2', 0],
        }).set_index('conditionId')

        measurement_df = pd.DataFrame({
            'simulationConditionId':
                ['condition1', 'condition2', 'condition3'],
            'observableId': ['obs1', 'obs2', 'obs1'],
            'observableParameters': '',
            'noiseParameters': '',
        })

        parameter_df = pd.DataFrame({
            'parameterId': ['dynamicOverride1_1', 'dynamicOverride1_2'],
            'parameterName': ['', '...'],  # ...
        }).set_index('parameterId')

        document, model = minimal_sbml_model
        model.createParameter().setId('dynamicParameter1')
        assert petab.get_model_parameters(model) == ['dynamicParameter1']

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            sbml_model=model)

        # One (preequilibration map, simulation map) pair per condition.
        expected = [
            ({}, {'dynamicParameter1': override})
            for override in ('dynamicOverride1_1', 'dynamicOverride1_2', 0)
        ]

        assert actual == expected
Example no. 5
0
    def test_all_override(self, condition_df_2_conditions):
        """Every observable-parameter placeholder is overridden per
        measurement; dynamic parameters map onto themselves."""
        condition_df = condition_df_2_conditions

        measurement_df = pd.DataFrame({
            'observableId': ['obs1', 'obs2', 'obs1', 'obs2'],
            'simulationConditionId': ['condition1', 'condition1',
                                      'condition2', 'condition2'],
            'preequilibrationConditionId': ['', '', '', ''],
            'observableParameters': [
                'obs1par1override;obs1par2cond1override',
                'obs2par1cond1override',
                'obs1par1override;obs1par2cond2override',
                'obs2par1cond2override',
            ],
            'noiseParameters': ['', '', '', ''],
        })

        # One flat list of mapped simulation parameters per condition.
        expected = [
            ['dynamicParameter1', 'dynamicParameter2', 'obs1par1override',
             'obs1par2cond1override', 'obs2par1cond1override'],
            ['dynamicParameter1', 'dynamicParameter2', 'obs1par1override',
             'obs1par2cond2override', 'obs2par1cond2override'],
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            par_sim_ids=['dynamicParameter1',
                         'dynamicParameter2',
                         'observableParameter1_obs1',
                         'observableParameter2_obs1',
                         'observableParameter1_obs2'])

        assert actual == expected
    def test_no_condition_specific(condition_df_2_conditions,
                                   minimal_sbml_model):
        """Trivial case: no condition-specific parameters; all model
        parameters map onto themselves in both conditions."""
        condition_df = condition_df_2_conditions

        measurement_df = pd.DataFrame({
            'observableId': ['obs1', 'obs2'],
            'simulationConditionId': ['condition1', 'condition2'],
            'preequilibrationConditionId': ['', ''],
            'observableParameters': ['', ''],
            'noiseParameters': ['', ''],
        })

        _, sbml_model = minimal_sbml_model
        for parameter_id in ('dynamicParameter1', 'dynamicParameter2',
                             'dynamicParameter3'):
            add_global_parameter(sbml_model, parameter_id)

        # Identity mapping, repeated once per condition.
        identity_map = {
            'dynamicParameter1': 'dynamicParameter1',
            'dynamicParameter2': 'dynamicParameter2',
            'dynamicParameter3': 'dynamicParameter3',
        }
        expected = [({}, dict(identity_map)), ({}, dict(identity_map))]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            sbml_model=sbml_model,
            measurement_df=measurement_df,
            condition_df=condition_df,
        )

        assert actual == expected
Example no. 7
0
    def test_parameterized_condition_table_changed_scale(
            minimal_sbml_model):
        """Test overriding a dynamic parameter `overridee` with
        - a log10 parameter to be estimated (condition 1)
        - lin parameter not estimated (condition2)
        - log10 parameter not estimated (condition 3)
        - constant override (condition 4)"""

        # overridden parameter
        overridee_id = "overridee"

        # set up model: a single parameter with value 2.0
        document, model = minimal_sbml_model
        p = model.createParameter()
        p.setId(overridee_id)
        p.setValue(2.0)
        assert petab.get_model_parameters(model) == [overridee_id]
        assert petab.get_model_parameters(model, with_values=True) \
            == {overridee_id: 2.0}

        # set up condition table
        condition_df = pd.DataFrame(data={
            CONDITION_ID:
                ['condition1', 'condition2', 'condition3', 'condition4'],
            overridee_id:
                ['dynamicOverrideLog10', 'fixedOverrideLin',
                 'fixedOverrideLog10', 10.0]
        })
        # Fix: use the CONDITION_ID constant consistently with the column
        # definition above, instead of the string literal 'conditionId'.
        condition_df.set_index(CONDITION_ID, inplace=True)

        # set up measurement table: one measurement per condition
        measurement_df = pd.DataFrame(data={
            SIMULATION_CONDITION_ID:
                ['condition1', 'condition2', 'condition3', 'condition4'],
            OBSERVABLE_ID:
                ['obs1', 'obs2', 'obs1', 'obs2'],
            OBSERVABLE_PARAMETERS: '',
            NOISE_PARAMETERS: '',
        })

        # set up parameter table; only dynamicOverrideLog10 is estimated
        parameter_df = pd.DataFrame(data={
            PARAMETER_ID: ['dynamicOverrideLog10',
                           'fixedOverrideLin',
                           'fixedOverrideLog10'],
            ESTIMATE: [1, 0, 0],
            NOMINAL_VALUE: [np.nan, -2, 1000],
            PARAMETER_SCALE: [LOG10, LIN, LOG10]
        })
        parameter_df.set_index(PARAMETER_ID, inplace=True)

        # test without preequilibration condition; unscaled known parameters

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            sbml_model=model
        )

        # Fixed (non-estimated) parameters are filled in with their nominal
        # values and reported on linear scale when scaled_parameters=False.
        expected = [
            ({}, {overridee_id: 'dynamicOverrideLog10'},
             {}, {overridee_id: LOG10}),
            ({}, {overridee_id: -2.0}, {}, {overridee_id: LIN}),
            # not scaled:
            ({}, {overridee_id: 1000.0}, {}, {overridee_id: LIN}),
            ({}, {overridee_id: 10.0}, {}, {overridee_id: LIN})
        ]

        assert actual == expected

        # test without preequilibration condition; scaled known parameters

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            sbml_model=model,
            scaled_parameters=True
        )

        expected = [
            ({}, {overridee_id: 'dynamicOverrideLog10'},
             {}, {overridee_id: LOG10}),
            ({}, {overridee_id: -2.0}, {}, {overridee_id: LIN}),
            # scaled fixedOverrideLog10: log10(1000) == 3.0
            ({}, {overridee_id: 3.0}, {}, {overridee_id: LOG10}),
            ({}, {overridee_id: 10.0}, {}, {overridee_id: LIN})
        ]

        assert actual == expected

        # Add preeq condition: now preequilibration maps are populated too

        measurement_df[PREEQUILIBRATION_CONDITION_ID] = \
            ['condition1', 'condition1', 'condition3', 'condition3']
        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            sbml_model=model)

        expected = [
            ({overridee_id: 'dynamicOverrideLog10'},
             {overridee_id: 'dynamicOverrideLog10'},
             {overridee_id: LOG10}, {overridee_id: LOG10}),
            ({overridee_id: 'dynamicOverrideLog10'}, {overridee_id: -2.0},
             {overridee_id: LOG10}, {overridee_id: LIN}),
            # not rescaled:
            ({overridee_id: 1000.0}, {overridee_id: 1000.0},
             {overridee_id: LIN}, {overridee_id: LIN}),
            ({overridee_id: 1000.0}, {overridee_id: 10.0},
             {overridee_id: LIN}, {overridee_id: LIN})]

        assert actual == expected
Example no. 8
0
    def test_partial_override(condition_df_2_conditions,
                              minimal_sbml_model):
        """Only some measurements override observable parameters; a missing
        override (obs2 in condition1 has empty observableParameters) maps
        to NaN for its placeholder parameter."""
        # Condition-specific parameters, keeping original parameters
        condition_df = condition_df_2_conditions

        _, sbml_model = minimal_sbml_model
        add_global_parameter(sbml_model, 'dynamicParameter1')
        add_global_parameter(sbml_model, 'observableParameter1_obs1')
        add_global_parameter(sbml_model, 'observableParameter2_obs1')
        add_global_parameter(sbml_model, 'observableParameter1_obs2')

        # Second measurement (obs2, condition1) deliberately has no
        # observableParameters override.
        measurement_df = pd.DataFrame(data={
            OBSERVABLE_ID: ['obs1', 'obs2', 'obs1', 'obs2'],
            SIMULATION_CONDITION_ID: ['condition1', 'condition1',
                                      'condition2', 'condition2'],
            PREEQUILIBRATION_CONDITION_ID: ['', '', '', ''],
            OBSERVABLE_PARAMETERS: ['obs1par1override;obs1par2cond1override',
                                    '',
                                    'obs1par1override;obs1par2cond2override',
                                    'obs2par1cond2override'],
            NOISE_PARAMETERS: ['', '', '', '']
        })

        parameter_df = pd.DataFrame(data={
            PARAMETER_ID: [
                'dynamicParameter1', 'obs1par1override',
                'obs1par2cond1override', 'obs1par2cond2override',
                'obs2par1cond2override'],
            ESTIMATE: [1, 1, 1, 1, 1],
        })
        parameter_df.set_index(PARAMETER_ID, inplace=True)

        # One (preeq map, sim map, preeq scale map, sim scale map) tuple
        # per condition; the missing override yields np.nan.
        expected = [({},
                     {'fixedParameter1': 1.0,
                      'dynamicParameter1': 'dynamicParameter1',
                      'observableParameter1_obs1': 'obs1par1override',
                      'observableParameter2_obs1': 'obs1par2cond1override',
                      'observableParameter1_obs2': np.nan,
                      },
                     {},
                     {'fixedParameter1': LIN,
                      'dynamicParameter1': LIN,
                      'observableParameter1_obs1': LIN,
                      'observableParameter2_obs1': LIN,
                      'observableParameter1_obs2': LIN}),
                    ({},
                     {'fixedParameter1': 2.0,
                      'dynamicParameter1': 'dynamicParameter1',
                      'observableParameter1_obs1': 'obs1par1override',
                      'observableParameter2_obs1': 'obs1par2cond2override',
                      'observableParameter1_obs2': 'obs2par1cond2override'
                      },
                     {},
                     {'fixedParameter1': LIN,
                      'dynamicParameter1': LIN,
                      'observableParameter1_obs1': LIN,
                      'observableParameter2_obs1': LIN,
                      'observableParameter1_obs2': LIN}),
                    ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            sbml_model=sbml_model, parameter_df=parameter_df
        )

        # Comparison with NaN containing expected results fails after pickling!
        # Need to test first for correct NaNs, then for the rest.
        # (NaN != NaN, so the dicts are made comparable by replacing the
        # verified NaNs with 0.0 before the equality check.)
        assert np.isnan(expected[0][1]['observableParameter1_obs2'])
        assert np.isnan(actual[0][1]['observableParameter1_obs2'])
        expected[0][1]['observableParameter1_obs2'] = 0.0
        actual[0][1]['observableParameter1_obs2'] = 0.0

        assert actual == expected
Example no. 9
0
    def test_all_override(condition_df_2_conditions,
                          minimal_sbml_model):
        """All observable-parameter placeholders are overridden per
        measurement; the same mapping is then recomputed with parallel
        execution enabled and must match."""
        # Condition-specific parameters overriding original parameters
        condition_df = condition_df_2_conditions

        _, sbml_model = minimal_sbml_model
        add_global_parameter(sbml_model, 'dynamicParameter1')
        add_global_parameter(sbml_model, 'dynamicParameter2')

        measurement_df = pd.DataFrame(data={
            OBSERVABLE_ID: ['obs1', 'obs2', 'obs1', 'obs2'],
            SIMULATION_CONDITION_ID: ['condition1', 'condition1',
                                      'condition2', 'condition2'],
            PREEQUILIBRATION_CONDITION_ID: ['', '', '', ''],
            OBSERVABLE_PARAMETERS: ['obs1par1override;obs1par2cond1override',
                                    'obs2par1cond1override',
                                    'obs1par1override;obs1par2cond2override',
                                    'obs2par1cond2override'],
            NOISE_PARAMETERS: ['', '', '', '']
        })

        # All overrides are estimated parameters.
        parameter_df = pd.DataFrame(data={
            PARAMETER_ID: [
                'dynamicParameter1', 'dynamicParameter2', 'obs1par1override',
                'obs1par2cond1override', 'obs1par2cond2override',
                'obs2par1cond1override', 'obs2par1cond2override'
            ],
            ESTIMATE: [1] * 7
        })
        parameter_df.set_index(PARAMETER_ID, inplace=True)

        # One (preeq map, sim map, preeq scale map, sim scale map) tuple
        # per condition; all scales are linear.
        expected = [
            (
                {},
                {'fixedParameter1': 1.0,
                 'dynamicParameter1': 'dynamicParameter1',
                 'dynamicParameter2': 'dynamicParameter2',
                 'observableParameter1_obs1': 'obs1par1override',
                 'observableParameter2_obs1': 'obs1par2cond1override',
                 'observableParameter1_obs2': 'obs2par1cond1override',
                 },
                {},
                {'fixedParameter1': LIN,
                 'dynamicParameter1': LIN,
                 'dynamicParameter2': LIN,
                 'observableParameter1_obs1': LIN,
                 'observableParameter2_obs1': LIN,
                 'observableParameter1_obs2': LIN
                 }
            ),
            (
                {},
                {'fixedParameter1': 2.0,
                 'dynamicParameter1': 'dynamicParameter1',
                 'dynamicParameter2': 'dynamicParameter2',
                 'observableParameter1_obs1': 'obs1par1override',
                 'observableParameter2_obs1': 'obs1par2cond2override',
                 'observableParameter1_obs2': 'obs2par1cond2override'
                 },
                {},
                {'fixedParameter1': LIN,
                 'dynamicParameter1': LIN,
                 'dynamicParameter2': LIN,
                 'observableParameter1_obs1': LIN,
                 'observableParameter2_obs1': LIN,
                 'observableParameter1_obs2': LIN
                 }
            )
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            sbml_model=sbml_model, parameter_df=parameter_df)
        assert actual == expected

        # For one case we test parallel execution, which must yield the same
        # result
        # NOTE(review): this mutates process-wide state (the env var is not
        # restored afterwards) — later tests in the same session run with
        # 4 threads.
        os.environ[petab.ENV_NUM_THREADS] = "4"
        actual = petab.get_optimization_to_simulation_parameter_mapping(
            measurement_df=measurement_df,
            condition_df=condition_df,
            sbml_model=sbml_model, parameter_df=parameter_df)
        assert actual == expected
Example no. 10
0
    def test_no_condition_specific(condition_df_2_conditions,
                                   minimal_sbml_model):
        """No condition-specific parameters; exercises four scenarios in
        sequence: without a parameter table, with one, with scaled
        parameters, and with fill_fixed_parameters=False."""
        # Trivial case - no condition-specific parameters

        condition_df = condition_df_2_conditions

        measurement_df = pd.DataFrame(data={
            OBSERVABLE_ID: ['obs1', 'obs2'],
            SIMULATION_CONDITION_ID: ['condition1', 'condition2'],
            PREEQUILIBRATION_CONDITION_ID: ['', ''],
            OBSERVABLE_PARAMETERS: ['', ''],
            NOISE_PARAMETERS: ['', '']
        })

        _, sbml_model = minimal_sbml_model
        add_global_parameter(sbml_model, 'dynamicParameter1').setValue(1.0)
        add_global_parameter(sbml_model, 'dynamicParameter2').setValue(2.0)
        add_global_parameter(sbml_model, 'dynamicParameter3').setValue(3.0)
        # add species, which will have initial concentration in condition table
        #  but which should not show up in mapping
        s = sbml_model.createSpecies()
        s.setId("someSpecies")
        condition_df["someSpecies"] = [0.0, 0.0]

        # Test without parameter table
        # Model parameter values are used; fixedParameter1 comes from the
        # condition table (1.0 / 2.0 per condition).
        expected = [({},
                     {'dynamicParameter1': 1.0,
                      'dynamicParameter2': 2.0,
                      'dynamicParameter3': 3.0,
                      'fixedParameter1': 1.0},
                     {},
                     {'dynamicParameter1': LIN,
                      'dynamicParameter2': LIN,
                      'dynamicParameter3': LIN,
                      'fixedParameter1': LIN}),
                    ({},
                     {'dynamicParameter1': 1.0,
                      'dynamicParameter2': 2.0,
                      'dynamicParameter3': 3.0,
                      'fixedParameter1': 2.0},
                     {},
                     {'dynamicParameter1': LIN,
                      'dynamicParameter2': LIN,
                      'dynamicParameter3': LIN,
                      'fixedParameter1': LIN}
                     )]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            sbml_model=sbml_model,
            measurement_df=measurement_df,
            condition_df=condition_df,
        )
        assert actual == expected

        # Test with parameter table
        # dynamicParameter1 is not estimated (nominal 11.0), the other two
        # are estimated; dynamicParameter3 has no nominal value.
        parameter_df = pd.DataFrame(data={
            PARAMETER_ID: ['dynamicParameter1', 'dynamicParameter2',
                           'dynamicParameter3'],
            ESTIMATE: [0, 1, 1],
            NOMINAL_VALUE: [11.0, 12.0, None],
            PARAMETER_SCALE: [LOG, LOG10, LIN],
        })
        parameter_df.set_index(PARAMETER_ID, inplace=True)

        # Unscaled: the fixed parameter is filled with its nominal value
        # and reported on linear scale.
        expected = [({},
                     {'dynamicParameter1': 11.0,
                      'dynamicParameter2': 'dynamicParameter2',
                      'dynamicParameter3': 'dynamicParameter3',
                      'fixedParameter1': 1.0},
                     {},
                     {'dynamicParameter1': LIN,
                      'dynamicParameter2': LOG10,
                      'dynamicParameter3': LIN,
                      'fixedParameter1': LIN}),
                    ({},
                     {'dynamicParameter1': 11.0,
                      'dynamicParameter2': 'dynamicParameter2',
                      'dynamicParameter3': 'dynamicParameter3',
                      'fixedParameter1': 2.0},
                     {},
                     {'dynamicParameter1': LIN,
                      'dynamicParameter2': LOG10,
                      'dynamicParameter3': LIN,
                      'fixedParameter1': LIN})
                    ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            sbml_model=sbml_model,
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df
        )

        assert actual == expected

        # Test with applied scaling

        # With scaled_parameters=True the fixed dynamicParameter1 is
        # reported on its LOG scale (log(11.0)).
        expected = [
            ({},
             {'dynamicParameter1': np.log(11.0),
              'dynamicParameter2': 'dynamicParameter2',
              'dynamicParameter3': 'dynamicParameter3',
              'fixedParameter1': 1.0},
             {},
             {'dynamicParameter1': LOG,
              'dynamicParameter2': LOG10,
              'dynamicParameter3': LIN,
              'fixedParameter1': LIN}),
            ({},
             {'dynamicParameter1': np.log(11.0),
              'dynamicParameter2': 'dynamicParameter2',
              'dynamicParameter3': 'dynamicParameter3',
              'fixedParameter1': 2.0},
             {},
             {'dynamicParameter1': LOG,
              'dynamicParameter2': LOG10,
              'dynamicParameter3': LIN,
              'fixedParameter1': LIN}),
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            sbml_model=sbml_model,
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            scaled_parameters=True
        )

        assert actual == expected

        # Test without fixed overrides

        # With fill_fixed_parameters=False the non-estimated parameter is
        # mapped to its own ID instead of its nominal value.
        expected = [
            ({},
             {'dynamicParameter1': 'dynamicParameter1',
              'dynamicParameter2': 'dynamicParameter2',
              'dynamicParameter3': 'dynamicParameter3',
              'fixedParameter1': 1.0},
             {},
             {'dynamicParameter1': LOG,
              'dynamicParameter2': LOG10,
              'dynamicParameter3': LIN,
              'fixedParameter1': LIN}),
            ({},
             {'dynamicParameter1': 'dynamicParameter1',
              'dynamicParameter2': 'dynamicParameter2',
              'dynamicParameter3': 'dynamicParameter3',
              'fixedParameter1': 2.0},
             {},
             {'dynamicParameter1': LOG,
              'dynamicParameter2': LOG10,
              'dynamicParameter3': LIN,
              'fixedParameter1': LIN}),
        ]

        actual = petab.get_optimization_to_simulation_parameter_mapping(
            sbml_model=sbml_model,
            measurement_df=measurement_df,
            condition_df=condition_df,
            parameter_df=parameter_df,
            fill_fixed_parameters=False
        )

        assert actual == expected
Example no. 11
0
    def create_objective(self,
                         model=None,
                         solver=None,
                         edatas=None,
                         force_compile: bool = False):
        """
        Create a pypesto.PetabAmiciObjective.

        Parameters
        ----------
        model:
            AMICI model; created via ``self.create_model`` if ``None``.
        solver:
            AMICI solver; created via ``self.create_solver`` if ``None``.
        edatas:
            AMICI experimental data objects; created via
            ``self.create_edatas`` if ``None``.
        force_compile:
            Passed on to ``self.create_model`` to force recompilation of
            the model (only used when ``model`` is ``None``).
        """
        # get simulation conditions
        simulation_conditions = petab.get_simulation_conditions(
            self.petab_problem.measurement_df)

        # create model
        if model is None:
            model = self.create_model(force_compile=force_compile)
        # create solver
        if solver is None:
            solver = self.create_solver(model)
        # create conditions and edatas from measurement data
        if edatas is None:
            edatas = self.create_edatas(
                model=model, simulation_conditions=simulation_conditions)

        # simulation <-> optimization parameter mapping
        par_opt_ids = self.petab_problem.get_optimization_parameters()

        # per-condition mapping of optimization to simulation parameters
        parameter_mappings = \
            petab.get_optimization_to_simulation_parameter_mapping(
                condition_df=self.petab_problem.condition_df,
                measurement_df=self.petab_problem.measurement_df,
                parameter_df=self.petab_problem.parameter_df,
                sbml_model=self.petab_problem.sbml_model,
                simulation_conditions=simulation_conditions,
            )

        # corresponding per-condition parameter scale mapping
        scale_mappings = \
            petab.get_optimization_to_simulation_scale_mapping(
                parameter_df=self.petab_problem.parameter_df,
                mapping_par_opt_to_par_sim=parameter_mappings,
                measurement_df=self.petab_problem.measurement_df
            )

        # unify and check preeq and sim mappings
        parameter_mapping, scale_mapping = _merge_preeq_and_sim_pars(
            parameter_mappings, scale_mappings)

        # simulation ids (for correct order)
        par_sim_ids = list(model.getParameterIds())

        # create lists from dicts in correct order
        parameter_mapping = _mapping_to_list(parameter_mapping, par_sim_ids)
        scale_mapping = _mapping_to_list(scale_mapping, par_sim_ids)

        # check whether there is something suspicious in the mapping
        _check_parameter_mapping_ok(parameter_mapping, par_sim_ids, model,
                                    edatas)

        # create objective
        obj = PetabAmiciObjective(petab_importer=self,
                                  amici_model=model,
                                  amici_solver=solver,
                                  edatas=edatas,
                                  x_ids=par_opt_ids,
                                  x_names=par_opt_ids,
                                  mapping_par_opt_to_par_sim=parameter_mapping,
                                  mapping_scale_opt_to_scale_sim=scale_mapping)

        return obj
    def test_parameterized_condition_table_changed_scale(minimal_sbml_model):
        """Test overriding a dynamic parameter `overridee` with
        - a log10 parameter to be estimated (condition 1)
        - lin parameter not estimated (condition2)
        - log10 parameter not estimated (condition 3)
        - constant override (condition 4)"""

        document, model = minimal_sbml_model
        model.createParameter().setId('overridee')
        assert petab.get_model_parameters(model) == ['overridee']

        condition_df = pd.DataFrame(
            data={
                'conditionId':
                ['condition1', 'condition2', 'condition3', 'condition4'],
                'conditionName':
                '',
                'overridee': [
                    'dynamicOverrideLog10', 'fixedOverrideLin',
                    'fixedOverrideLog10', 10.0
                ]
            })
        condition_df.set_index('conditionId', inplace=True)

        measurement_df = pd.DataFrame(
            data={
                'simulationConditionId':
                ['condition1', 'condition2', 'condition3', 'condition4'],
                'observableId': ['obs1', 'obs2', 'obs1', 'obs2'],
                'observableParameters':
                '',
                'noiseParameters':
                '',
            })

        # Only dynamicOverrideLog10 is estimated; the two fixed overrides
        # have nominal values 2 (lin) and -2 (log10).
        parameter_df = pd.DataFrame(
            data={
                'parameterId': [
                    'dynamicOverrideLog10', 'fixedOverrideLin',
                    'fixedOverrideLog10'
                ],
                'parameterName':
                '',
                'estimate': [1, 0, 0],
                'nominalValue': [np.nan, 2, -2],
                'parameterScale': ['log10', 'lin', 'log10']
            })
        parameter_df.set_index('parameterId', inplace=True)

        actual_par_map = \
            petab.get_optimization_to_simulation_parameter_mapping(
                measurement_df=measurement_df,
                condition_df=condition_df,
                parameter_df=parameter_df,
                sbml_model=model
            )

        actual_scale_map = petab.get_optimization_to_simulation_scale_mapping(
            parameter_df=parameter_df,
            measurement_df=measurement_df,
            mapping_par_opt_to_par_sim=actual_par_map)

        # Fixed overrides are filled in on linear scale; fixedOverrideLog10
        # has nominal -2 on log10 scale, i.e. 10**-2 == 0.01 unscaled.
        expected_par_map = [
            ({}, {
                'overridee': 'dynamicOverrideLog10'
            }),
            ({}, {
                'overridee': 2.0
            }),
            # rescaled:
            ({}, {
                'overridee': 0.01
            }),
            ({}, {
                'overridee': 10.0
            })
        ]

        # Filled-in numeric values are reported as 'lin'; only the estimated
        # override keeps its 'log10' scale.
        expected_scale_map = [({}, {
            'overridee': 'log10'
        }), ({}, {
            'overridee': 'lin'
        }), ({}, {
            'overridee': 'lin'
        }), ({}, {
            'overridee': 'lin'
        })]

        assert actual_par_map == expected_par_map
        assert actual_scale_map == expected_scale_map

        # Add preeq condition
        # (mutates measurement_df in place, so the second mapping call below
        # sees the preequilibration column)
        measurement_df['preequilibrationConditionId'] = \
            ['condition1', 'condition1', 'condition3', 'condition3']
        actual_par_map = \
            petab.get_optimization_to_simulation_parameter_mapping(
                measurement_df=measurement_df,
                condition_df=condition_df,
                parameter_df=parameter_df,
                sbml_model=model
            )

        actual_scale_map = petab.get_optimization_to_simulation_scale_mapping(
            parameter_df=parameter_df,
            measurement_df=measurement_df,
            mapping_par_opt_to_par_sim=actual_par_map)

        # Each tuple now holds (preequilibration map, simulation map).
        expected_par_map = [
            ({
                'overridee': 'dynamicOverrideLog10'
            }, {
                'overridee': 'dynamicOverrideLog10'
            }),
            ({
                'overridee': 'dynamicOverrideLog10'
            }, {
                'overridee': 2.0
            }),
            # rescaled:
            ({
                'overridee': 0.01
            }, {
                'overridee': 0.01
            }),
            ({
                'overridee': 0.01
            }, {
                'overridee': 10.0
            })
        ]
        expected_scale_map = [({
            'overridee': 'log10'
        }, {
            'overridee': 'log10'
        }), ({
            'overridee': 'log10'
        }, {
            'overridee': 'lin'
        }), ({
            'overridee': 'lin'
        }, {
            'overridee': 'lin'
        }), ({
            'overridee': 'lin'
        }, {
            'overridee': 'lin'
        })]
        assert actual_par_map == expected_par_map
        assert actual_scale_map == expected_scale_map