Example #1
0
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool]=False) -> None:
        """
        Runs an optimisation test with the given model and optimisation function.

        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function that performs the optimisation. The function should take
            the model, step callback and the `global_step` tensor as the arguments
        :param use_global_step: flag indicating that the `global_step` variable should be used
        """

        session = model.enquire_session()
        # The global step tensor is created only when the test exercises it.
        global_step_tensor = mon.create_global_step(session) if use_global_step else None

        monitor_task = _DummyMonitorTask()

        # Calculate LML before the optimisation, run optimisation and calculate LML after that.
        lml_before = model.compute_log_likelihood()

        # Use the Monitor as a context manager so monitoring is always stopped,
        # even if the optimisation raises. The previous version called
        # start_monitoring() with no matching stop, leaking the monitoring state.
        with mon.Monitor([monitor_task], session, global_step_tensor) as monitor:
            optimise_func(model, monitor, global_step_tensor)

        lml_after = model.compute_log_likelihood()

        if use_global_step:
            # Check that 'global_step' holds the actual number of iterations
            global_step = session.run(global_step_tensor)
            self.assertEqual(global_step, monitor_task.call_count)
        else:
            # Just check that there were some iterations
            self.assertGreater(monitor_task.call_count, 0)

        # Check that the optimiser has done something
        self.assertGreater(lml_after, lml_before)
Example #2
0
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool]=False, optimiser=None) -> None:
        """
        Runs an optimisation test with the given model and optimisation function.

        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function that performs the optimisation. The function should take
            the model, step callback and the `global_step` tensor as the arguments
        :param use_global_step: flag indicating that the `global_step` variable should be used
        :param optimiser: optional optimiser instance forwarded to the `Monitor`
        """

        session = model.enquire_session()
        # The global step tensor is created only when the test exercises it.
        global_step_tensor = mon.create_global_step(session) if use_global_step else None

        monitor_task = _DummyMonitorTask()

        lml_before = model.compute_log_likelihood()

        # Run optimisation; the Monitor context manager guarantees monitoring
        # is stopped even if the optimisation raises.
        with mon.Monitor([monitor_task], session, global_step_tensor, optimiser=optimiser) \
                as monitor:
            optimise_func(model, monitor, global_step_tensor)

        lml_after = model.compute_log_likelihood()

        if use_global_step:
            # Check that 'global_step' holds the actual number of iterations
            global_step = session.run(global_step_tensor)
            self.assertEqual(global_step, monitor_task.call_count)
        else:
            # Just check that there were some iterations
            self.assertGreater(monitor_task.call_count, 0)

        # Check that the optimiser has done something. Previously lml_before and
        # lml_after were computed but never compared, leaving the check dead.
        self.assertGreater(lml_after, lml_before)
Example #3
0
def negative_log_likelihood(model: gpflow.models.Model) -> float:
    r"""Calculate the negative logarithmic likelihood of a model.

    Uses gpflow method `compute_log_likelihood`, which returns:
    ```
        LL = \log p(Y | model, theta)
    ```
    With `theta` being the models parameters and `model` usually being a GP regression model,
    `LL` being the *non-negative* log likelihood.

    We then negate `LL` in order to obtain the negative log likelihood.

    Parameters
    ----------
    model: gpflow.models.Model
        Model to be scored.

    Returns
    -------
    score: float
        Negative logarithmic likelihood score of the passed model.

    """
    return -model.compute_log_likelihood()