Example #1
0
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool]=False, optimiser=None) -> None:
        """
        Runs an optimisation test with the given model and optimisation function.

        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function that performs the optimisation. The function should take
            the model, step callback and the `global_step` tensor as the arguments
        :param use_global_step: flag indicating that the `global_step` variable should be used
        :param optimiser: optional optimiser passed through to the `Monitor` context manager
        """

        session = model.enquire_session()
        # The global step tensor is only created when the test exercises it.
        global_step_tensor = mon.create_global_step(session) if use_global_step else None

        monitor_task = _DummyMonitorTask()

        lml_before = model.compute_log_likelihood()

        # Run optimisation; the context manager starts and stops the monitoring.
        with mon.Monitor([monitor_task], session, global_step_tensor, optimiser=optimiser) \
                as monitor:
            optimise_func(model, monitor, global_step_tensor)

        lml_after = model.compute_log_likelihood()

        if use_global_step:
            # Check that the 'global_step' has the actual number of iterations
            global_step = session.run(global_step_tensor)
            self.assertEqual(global_step, monitor_task.call_count)
        else:
            # Just check that there were some iterations
            self.assertGreater(monitor_task.call_count, 0)

        # Check that the optimiser has actually improved the likelihood.
        # (Previously lml_before/lml_after were computed but never asserted.)
        self.assertGreater(lml_after, lml_before)
Example #2
0
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool]=False) -> None:
        """
        Exercises an optimisation routine against the supplied model.

        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function performing the optimisation; it receives the model,
            a step callback and the `global_step` tensor as its arguments
        :param use_global_step: flag indicating that the `global_step` variable should be used
        """

        session = model.enquire_session()
        if use_global_step:
            global_step_tensor = mon.create_global_step(session)
        else:
            global_step_tensor = None

        task = _DummyMonitorTask()
        monitor = mon.Monitor([task], session, global_step_tensor)
        monitor.start_monitoring()

        # Record the LML before the optimisation, optimise, then record it afterwards.
        initial_lml = model.compute_log_likelihood()
        optimise_func(model, monitor, global_step_tensor)
        final_lml = model.compute_log_likelihood()

        if use_global_step:
            # 'global_step' must reflect the exact number of iterations performed.
            self.assertEqual(session.run(global_step_tensor), task.call_count)
        else:
            # Without a global step we can only verify that iterations happened at all.
            self.assertGreater(task.call_count, 0)

        # The optimiser must have improved the likelihood.
        self.assertGreater(final_lml, initial_lml)
Example #3
0
def add_jitter_to_model(model: gpflow.models.Model, mean: float=0, sd: float=0.1) -> None:
    """Add randomness (jitter) to a model's parameters.

    Randomness is drawn independently for each parameter from a normal distribution
    with mean `mean` and standard deviation `sd`.

    This method works inplace on the model which is passed.

    Parameters
    ----------
    model: gpflow.models.Model
        Model to jitter.

    mean: float
        Mean of normal distribution that randomness is drawn from.

    sd: float
        Standard deviation of normal distribution that randomness is drawn from.
    """
    # Build all jittered values first and push them in a single `assign` call,
    # instead of one assign per parameter as before (each assign may trigger
    # session work); `assign` already accepted a dict, so the interface is unchanged.
    jittered = {
        param_pathname: param_value + np.random.normal(loc=mean, scale=sd)
        for param_pathname, param_value in model.read_values().items()
    }
    if jittered:  # skip a pointless assign on a model with no readable parameters
        model.assign(jittered)
Example #4
0
    def __init__(self, model: gpflow.models.Model, optimiser: gpflow.training.optimizer.Optimizer,
                 global_step, session=None, var_list=None):
        self.session = model.enquire_session(session)
        self.var_list = var_list

        # Timers: total wall-clock time, time spent inside the optimiser,
        # and a counter tracking the number of iterations.
        self.timers = {
            Trigger.TOTAL_TIME: timer.Stopwatch(),
            Trigger.OPTIMISATION_TIME: timer.Stopwatch(),
            Trigger.ITER: timer.ElapsedTracker(),
        }

        self.tasks = []
        self.model = model
        self.global_step = global_step

        self.set_optimiser(optimiser)
Example #5
0
def negative_log_likelihood(model: gpflow.models.Model) -> float:
    r"""Calculate the negative logarithmic likelihood of a model.

    Uses gpflow method `compute_log_likelihood`, which returns:
    ```
        LL = \log p(Y | model, theta)
    ```
    with `theta` being the model's parameters and `model` usually being a GP
    regression model. Note that `LL` may take either sign — a log density is
    not bounded below by zero — so the previous claim that it is non-negative
    was incorrect.

    We negate `LL` so that smaller scores are better, making the result usable
    as a minimisation objective.

    Parameters
    ----------
    model: gpflow.models.Model
        Model to be scored.

    Returns
    -------
    score: float
        Negative logarithmic likelihood score of the passed model.

    """
    return -model.compute_log_likelihood()