Example No. 1
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool] = False) -> None:
        """
        Runs an optimisation test with the given model and optimisation function.
        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function that performs the optimisation. It should take
        the model, the step callback and the `global_step` tensor as arguments.
        :param use_global_step: Flag indicating that the `global_step` variable should be used.
        """

        session = model.enquire_session()
        global_step_tensor = mon.create_global_step(session) if use_global_step else None

        monitor_task = _DummyMonitorTask()
        monitor = mon.Monitor([monitor_task], session, global_step_tensor)
        monitor.start_monitoring()

        # Calculate LML before the optimisation, run optimisation and calculate LML after that.
        lml_before = model.compute_log_likelihood()
        optimise_func(model, monitor, global_step_tensor)
        lml_after = model.compute_log_likelihood()

        if use_global_step:
            # Check that the 'global_step' has the actual number of iterations
            global_step = session.run(global_step_tensor)
            self.assertEqual(global_step, monitor_task.call_count)
        else:
            # Just check that there were some iterations
            self.assertGreater(monitor_task.call_count, 0)

        # Check that the optimiser has done something
        self.assertGreater(lml_after, lml_before)
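
For context, here is a minimal sketch of an optimisation function with the signature the helper above expects. It is not taken from the original test; it assumes the GPflow 1.x training API (`gpflow.train.AdamOptimizer` with the `step_callback` and `global_step` keyword arguments) and the same imports as the snippets on this page.

def adam_optimise_func(model, step_callback, global_step_tensor):
    # Hypothetical optimise_func: run a few Adam iterations, invoking the
    # monitor once per step via step_callback and advancing global_step.
    optimiser = gpflow.train.AdamOptimizer(0.01)
    optimiser.minimize(model, maxiter=10,
                       step_callback=step_callback,
                       global_step=global_step_tensor)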
Example No. 2
    def _optimise_model(self, model: gpflow.models.Model,
                        optimise_func: Callable[[gpflow.models.Model, Callable, tf.Variable], None],
                        use_global_step: Optional[bool] = False, optimiser=None) -> None:
        """
        Runs an optimisation test with the given model and optimisation function.
        :param model: Model derived from `gpflow.models.Model`
        :param optimise_func: Function that performs the optimisation. It should take
        the model, the step callback and the `global_step` tensor as arguments.
        :param use_global_step: Flag indicating that the `global_step` variable should be used.
        :param optimiser: Optional optimiser instance passed through to the `Monitor`.
        """

        session = model.enquire_session()
        global_step_tensor = mon.create_global_step(session) if use_global_step else None

        monitor_task = _DummyMonitorTask()

        lml_before = model.compute_log_likelihood()

        # Run optimisation
        with mon.Monitor([monitor_task], session, global_step_tensor, optimiser=optimiser) \
                as monitor:
            optimise_func(model, monitor, global_step_tensor)

        lml_after = model.compute_log_likelihood()

        if use_global_step:
            # Check that the 'global_step' has the actual number of iterations
            global_step = session.run(global_step_tensor)
            self.assertEqual(global_step, monitor_task.call_count)
        else:
            # Just check that there were some iterations
            self.assertGreater(monitor_task.call_count, 0)
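
A possible call site for the helper above, shown only as a sketch: the model-building helper name is a placeholder, and it is assumed that `gpflow.train.ScipyOptimizer.minimize` accepts a `step_callback` argument in this GPflow version. Since the Scipy optimiser does not advance a TensorFlow `global_step`, the helper is called with `use_global_step=False` (the default).

    def test_scipy_with_monitor(self):
        # Hypothetical test body with placeholder names.
        model = self._create_model()  # assumed helper returning a gpflow model
        optimiser = gpflow.train.ScipyOptimizer()

        def optimise_func(m, step_callback, _global_step_tensor):
            optimiser.minimize(m, maxiter=10, step_callback=step_callback)

        # The optimiser instance is handed to the Monitor via the optimiser keyword.
        self._optimise_model(model, optimise_func, optimiser=optimiser)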
Example No. 3
    def __init__(self, model: gpflow.models.Model, optimiser: gpflow.training.optimizer.Optimizer,
                 global_step, session=None, var_list=None):
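        """
        Stores the model, sets up the total-time, optimisation-time and iteration
        timers, and attaches the given optimiser.
        :param model: Model derived from `gpflow.models.Model`
        :param optimiser: Optimiser derived from `gpflow.training.optimizer.Optimizer`
        :param global_step: `global_step` tensor tracking the number of optimisation steps
        :param session: TensorFlow session; the model's session is used if not given
        :param var_list: Optional list of variables to optimise
        """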
        self.session = model.enquire_session(session)
        self.var_list = var_list

        # Setup timers
        total_time = timer.Stopwatch()
        optimisation_time = timer.Stopwatch()
        iter_count = timer.ElapsedTracker()
        self.timers = {Trigger.TOTAL_TIME: total_time,
                       Trigger.OPTIMISATION_TIME: optimisation_time,
                       Trigger.ITER: iter_count}
        self.tasks = []
        self.model = model
        self.global_step = global_step

        self.set_optimiser(optimiser)
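
The snippet above omits the name of the enclosing class. The construction sketch below uses the placeholder name `ManagedOptimisation` and reuses `mon.create_global_step` from the earlier examples; everything other than the GPflow calls shown on this page is an assumption.

# Placeholder construction sketch; ManagedOptimisation stands in for the
# (unnamed) class that owns the __init__ above.
session = model.enquire_session()
global_step = mon.create_global_step(session)
managed = ManagedOptimisation(model, gpflow.train.AdamOptimizer(0.01),
                              global_step, session=session)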