Example 1
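The snippet below is a test method lifted out of its class, so it does not run standalone: `self.gp_test_environment_input`, `self._build_gaussian_process_test_data`, and the assertion helper come from the surrounding test fixture. The class and function names match the API of Yelp's MOE (`moe.optimal_learning`) package; assuming that is the source, a module-level import preamble along these lines would be needed (the exact module paths are an assumption, not confirmed by this listing):

    import numpy

    # Assumed MOE import paths -- verify against the installed package layout.
    from moe.optimal_learning.python.geometry_utils import ClosedInterval
    from moe.optimal_learning.python.python_version.domain import TensorProductDomain
    from moe.optimal_learning.python.python_version.log_likelihood import (
        GaussianProcessLogMarginalLikelihood,
        multistart_hyperparameter_optimization,
    )
    from moe.optimal_learning.python.python_version.optimization import (
        GradientDescentOptimizer,
        GradientDescentParameters,
    )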
    def test_multistart_hyperparameter_optimization(self):
        """Check that multistart optimization (gradient descent) can find the optimum hyperparameters."""
        random_state = numpy.random.get_state()
        numpy.random.seed(87612)

        max_num_steps = 200  # this is generally *too few* steps; we configure it this way so the test will run quickly
        max_num_restarts = 5
        num_steps_averaged = 0     # no averaging of the final iterates
        gamma = 0.2                # decay exponent for the step-size schedule
        pre_mult = 1.0             # scale factor on the step size
        max_relative_change = 0.3  # cap on the relative change of the point per step
        tolerance = 1.0e-11        # convergence tolerance on the update size
        gd_parameters = GradientDescentParameters(
            max_num_steps,
            max_num_restarts,
            num_steps_averaged,
            gamma,
            pre_mult,
            max_relative_change,
            tolerance,
        )
        num_multistarts = 3  # again, too few multistarts; but we want the test to run reasonably quickly

        num_sampled = 10
        self.gp_test_environment_input.num_sampled = num_sampled
        _, gaussian_process = self._build_gaussian_process_test_data(
            self.gp_test_environment_input)
        python_cov, historical_data = gaussian_process.get_core_data_copy()

        lml = GaussianProcessLogMarginalLikelihood(python_cov, historical_data)

        domain = TensorProductDomain(
            [ClosedInterval(1.0, 4.0)] *
            self.gp_test_environment_input.num_hyperparameters)

        hyperparameter_optimizer = GradientDescentOptimizer(
            domain, lml, gd_parameters)
        best_hyperparameters = multistart_hyperparameter_optimization(
            hyperparameter_optimizer, num_multistarts)

        # Check that gradients are small
        lml.hyperparameters = best_hyperparameters
        gradient = lml.compute_grad_log_likelihood()
        self.assert_vector_within_relative(
            gradient, numpy.zeros(self.num_hyperparameters), tolerance)

        # Check that output is in the domain
        # Plain truthiness: `is True` would fail if check_point_inside returns numpy.bool_.
        assert domain.check_point_inside(best_hyperparameters)

        numpy.random.set_state(random_state)
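
The pattern under test, run a local gradient method from several random starting points and keep the best endpoint, guards against gradient descent stalling at a poor local optimum of the (generally multimodal) log marginal likelihood. A minimal sketch of the idea, using hypothetical names and a toy objective rather than MOE's implementation:

    import numpy

    def multistart_maximize(objective, local_optimize, sample_start, num_multistarts):
        """Run local_optimize from several random starts; return the best endpoint."""
        best_point, best_value = None, -numpy.inf
        for _ in range(num_multistarts):
            candidate = local_optimize(sample_start())
            value = objective(candidate)
            if value > best_value:
                best_point, best_value = candidate, value
        return best_point

    # Toy problem: maximize f(x) = -||x - 2||^2 over the box [1, 4]^2.
    def f(x):
        return -numpy.sum((x - 2.0) ** 2)

    def ascend(x, step=0.05, num_steps=200):
        # Fixed-step gradient ascent; the gradient of f is -2 * (x - 2).
        # Each step is clipped back into the box, mirroring a constrained domain.
        for _ in range(num_steps):
            x = numpy.clip(x + step * (-2.0 * (x - 2.0)), 1.0, 4.0)
        return x

    best = multistart_maximize(
        f, ascend, lambda: numpy.random.uniform(1.0, 4.0, size=2), num_multistarts=3)
    # best lands at (approximately) [2.0, 2.0], the global maximizer.

In the test above, multistart_hyperparameter_optimization plays the role of multistart_maximize, with GradientDescentOptimizer as the local optimizer and TensorProductDomain supplying the box constraints and (presumably) the random starting points.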