Example 1
 def test_get_hypergrid(self):
     hyperparam_config = [HyperParameter(0, 4, 1), HyperParameter(0, 5, 2)]
     grid = get_hypergrid(hyperparam_config)
     target = np.array([[0., 0.], [0., 2.], [0., 4.], [1., 0.], [1., 2.],
                        [1., 4.], [2., 0.], [2., 2.], [2., 4.], [3., 0.],
                        [3., 2.], [3., 4.], [4., 0.], [4., 2.], [4., 4.]])
     np.testing.assert_almost_equal(grid, target, decimal=5)
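Judging by the expected target array, get_hypergrid enumerates each HyperParameter from its lower to its upper bound in fixed steps and takes the Cartesian product, with the first parameter varying slowest. A minimal sketch of that behavior (the field names lower_bound, upper_bound and step are assumptions; only upper_bound appears in the snippets that follow):

import itertools
import numpy as np

def get_hypergrid_sketch(hyperparam_config):
    # Candidate values per parameter: lower_bound, lower_bound + step, ..., up to upper_bound
    axes = []
    for param in hyperparam_config:
        values = np.arange(param.lower_bound, param.upper_bound + param.step, param.step)
        axes.append(values[values <= param.upper_bound])
    # Cartesian product in row-major order, matching the target array in the test above
    return np.array(list(itertools.product(*axes)), dtype=float)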
Example 2
 def test_prune_hypergrid(self):
     hyperparam_config = [HyperParameter(0, 4, 1), HyperParameter(0, 5, 2)]
     tested_points = np.array([[0, 4.0], [1, 0], [3, 2], [4, 2]])
     fullgrid = get_hypergrid(hyperparam_config)
     grid = prune_hypergrid(fullgrid, tested_points=tested_points)
     target = np.array([[0., 0.], [0., 2.], [1., 2.], [1., 4.], [2., 0.],
                        [2., 2.], [2., 4.], [3., 0.], [3., 4.], [4., 0.],
                        [4., 4.]])
     np.testing.assert_almost_equal(grid, target, decimal=5)
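Consistent with this test, prune_hypergrid appears to drop every grid row that already appears among the tested points. A minimal sketch of that filtering, assuming both arrays have shape (n, n_hyperparameters):

import numpy as np

def prune_hypergrid_sketch(hypergrid, tested_points):
    # A grid row is kept only if it does not (approximately) match any tested point
    matches = np.all(np.isclose(hypergrid[:, None, :], tested_points[None, :, :]), axis=2)
    return hypergrid[~matches.any(axis=1)]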
Example 3
def minimize_function(function: Callable,
                      hyperparams_config: List[HyperParameter],
                      extra_function_args: Tuple = (),
                      tolerance: float = 1e-2,
                      max_iterations: int = 1000,
                      seed: int = None) -> Tuple[np.ndarray, float]:
    """
    Find the minimum of a function

    :param function: Function to be optimized. It will be called as ``f(x, *extra_function_args)``,
        where ``x`` is the argument in the form of a 1-D array and ``extra_function_args``
        is a tuple of any additional fixed parameters needed to completely specify the function.
    :param hyperparams_config: List of hyperparameters
    :param extra_function_args: Function is called as ``f(x, *extra_function_args)``
    :param tolerance: Convergence tolerance; the optimization stops when (last_best_value - new_best_value) < tolerance
    :param max_iterations: Maximum allowed number of iterations
    :param seed: randomizer seed

    :return:
        - Solution (best point)
        - Function value at best point
    """
    if seed is not None:
        np.random.seed(seed)

    hypergrid = get_hypergrid(hyperparams_config)
    tested_points = np.empty((0, len(hyperparams_config)))
    tested_values = np.empty((0, 1))
    maxvalue = hyperparams_config[0].upper_bound + 1

    old_minimum = np.inf
    num_points = 1
    new_points = np.ones(
        (num_points, len(hyperparams_config)), dtype=float) * maxvalue

    max_iterations = min(len(hypergrid), max_iterations)

    for _ in range(max_iterations):
        hypergrid, new_points = _propose_new_points(new_points, hypergrid,
                                                    hyperparams_config,
                                                    num_points)

        new_values = np.empty((num_points, 1))
        for point_idx in range(num_points):
            new_values[point_idx, 0] = function(new_points[point_idx, :],
                                                *extra_function_args)

        tested_points = np.concatenate((tested_points, new_points), axis=0)
        tested_values = np.concatenate((tested_values, new_values), axis=0)

        # Stop once the best value no longer improves by more than the tolerance
        if np.abs(old_minimum - np.min(tested_values)) < tolerance:
            break
        old_minimum = np.min(tested_values)

    best_point = np.argmin(tested_values, axis=0).item()
    return tested_points[best_point], tested_values[best_point]
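A hypothetical usage of minimize_function above, following the ``f(x, *extra_function_args)`` convention from the docstring; the quadratic objective and the bounds are made up for illustration:

def shifted_quadratic(x, offset):
    return float(np.sum((x - offset) ** 2))

hyperparams = [HyperParameter(0, 10, 1), HyperParameter(0, 10, 1)]
best_point, best_value = minimize_function(shifted_quadratic, hyperparams,
                                           extra_function_args=(3.0,),
                                           tolerance=1e-3, seed=42)
# best_point is expected to lie at or near (3, 3) for this objective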
Example 4
    def test_get_new_unique_points_UCB(self):
        hyperparam_config = [HyperParameter(0, 3, 1), HyperParameter(0, 5, 2)]
        grid = get_hypergrid(hyperparam_config)
        known_points = np.array([[0, 0]])
        values = np.array([-2])

        gp_settings = dict(kernel=rbf, kernel_params=(0.02, 0.25), noise=1e-6)
        gp = GaussianProcessRegression(gp_settings["kernel"],
                                       *gp_settings["kernel_params"],
                                       noise=gp_settings["noise"])
        gp.fit(known_points, values, grid)
        ucb_tradeoff = 0.5
        new_points = _get_new_unique_point_UCB(grid, gp, ucb_tradeoff)
        target = np.array([[0., 0.]])
        np.testing.assert_allclose(new_points, target, atol=1e-5)
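The acquisition rule itself is not shown here, but a common UCB-style rule for minimization scores every grid point by its predicted mean minus a tradeoff times its predicted standard deviation and returns the lowest-scoring point. A sketch of that rule; the mu and sigma arrays are assumed inputs, not an actual GaussianProcessRegression API:

import numpy as np

def ucb_select_sketch(grid, mu, sigma, tradeoff):
    # Lower mean and larger uncertainty both reduce the score, so argmin balances
    # exploitation against exploration
    scores = mu - tradeoff * sigma
    return grid[np.argmin(scores)][None, :]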
Example 5
    def test_get_new_unique_points_Thompson(self):
        hyperparam_config = [HyperParameter(0, 3, 1), HyperParameter(0, 5, 2)]
        grid = get_hypergrid(hyperparam_config)
        num_points = 5

        gp_settings = dict(kernel=rbf, kernel_params=(0.02, 0.25), noise=1e-6)
        gp = GaussianProcessRegression(gp_settings["kernel"],
                                       *gp_settings["kernel_params"],
                                       noise=gp_settings["noise"])
        gp.initialize(grid)
        new_points = _get_new_unique_points_Thompson(grid, gp, num_points)

        for i in range(len(new_points)):
            for j in range(i + 1, len(new_points)):
                self.assertFalse(np.array_equal(new_points[i], new_points[j]))
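Thompson sampling typically draws a realization of the objective from the GP posterior over the grid and takes its argmin. A minimal sketch of that sampling step, assuming a hypothetical posterior_sample(grid) callable that returns one sampled value per grid row; the deduplication asserted in the test above is omitted:

import numpy as np

def thompson_select_sketch(grid, posterior_sample, num_points):
    picks = []
    for _ in range(num_points):
        sample = posterior_sample(grid)        # one posterior draw per grid point
        picks.append(grid[np.argmin(sample)])  # minimize the sampled realization
    return np.array(picks)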
Example 6
    def test_get_new_unique_point(self):
        hyperparam_config = [
            HyperParameter(0, 4, 1),
            HyperParameter(0, 5, 2)
        ]
        grid = get_hypergrid(hyperparam_config)
        previous_points = np.array([
            [0, 4],
            [1, 0],
            [3, 2],
            [4, 2]
        ])

        for idx in range(100):
            self.assertTrue(not_in_array(_get_new_unique_point(previous_points, grid, 100), previous_points))

        # Check error
        with self.assertRaises(OptimizerError):
            _get_new_unique_point(previous_points, previous_points)
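This test pins down _get_new_unique_point fairly tightly: it must return a grid point not present in previous_points and raise OptimizerError when no such point can be found within the allowed number of attempts. A sketch consistent with those constraints (treating the third argument as a trial budget is an assumption; OptimizerError is the exception the test above expects):

import numpy as np

def get_new_unique_point_sketch(previous_points, grid, max_trials=100):
    for _ in range(max_trials):
        candidate = grid[np.random.randint(len(grid))]
        if not np.any(np.all(np.isclose(previous_points, candidate), axis=1)):
            return candidate
    raise OptimizerError("No unseen grid point found within the trial budget")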
Example 7
    def test_get_new_unique_point_Thompson(self):
        hyperparam_config = [HyperParameter(0, 3, 1), HyperParameter(0, 5, 2)]
        grid = get_hypergrid(hyperparam_config)
        previous_points = np.array([[0, 4], [1, 0], [3, 2], [4, 2]])

        gp_settings = dict(kernel=rbf, kernel_params=(0.02, 0.25), noise=1e-6)
        gp = GaussianProcessRegression(gp_settings["kernel"],
                                       *gp_settings["kernel_params"],
                                       noise=gp_settings["noise"])
        gp.initialize(grid)

        for idx in range(100):
            self.assertTrue(
                not_in_array(
                    _get_single_point_Thompson(previous_points, grid, gp, 100),
                    previous_points))

        # Check error
        gp.initialize(previous_points)
        with self.assertRaises(OptimizerError):
            _get_single_point_Thompson(previous_points, previous_points, gp)
Example 8
def propose_points(tested_points: np.ndarray,
                   tested_values: np.ndarray,
                   hyperparams_config: List[HyperParameter],
                   num_points: int,
                   seed: int = None,
                   acquisition_function: str = "Thompson",
                   gp_settings: Dict = None) -> np.ndarray:
    """
    Propose new points to test based on past values. The proposed points should lead toward the function's minimum.

    :param tested_points: previously tested points with dims (n_points, n_hyperparameters)
    :param tested_values: Function values for previously tested points, with dims (n_points, 1)
    :param hyperparams_config: List of hyperparameters
    :param num_points: number of points to propose
    :param seed: randomizer seed
    :param acquisition_function: "Thompson" for Thompson sampling or "UCB" for Upper Confidence Bound
    :param gp_settings: settings for the GaussianProcessRegression: kernel, kernel_params and noise
    :return: New points to test with dims (num_points, n_hyperparameters)
    """

    if seed is not None:
        np.random.seed(seed)

    if gp_settings is None:
        gp_settings = dict(kernel=rbf, kernel_params=(0.1, 1), noise=1e-6)

    hypergrid = get_hypergrid(hyperparams_config)

    hypergrid, new_points = _propose_new_points(tested_points, tested_points,
                                                tested_values, hypergrid,
                                                hyperparams_config, num_points,
                                                acquisition_function,
                                                gp_settings)

    return new_points
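A hypothetical call to this propose_points variant, reusing the HyperParameter style from the tests above; the two already-evaluated points and their values are made up:

tested = np.array([[0., 0.], [2., 4.]])
values = np.array([[1.5], [0.7]])
config = [HyperParameter(0, 4, 1), HyperParameter(0, 5, 2)]
candidates = propose_points(tested, values, config, num_points=3,
                            seed=0, acquisition_function="UCB")
# candidates has shape (3, 2): three untested grid points to evaluate next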
Example 9
def propose_points(tested_points: np.ndarray,
                   tested_values: np.ndarray,
                   hyperparams_config: List[HyperParameter],
                   num_points: int,
                   seed: int = None) -> np.ndarray:
    """
    Propose new points to test based on past values

    :param tested_points: previously tested points with dims (n_points, n_hyperparameters)
    :param tested_values: Function values for previously tested points, with dims (n_points, 1)
    :param hyperparams_config: List of hyperparameters
    :param num_points: number of points to propose
    :param seed: randomizer seed
    :return: New points to test with dims (num_points, n_hyperparameters)
    """

    if seed is not None:
        np.random.seed(seed)

    hypergrid = get_hypergrid(hyperparams_config)
    hypergrid, new_points = _propose_new_points(tested_points, hypergrid,
                                                hyperparams_config, num_points)

    return new_points
Example 10
def minimize_function(function: Callable,
                      hyperparams_config: List[HyperParameter],
                      extra_function_args: Tuple = (),
                      tolerance: float = 1e-2,
                      max_iterations: int = 1000,
                      seed: int = None,
                      acquisition_function: str = "UCB",
                      gp_settings: Dict = None) -> Tuple[np.ndarray, float]:
    """
    Find the minimum of a function

    :param function: Function to be optimized. It will be called as ``f(x, *extra_function_args)``,
        where ``x`` is the argument in the form of a 1-D array and ``extra_function_args``
        is a tuple of any additional fixed parameters needed to completely specify the function.
    :param hyperparams_config: List of hyperparameters
    :param extra_function_args: Function is called as ``f(x, *extra_function_args)``
    :param tolerance: Convergence tolerance; the optimization stops when (last_best_value - new_best_value) < tolerance
    :param max_iterations: Maximum allowed number of iterations
    :param seed: randomizer seed
    :param acquisition_function: "Thompson" for Thompson sampling or "UCB" for Upper Confidence Bound
    :param gp_settings: settings for the GaussianProcessRegression: kernel, kernel_params and noise

    :return:
        - Solution (best point)
        - Function value at best point
    """
    if seed is not None:
        np.random.seed(seed)

    if gp_settings is None:
        gp_settings = dict(kernel=rbf, kernel_params=(0.1, 1), noise=1e-6)

    # ------ Define the initial grid
    hypergrid = get_hypergrid(hyperparams_config)
    tested_points = np.empty((0, len(hyperparams_config)))
    tested_values = np.empty((0, 1))

    # ------ Define useful constants
    old_minimum = np.inf
    num_points = 1
    new_points = np.empty((0, len(hyperparams_config)))

    max_iterations = min(len(hypergrid), max_iterations)
    stopping_counter = 0
    for _ in range(max_iterations):
        # ------ Ask the optimizer for new points
        hypergrid, new_points = _propose_new_points(
            new_points, tested_points, tested_values, hypergrid,
            hyperparams_config, num_points, acquisition_function, gp_settings)

        new_values = np.empty((num_points, 1))
        for point_idx in range(num_points):
            new_values[point_idx, 0] = function(new_points[point_idx, :],
                                                *extra_function_args)

        tested_points = np.concatenate((tested_points, new_points), axis=0)
        tested_values = np.concatenate((tested_values, new_values), axis=0)

        # ------ Compute the stopping criterion
        if np.abs(old_minimum - np.min(tested_values)) < tolerance:
            stopping_counter += 1
        else:
            stopping_counter = 0

        if stopping_counter >= 10:
            break

        old_minimum = np.min(tested_values)

    best_point = np.argmin(tested_values, axis=0).item()
    return tested_points[best_point], tested_values[best_point]
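A hypothetical end-to-end call to this variant with an explicit acquisition function and GP configuration; the objective and bounds are made up, and gp_settings mirrors the defaults used above:

config = [HyperParameter(0, 10, 1), HyperParameter(0, 10, 1)]
best_x, best_f = minimize_function(lambda x: float(np.sum((x - 7) ** 2)),
                                   config,
                                   tolerance=1e-3,
                                   max_iterations=200,
                                   seed=1,
                                   acquisition_function="Thompson",
                                   gp_settings=dict(kernel=rbf,
                                                    kernel_params=(0.1, 1),
                                                    noise=1e-6))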