Example #1
import unittest

import numpy as np

# The import paths below assume GPyOpt's usual module layout and may differ
# between versions.
from GPyOpt.core.errors import InvalidVariableNameError
from GPyOpt.core.task.space import Design_space
from GPyOpt.optimization.optimizer import choose_optimizer


class TestOptimizerCreation(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(TestOptimizerCreation, self).__init__(*args, **kwargs)

        self.space = [
            {'name': 'var_1', 'type': 'continuous', 'domain': (-1, 1), 'dimensionality': 1},
            {'name': 'var_2', 'type': 'continuous', 'domain': (-1, 1), 'dimensionality': 1}
        ]
        self.design_space = Design_space(self.space)
        self.f = lambda x: np.sum(np.sin(x))

    def test_invalid_optimizer_name_raises_error(self):
        self.assertRaises(InvalidVariableNameError,
                          choose_optimizer, 'asd', None)

    def test_create_lbfgs_optimizer(self):
        optimizer = choose_optimizer('lbfgs', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)

    def test_create_direct_optimizer(self):
        optimizer = choose_optimizer('DIRECT', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)

    def test_create_cma_optimizer(self):
        optimizer = choose_optimizer('CMA', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)
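The tests above only check that choose_optimizer returns an object. As a rough usage sketch, the snippet below minimises the same self.f objective with the L-BFGS optimizer; it assumes GPyOpt's usual module layout and the optimize(x0, f, df, f_df) signature of its local optimizers, both of which may differ between versions.

import numpy as np
from GPyOpt.core.task.space import Design_space
from GPyOpt.optimization.optimizer import choose_optimizer

space = Design_space([
    {'name': 'var_1', 'type': 'continuous', 'domain': (-1, 1), 'dimensionality': 1},
    {'name': 'var_2', 'type': 'continuous', 'domain': (-1, 1), 'dimensionality': 1},
])
optimizer = choose_optimizer('lbfgs', space.get_bounds())

# Minimise sum(sin(x)) over the box [-1, 1]^2, starting from the origin.
x_min, f_min = optimizer.optimize(
    x0=np.zeros(2),
    f=lambda x: np.sum(np.sin(x)),
)
print(x_min, f_min)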
Example #2
import itertools

import numpy as np

# Assumed imports (GPyOpt's usual layout); fun_dfun and indicator_boundaries
# are project-specific helpers that are not shown in this snippet.
from GPyOpt.core.task.space import Design_space
from GPyOpt.optimization.optimizer import OptLbfgs


def get_GP_optimum(obj):
    """
    Finds the optimal design by maximising the mean of the surrogate
    probabilistic GP model.

    Parameters
    ----------
    obj: GPyOpt object
        The GPyOpt object with a surrogate probabilistic model.
    """

    # Define space
    space = Design_space(obj.domain, obj.constraints)
    bounds = space.get_bounds()

    # Specify optimizer: L-BFGS
    optimizer = OptLbfgs(bounds, maxiter=1000)

    # Do the optimisation
    x, _ = optimizer.optimize(
        x0=obj.x_opt,
        f=lambda d: fun_dfun(obj, space, d)[0],
        f_df=lambda d: fun_dfun(obj, space, d),
    )
    # TODO: MULTIPLE RE-STARTS FROM PREVIOUS BEST POINTS

    # Round values if space is discrete
    xtest = space.round_optimum(x)[0]

    if space.indicator_constraints(xtest):
        opt = xtest
    else:
        # Rounding mixed things up, so need to look at neighbours

        # Compute neighbours to optimum
        idx_comb = np.array(
            list(itertools.product([-1, 0, 1], repeat=len(bounds))))
        opt_combs = idx_comb + xtest

        # Evaluate
        GP_evals = list()
        combs = list()
        for d in opt_combs:
            cons_check = space.indicator_constraints(d)[0][0]
            bounds_check = indicator_boundaries(bounds, d)[0][0]

            if cons_check * bounds_check == 1:
                pred = obj.model.predict(d)[0][0][0]
                GP_evals.append(pred)
                combs.append(d)

        idx_opt = int(np.argmin(GP_evals))
        opt = combs[idx_opt]

    return opt
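As noted in the imports, indicator_boundaries is a project-specific helper that is not shown here. Purely to illustrate how the [0][0] indexing above could be satisfied, a bounds check along the following lines would fit; this is a guess at the helper's intent, not its actual implementation.

import numpy as np

def indicator_boundaries(bounds, x):
    # 1.0 if every coordinate of x lies inside its (lower, upper) bound, else
    # 0.0, wrapped so that indicator_boundaries(bounds, x)[0][0] yields the flag.
    inside = all(lo <= xi <= hi for xi, (lo, hi) in zip(np.ravel(x), bounds))
    return np.array([[1.0 if inside else 0.0]])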
Example #3
    def test_bounds(self):
        space = [{
            'name': 'var_1',
            'type': 'continuous',
            'domain': (-3, 1),
            'dimensionality': 1
        }, {
            'name': 'var_2',
            'type': 'discrete',
            'domain': (0, 1, 2, 3)
        }, {
            'name': 'var_3',
            'type': 'categorical',
            'domain': (2, 4)
        }, {
            'name': 'var_4',
            'type': 'bandit',
            'domain': np.array([[-2], [0], [2]])
        }]

        design_space = Design_space(space)
        bounds = design_space.get_bounds()

        # Continuous variable bound
        self.assertIn((-3, 1), bounds)
        # Discrete variable bound
        self.assertIn((0, 3), bounds)
        # Bandit variable bound
        self.assertIn((-2, 2), bounds)
        # Categorical variable bound
        self.assertIn((0, 1), bounds)
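The (0, 1) entry for the categorical variable reflects the fact that GPyOpt typically handles categorical variables through a one-hot style encoding, so each level contributes a unit interval to the bounds. The sketch below only illustrates that idea and is not GPyOpt's implementation.

def categorical_bounds(levels):
    # One (0, 1) interval per category level, mirroring a one-hot encoding.
    return [(0, 1)] * len(levels)

categorical_bounds((2, 4))  # -> [(0, 1), (0, 1)]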
Example #4
    def test_bandit_bounds(self):
        space = [{'name': 'var_4', 'type': 'bandit', 'domain': np.array([[-2], [0], [2]])}]

        design_space = Design_space(space)
        bounds = design_space.get_bounds()

        # Bandit variable bound
        self.assertIn((-2, 2), bounds)
Example #5
    def test_bandit_bounds(self):
        space = [{
            'name': 'var_4',
            'type': 'bandit',
            'domain': np.array([[-2], [0], [2]])
        }]

        design_space = Design_space(space)
        bounds = design_space.get_bounds()

        # Bandit variable bound
        self.assertIn((-2, 2), bounds)
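For a bandit variable the domain is an explicit array of arms, and its bound is the per-column (minimum, maximum) over those arms, here (-2, 2). One way to compute that reduction, shown only as an illustration rather than GPyOpt's own code:

import numpy as np

def bandit_bounds(domain):
    # Per-column (min, max) over the rows of the arm matrix.
    return [(lo, hi) for lo, hi in zip(domain.min(axis=0), domain.max(axis=0))]

bandit_bounds(np.array([[-2], [0], [2]]))  # -> [(-2, 2)]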
Example #6
class TestOptimizerCreation(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(TestOptimizerCreation, self).__init__(*args, **kwargs)

        self.space = [{
            'name': 'var_1',
            'type': 'continuous',
            'domain': (-1, 1),
            'dimensionality': 1
        }, {
            'name': 'var_2',
            'type': 'continuous',
            'domain': (-1, 1),
            'dimensionality': 1
        }]
        self.design_space = Design_space(self.space)
        self.f = lambda x: np.sum(np.sin(x))

    def test_invalid_optimizer_name_raises_error(self):
        self.assertRaises(InvalidVariableNameError, choose_optimizer, 'asd',
                          None)

    def test_create_lbfgs_optimizer(self):
        optimizer = choose_optimizer('lbfgs', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)

    def test_create_direct_optimizer(self):
        optimizer = choose_optimizer('DIRECT', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)

    def test_create_cma_optimizer(self):
        optimizer = choose_optimizer('CMA', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)
Example #7
    def test_bounds(self):
        space = [
            {'name': 'var_1', 'type': 'continuous', 'domain': (-3, 1), 'dimensionality': 1},
            {'name': 'var_2', 'type': 'discrete', 'domain': (0, 1, 2, 3)},
            {'name': 'var_3', 'type': 'categorical', 'domain': (2, 4)}
        ]

        design_space = Design_space(space)
        bounds = design_space.get_bounds()

        # Continuous variable bound
        self.assertIn((-3, 1), bounds)
        # Discrete variable bound
        self.assertIn((0, 3), bounds)
        # Categorical variable bound
        self.assertIn((0, 1), bounds)
Example #8
    def setUp(self):
        np.random.seed(123)
        domain          = [{'name': 'var1', 'type': 'continuous', 'domain': (-5, 5), 'dimensionality': 5}]
        space           = Design_space(domain)
        func            = alpine1(input_dim=5, bounds=space.get_bounds())
        bo              = BayesianOptimization(f=func.f, domain=domain)
        context         = {'var1_1': 0.3, 'var1_2': 0.4}
        context_manager = ContextManager(space, context)
        x0              = np.array([[0, 0, 0, 0, 0]])

        # initialize the model in the least intrusive way possible
        bo.suggest_next_locations()

        f = bo.acquisition.acquisition_function
        f_df = bo.acquisition.acquisition_function_withGradients
        self.problem_with_context = OptimizationWithContext(x0=x0, f=f, df=None, f_df=f_df, context_manager=context_manager)
        self.x = np.array([[3, -3, 3]])
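The context fixes the first two coordinates of the 5-dimensional variable (var1_1 = 0.3, var1_2 = 0.4), so the optimisation and the test point self.x only cover the remaining three coordinates. The helper below is hypothetical and not part of GPyOpt; it just illustrates how a context-aware wrapper conceptually rebuilds the full input before evaluating the acquisition.

import numpy as np

def expand_with_context(x_free, context_values, context_idx, n_dims):
    # Hypothetical illustration: combine the fixed context values with the
    # free (optimised) coordinates into one full-dimensional input.
    x_full = np.zeros((x_free.shape[0], n_dims))
    x_full[:, context_idx] = context_values
    free_idx = [i for i in range(n_dims) if i not in context_idx]
    x_full[:, free_idx] = x_free
    return x_full

expand_with_context(np.array([[3, -3, 3]]), [0.3, 0.4], [0, 1], 5)
# -> array([[ 0.3,  0.4,  3. , -3. ,  3. ]])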