Example #1
 def __init__(self, dim=2, bounds=None,
              noise_std=0, random_state=None):
     self.dim = dim
     params = {'x%d' % i: (0, 10, 5) for i in range(1, 1+self.dim)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
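The constructor above builds a dim-dimensional space of hyperparameters x1 .. x<dim>, each uniform on (0, 10) with default value 5. A minimal standalone sketch of the same pattern (the printed values are illustrative):

from openbox.utils.config_space import ConfigurationSpace, UniformFloatHyperparameter

dim = 2
cs = ConfigurationSpace()
cs.add_hyperparameters([UniformFloatHyperparameter('x%d' % i, 0, 10, default_value=5)
                        for i in range(1, dim + 1)])
config = cs.sample_configuration()   # draw one random configuration from the space
print(config.get_dictionary())       # e.g. {'x1': 3.27, 'x2': 7.91}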
Example #2
 def get_cs_bc():
     cs_bc = ConfigurationSpace()
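     # scale1 and scale2 are assumed to be (lower, upper) bound pairs defined at
     # module level; they are not part of this snippet.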
     x0 = UniformFloatHyperparameter("x0", scale1[0], scale1[1])
     # x0 = UniformIntegerHyperparameter("x0", scale1[0], scale1[1])  # test int
     x1 = UniformFloatHyperparameter("x1", scale2[0], scale2[1])
     cs_bc.add_hyperparameters([x0, x1])
     return cs_bc
Example #3
 def __init__(self, noise_std=0, random_state=None):
     params = {'x%d' % i: (-1.25, 1.25, 1) for i in [1, 2]}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=-0.072,
                      optimal_point=[(0.84852813, -0.84852813), (-0.84852813, 0.84852813)],
                      random_state=random_state)
Example #4
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-10, 0, -5), 'x2': (-6.5, 0, -3.25)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=-106.7645367,
                      optimal_point=[(-3.1302468, -1.5821422)],
                      random_state=random_state)
Example #5
 def __init__(self, noise_std=0, random_state=None):
     lb, ub = -4.5, 4.5
     dim = 2
     params = {'x%d' % i: (lb, ub, (lb + ub)/2)
               for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
Example #6
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-5, 10, 0),
               'x2': (0, 15, 0)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0.397887,
                      optimal_point=[(-np.pi, 12.275), (np.pi, 2.275), (9.42478, 2.475)],
                      random_state=random_state)
Example #7
    def __init__(self, noise_std=0, random_state=None):
        self.ref_point = [1864.72022, 11.81993945, 0.2903999384]

        params = {'x%d' % i: (1.0, 3.0) for i in range(1, 6)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=3,
                         random_state=random_state)
Example #8
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-15.0, -5.0, -10.0),
               'x2': (-3.0, 3.0, 0)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0,
                      optimal_point=[(-10.0, 1.0)],
                      random_state=random_state)
Example #9
 def __init__(self, dim: int, num_constraints=0, noise_std=0, random_state=None):
     self.dim = dim
     self.ref_point = [11.0, 11.0]
     params = {'x%d' % i: (0, 1) for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      num_objs=2, num_constraints=num_constraints,
                      random_state=random_state)
Example #10
 def __init__(self, dim=2, constrained=False, noise_std=0, random_state=None):
     self.dim = dim
     self.constrained = constrained
     params = {'x%d' % i: (-5.0, 10.0, 2.5) for i in range(1, 1+self.dim)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0,
                      optimal_point=[tuple(1.0 for _ in range(self.dim))],
                      random_state=random_state)
Example #11
    def __init__(self, noise_std=0, random_state=None):
        self.ref_point = [10.0, 10.0]

        params = {'x1': (0.1, 10.0),
                  'x2': (0.0, 5.0)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=2, num_constraints=2,
                         random_state=random_state)
Example #12
def get_xgboost_config_space(task_type='cls'):
    if task_type == 'cls':
        cs = ConfigurationSpace()
        n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                    100,
                                                    1000,
                                                    q=50,
                                                    default_value=500)
        max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)
        learning_rate = UniformFloatHyperparameter("learning_rate",
                                                   1e-3,
                                                   0.9,
                                                   log=True,
                                                   default_value=0.1)
        min_child_weight = UniformFloatHyperparameter("min_child_weight",
                                                      0,
                                                      10,
                                                      q=0.1,
                                                      default_value=1)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.1,
                                               1,
                                               q=0.1,
                                               default_value=1)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.1,
                                                      1,
                                                      q=0.1,
                                                      default_value=1)
        gamma = UniformFloatHyperparameter("gamma",
                                           0,
                                           10,
                                           q=0.1,
                                           default_value=0)
        reg_alpha = UniformFloatHyperparameter("reg_alpha",
                                               0,
                                               10,
                                               q=0.1,
                                               default_value=0)
        reg_lambda = UniformFloatHyperparameter("reg_lambda",
                                                1,
                                                10,
                                                q=0.1,
                                                default_value=1)
        cs.add_hyperparameters([
            n_estimators, max_depth, learning_rate, min_child_weight,
            subsample, colsample_bytree, gamma, reg_alpha, reg_lambda
        ])
        return cs
    elif task_type == 'rgs':
        raise NotImplementedError
    else:
        raise ValueError('Unsupported task type: %s.' % (task_type, ))
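The hyperparameter names above match the keyword arguments of xgboost's scikit-learn wrapper, so a sampled configuration can be unpacked directly into the estimator. A minimal sketch, assuming xgboost is installed and X_train / y_train exist (both assumptions, not part of the snippet):

from xgboost import XGBClassifier

cs = get_xgboost_config_space('cls')
config = cs.sample_configuration()                  # one random point in the space
model = XGBClassifier(**config.get_dictionary())    # names map 1:1 to estimator kwargs
# model.fit(X_train, y_train)                       # X_train / y_train are placeholders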
Example #13
    def __init__(self, constrained=False, noise_std=0, random_state=None):
        self.ref_point = [18.0, 6.0]
        self.constrained = constrained
        num_constraints = 1 if self.constrained else 0

        params = {'x1': (0, 1, 0.5),
                  'x2': (0, 1, 0.5)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=2,
                         num_constraints=num_constraints,
                         random_state=random_state)
Example #14
 def __init__(self, dim, num_objs=2, num_constraints=0, noise_std=0, random_state=None):
     if dim <= num_objs:
         raise ValueError(
             "dim must be > num_objs, but got %s and %s" % (dim, num_objs)
         )
     self.dim = dim
     self.k = self.dim - num_objs + 1
     self.bounds = [(0.0, 1.0) for _ in range(self.dim)]
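     # self._ref_val is assumed to be defined by the concrete benchmark class;
     # it is not part of this snippet.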
     self.ref_point = [self._ref_val for _ in range(num_objs)]
     params = {'x%d' % i: (0, 1, i/dim) for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, num_objs, num_constraints, random_state=random_state)
Example #15
    def __init__(self, dim=2, bounds=None, constrained=False,
                 noise_std=0, random_state=None):
        self.constrained = constrained
        if bounds is None:
            if constrained:
                lb, ub = -5, 10
            else:
                lb, ub = -10, 15
        else:
            lb, ub = bounds

        params = {'x%d' % i: (lb, ub, (lb + ub)/2)
                  for i in range(1, dim+1)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
Example #16
    def get_hyperparameter_search_space(space_size='large'):
        """
            ['n_estimators', 'learning_rate', 'max_depth', 'colsample_bytree', 'gamma',
                'min_child_weight',  'reg_alpha', 'reg_lambda', 'subsample']
        """
        cs = ConfigurationSpace()
        if space_size == 'large':
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UniformFloatHyperparameter("colsample_bytree", 0.1, 1, q=0.1, default_value=1)
            gamma = UniformFloatHyperparameter("gamma", 0, 10, q=0.1, default_value=0)

            min_child_weight = UniformFloatHyperparameter("min_child_weight", 0, 10, q=0.1, default_value=1)
            reg_alpha = UniformFloatHyperparameter("reg_alpha", 0, 10, q=0.1, default_value=0)
            reg_lambda = UniformFloatHyperparameter("reg_lambda", 1, 10, q=0.1, default_value=1)
            subsample = UniformFloatHyperparameter("subsample", 0.1, 1, q=0.1, default_value=1)
        elif space_size == 'medium':
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UniformFloatHyperparameter("colsample_bytree", 0.1, 1, q=0.1, default_value=1)
            gamma = UniformFloatHyperparameter("gamma", 0, 10, q=0.1, default_value=0)
            min_child_weight = UniformFloatHyperparameter("min_child_weight", 0, 10, q=0.1, default_value=1)

            reg_alpha = UnParametrizedHyperparameter("reg_alpha", 0)
            reg_lambda = UnParametrizedHyperparameter("reg_lambda", 1)
            subsample = UnParametrizedHyperparameter("subsample", 1)
        else:
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UnParametrizedHyperparameter("colsample_bytree", 1)
            gamma = UnParametrizedHyperparameter("gamma", 0)
            min_child_weight = UnParametrizedHyperparameter("min_child_weight", 1)

            reg_alpha = UnParametrizedHyperparameter("reg_alpha", 0)
            reg_lambda = UnParametrizedHyperparameter("reg_lambda", 1)
            subsample = UnParametrizedHyperparameter("subsample", 1)

        cs.add_hyperparameters([n_estimators, max_depth, learning_rate, min_child_weight, subsample,
                                colsample_bytree, gamma, reg_alpha, reg_lambda])
        return cs
Example #17
def get_lightgbm_config_space(task_type='cls'):
    if task_type == 'cls':
        cs = ConfigurationSpace()
        n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                    100,
                                                    1000,
                                                    default_value=500,
                                                    q=50)
        num_leaves = UniformIntegerHyperparameter("num_leaves",
                                                  31,
                                                  2047,
                                                  default_value=128)
        max_depth = Constant('max_depth', 15)
        learning_rate = UniformFloatHyperparameter("learning_rate",
                                                   1e-3,
                                                   0.3,
                                                   default_value=0.1,
                                                   log=True)
        min_child_samples = UniformIntegerHyperparameter("min_child_samples",
                                                         5,
                                                         30,
                                                         default_value=20)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.7,
                                               1,
                                               default_value=1,
                                               q=0.1)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.7,
                                                      1,
                                                      default_value=1,
                                                      q=0.1)
        cs.add_hyperparameters([
            n_estimators, num_leaves, max_depth, learning_rate,
            min_child_samples, subsample, colsample_bytree
        ])
        return cs
    elif task_type == 'rgs':
        raise NotImplementedError
    else:
        raise ValueError('Unsupported task type: %s.' % (task_type, ))
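As with the XGBoost space above, these names match LightGBM's scikit-learn estimator. A minimal sketch, assuming lightgbm is installed (the estimator call is illustrative, not part of the snippet):

from lightgbm import LGBMClassifier

cs = get_lightgbm_config_space('cls')
default_config = cs.get_default_configuration()     # uses the declared default values
model = LGBMClassifier(**default_config.get_dictionary())
print(model.get_params()['num_leaves'])             # 128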
Example #18
 def get_cs_lightgbm():  # todo q and int for compare?
     cs = ConfigurationSpace()
     n_estimators = UniformFloatHyperparameter("n_estimators",
                                               100,
                                               1000,
                                               default_value=500,
                                               q=50)
     num_leaves = UniformIntegerHyperparameter("num_leaves",
                                               31,
                                               2047,
                                               default_value=128)
     # max_depth = Constant('max_depth', 15)
     learning_rate = UniformFloatHyperparameter("learning_rate",
                                                1e-3,
                                                0.3,
                                                default_value=0.1,
                                                log=True)
     min_child_samples = UniformIntegerHyperparameter("min_child_samples",
                                                      5,
                                                      30,
                                                      default_value=20)
     subsample = UniformFloatHyperparameter("subsample",
                                            0.7,
                                            1,
                                            default_value=1,
                                            q=0.1)
     colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                   0.7,
                                                   1,
                                                   default_value=1,
                                                   q=0.1)
     # cs.add_hyperparameters([n_estimators, num_leaves, max_depth, learning_rate, min_child_samples, subsample,
     #                         colsample_bytree])
     cs.add_hyperparameters([
         n_estimators, num_leaves, learning_rate, min_child_samples,
         subsample, colsample_bytree
     ])
     return cs
Example #19
logging.basicConfig(level=logging.INFO)

# Build Configuration Space which defines all parameters and their ranges
cs = ConfigurationSpace()

# We define a few possible types of SVM-kernels and add them as "kernel" to our cs
kernel = CategoricalHyperparameter("kernel",
                                   ["linear", "rbf", "poly", "sigmoid"],
                                   default_value="poly")
cs.add_hyperparameter(kernel)

# There are some hyperparameters shared by all kernels
C = UniformFloatHyperparameter("C", 0.001, 1000.0, default_value=1.0)
shrinking = CategoricalHyperparameter("shrinking", ["true", "false"],
                                      default_value="true")
cs.add_hyperparameters([C, shrinking])

# Others are kernel-specific, so we can add conditions to limit the searchspace
degree = UniformIntegerHyperparameter(
    "degree", 1, 5, default_value=3)  # Only used by kernel poly
coef0 = UniformFloatHyperparameter("coef0", 0.0, 10.0,
                                   default_value=0.0)  # poly, sigmoid
cs.add_hyperparameters([degree, coef0])
use_degree = InCondition(child=degree, parent=kernel, values=["poly"])
use_coef0 = InCondition(child=coef0, parent=kernel, values=["poly", "sigmoid"])
cs.add_conditions([use_degree, use_coef0])

# This also works for parameters that are a mix of categorical and values from a range of numbers
# For example, gamma can be either "auto" or a fixed float
gamma = CategoricalHyperparameter(
    "gamma", ["auto", "value"], default_value="auto")  # only rbf, poly, sigmoid
Example #20
    result = dict()
    result['objs'] = [
        t1 + t2 + t3,
    ]
    result['constraints'] = [
        np.sum((X + 5)**2) - 25,
    ]
    return result


if __name__ == "__main__":
    params = {'float': {'x0': (-10, 0, -5), 'x1': (-6.5, 0, -3.25)}}
    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        UniformFloatHyperparameter(name, *para)
        for name, para in params['float'].items()
    ])

    bo = SMBO(mishra,
              cs,
              num_constraints=1,
              num_objs=1,
              acq_optimizer_type='random_scipy',
              max_runs=50,
              time_limit_per_trial=10,
              task_id='soc')
    history = bo.run()

    print(history)

    history.plot_convergence(true_minimum=-106.7645367)
Example #21
import numpy as np
import matplotlib.pyplot as plt
from openbox.utils.config_space import ConfigurationSpace, UniformFloatHyperparameter
from openbox.optimizer.parallel_smbo import pSMBO

# Define Configuration Space
config_space = ConfigurationSpace()
x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
config_space.add_hyperparameters([x1, x2])


# Define Objective Function
def branin(config):
    config_dict = config.get_dictionary()
    x1 = config_dict['x1']
    x2 = config_dict['x2']

    a = 1.
    b = 5.1 / (4. * np.pi**2)
    c = 5. / np.pi
    r = 6.
    s = 10.
    t = 1. / (8. * np.pi)
    y = a * (x2 - b * x1**2 + c * x1 - r)**2 + s * (1 - t) * np.cos(x1) + s

    ret = dict(objs=(y, ))
    return ret
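
# Sanity check (illustrative, not part of the original script): Branin attains its
# known minimum of about 0.397887 at (pi, 2.275); see the optimal points in Example #6.
from ConfigSpace import Configuration
check = Configuration(config_space, values={'x1': float(np.pi), 'x2': 2.275})
print(branin(check))  # -> {'objs': (0.397887...,)}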


# Parallel Evaluation on Local Machine
Example #22
def townsend(config):
    config_dict = config.get_dictionary()
    X = np.array([config_dict['x%d' % i] for i in range(2)])
    res = dict()
    res['objs'] = (-(np.cos(
        (X[0] - 0.1) * X[1])**2 + X[0] * np.sin(3 * X[0] + X[1])), )
    res['constraints'] = (-(-np.cos(1.5 * X[0] + np.pi) * np.cos(1.5 * X[1]) +
                            np.sin(1.5 * X[0] + np.pi) * np.sin(1.5 * X[1])), )
    return res


townsend_params = {'float': {'x0': (-2.25, 2.5, 0), 'x1': (-2.5, 1.75, 0)}}
townsend_cs = ConfigurationSpace()
townsend_cs.add_hyperparameters([
    UniformFloatHyperparameter(name, *para)
    for name, para in townsend_params['float'].items()
])


def mishra(config):
    config_dict = config.get_dictionary()
    X = np.array([config_dict['x%d' % i] for i in range(2)])
    x, y = X[0], X[1]
    t1 = np.sin(y) * np.exp((1 - np.cos(x))**2)
    t2 = np.cos(x) * np.exp((1 - np.sin(y))**2)
    t3 = (x - y)**2

    result = dict()
    result['objs'] = [t1 + t2 + t3]
    result['constraints'] = [np.sum((X + 5)**2) - 25]
    return result
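Example #4 above lists the constrained optimum of this function: about -106.7645367 at (-3.1302468, -1.5821422). A quick check against the bounds used in Example #20 (illustrative, not part of the original script):

from ConfigSpace import Configuration, ConfigurationSpace, UniformFloatHyperparameter

mishra_cs = ConfigurationSpace()
mishra_cs.add_hyperparameters([UniformFloatHyperparameter('x0', -10, 0, default_value=-5),
                               UniformFloatHyperparameter('x1', -6.5, 0, default_value=-3.25)])
opt = Configuration(mishra_cs, values={'x0': -3.1302468, 'x1': -1.5821422})
print(mishra(opt))  # objs ~ [-106.7645...]; the constraint value is negative (feasible)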
Example #23
    f1 = (px2 - 5.1 / (4 * np.pi**2) * px1**2 + 5 / np.pi * px1 -
          6)**2 + 10 * (1 - 1 / (8 * np.pi)) * np.cos(px1) + 10
    f2 = (1 - np.exp(-1 /
                     (2 * x2))) * (2300 * x1**3 + 1900 * x1**2 + 2092 * x1 +
                                   60) / (100 * x1**3 + 500 * x1**2 + 4 * x1 +
                                          20)
    res['objs'] = [f1, f2]
    res['constraints'] = []
    return res


bc_params = {'float': {'x1': (0, 1, 0.5), 'x2': (0, 1, 0.5)}}
bc_cs = ConfigurationSpace()
bc_cs.add_hyperparameters([
    UniformFloatHyperparameter(e, *bc_params['float'][e])
    for e in bc_params['float']
])
bc_max_hv = 59.36011874867746
bc_ref_point = [18., 6.]

bo = SMBO(branin_currin,
          bc_cs,
          advisor_type='mcadvisor',
          task_id='mcparego',
          num_objs=2,
          acq_type='mcparego',
          ref_point=bc_ref_point,
          max_runs=100,
          random_state=2)
bo.run()
Example #24
    c = 5. / np.pi
    r = 6.
    s = 10.
    t = 1. / (8. * np.pi)
    ret = a * (x2 - b * x1 ** 2 + c * x1 - r) ** 2 + s * (1 - t) * np.cos(x1) + s

    result = dict()
    result['objs'] = (ret, )

    return result


cs = ConfigurationSpace()
x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
cs.add_hyperparameters([x1, x2])

i = 10
bo = SMBO(branin, cs, advisor_type='default', surrogate_type='gp',
          acq_optimizer_type='local_random', initial_runs=3,
          task_id='local_random_bo', random_state=i, max_runs=31, time_limit_per_trial=3, logging_dir='logs')
bo.run()

bo2 = SMBO(branin, cs, advisor_type='default', surrogate_type='gp',
           acq_optimizer_type='random_scipy', initial_runs=3,
           task_id='random_scipy_bo', random_state=i, max_runs=31, time_limit_per_trial=3, logging_dir='logs')
bo2.run()

print(bo.get_incumbent())
print(bo2.get_incumbent())
Example #25
    res['objs'] = [
        -(np.cos((X[0] - 0.1) * X[1])**2 + X[0] * np.sin(3 * X[0] + X[1]))
    ]
    res['constraints'] = [
        -(-np.cos(1.5 * X[0] + np.pi) * np.cos(1.5 * X[1]) +
          np.sin(1.5 * X[0] + np.pi) * np.sin(1.5 * X[1]))
    ]
    return res


# Send the task id and config space when registering the task
task_id = time.time()
townsend_params = {'float': {'x1': (-2.25, 2.5, 0), 'x2': (-2.5, 1.75, 0)}}
townsend_cs = ConfigurationSpace()
townsend_cs.add_hyperparameters([
    UniformFloatHyperparameter(e, *townsend_params['float'][e])
    for e in townsend_params['float']
])

max_runs = 50
# Create remote advisor
config_advisor = RemoteAdvisor(config_space=townsend_cs,
                               server_ip='127.0.0.1',
                               port=11425,
                               email='*****@*****.**',
                               password='******',
                               num_constraints=1,
                               max_runs=max_runs,
                               task_name="task_test",
                               task_id=task_id)

# Simulate max_runs iterations
Example #26
 def get_configspace():
     cs = ConfigurationSpace()
     x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
     x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
     cs.add_hyperparameters([x1, x2])
     return cs