def test_reproducible_runs(strategy, surrogate):
    # two runs of the optimizer should yield exactly the same results

    optimizer = Optimizer(base_estimator=surrogate(random_state=1),
                          dimensions=[Real(-5.0, 10.0),
                                      Real(0.0, 15.0)],
                          acq_optimizer='sampling',
                          random_state=1)

    points = []
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)
        points.append(x)
        optimizer.tell(x, [branin(v) for v in x])

    # the x's should be exactly as they are in `points`
    optimizer = Optimizer(base_estimator=surrogate(random_state=1),
                          dimensions=[Real(-5.0, 10.0),
                                      Real(0.0, 15.0)],
                          acq_optimizer='sampling',
                          random_state=1)
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)

        assert points[i] == x

        optimizer.tell(x, [branin(v) for v in x])
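
# The test snippets in this collection assume module-level fixtures roughly
# like the following (a sketch; the exact values used in scikit-optimize's own
# test suite may differ):
from scipy.spatial.distance import pdist
from numpy.testing import assert_equal, assert_raises

from skopt import Optimizer
from skopt.benchmarks import branin
from skopt.space import Categorical, Integer, Real
from skopt.utils import dimensions_aslist, point_asdict, point_aslist

n_steps = 5   # number of ask/tell iterations per test
n_points = 4  # number of points requested from each ask()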
Example #2
 def train_batchwise(self):
     opt = Optimizer(self.param_ranges, "GP")
     for batch in range(self.n_batches):
         print("starting a batch")
         for start in range(10):
             print("starting a round of optimization")
             # optimize one slice of 10 parameters per round
             self.training_param_range = [start * 10, start * 10 + 10]
             opt = Optimizer(self.param_ranges[start * 10:start * 10 + 10], "GP")
             r = opt.run(self.objective, n_iter=self.n_iter)
             self.params[start * 10:start * 10 + 10] = r.x
def optimize(cfgFilename):
    params = importJsonCfg(cfgFilename)
    BO_params = unpackVariables(params)

    varSpace = BO_params["Variables"]
    opt = Optimizer(varSpace,
                    base_estimator=params["BaseEstimator"],
                    acq_func=params["AcquisitionFunction"],
                    acq_optimizer=params["AcquisitionOptimizer"])

    optMaxIter = int(params["maxOptIter"])

    pointLst = []
    auprcs = []

    # warm-start the optimizer with any previously evaluated points
    if os.path.isfile("tempOpt.txt"):
        pointLst, auprcs = importFromFile(BO_params["VariableNames"],
                                          BO_params["FixedVars"])
        for i in range(len(auprcs)):
            opt.tell(pointLst[i], auprcs[i])

    shouldIContinue(pointLst, auprcs, optMaxIter,
                    int(params["EarlyStoppingNBest"]),
                    float(params["EarlyStoppingDelta"]))

    # ask for the next point to evaluate and append it to the temp file
    pt = opt.ask()
    pt = convertPoint(pt, BO_params["VariableNames"], BO_params["FixedVars"])
    with open("tempOpt.txt", "a") as fout:
        fout.write(" ".join([str(x) for x in pt]))
Example #4
def create_guassian_process():
    default_parameters_1 = [1e-3, 3, 200, "block", 10, 'relu', 64]
    default_parameters_2 = [1e-3, 3, 200, "bowtie", 10, 'relu', 64]
    default_parameters_3 = [1e-3, 3, 200, "diamond", 10, 'relu', 64]
    x0 = []
    x0.append(default_parameters_1)
    x0.append(default_parameters_2)
    x0.append(default_parameters_3)
    optimizer = Optimizer(dimensions=dimensions,
                          random_state=1,
                          n_initial_points=3,
                          base_estimator='gp')
    y = Parallel(n_jobs=3)(delayed(fitness)(v) for v in x0)
    optimizer.tell(x0, y)
    for run in range(20):
        x = optimizer.ask(n_points=3)
        y = Parallel(n_jobs=3)(delayed(fitness)(v) for v in x)
        print([str(val) for val in y])
        optimizer.tell(x, y)
    # gp_result = gp_minimize(func=fitness,
    #                         dimensions=dimensions,
    #                         n_calls=12,
    #                         noise= 0.01,
    #                         n_jobs=-1,
    #                         kappa = 5,
    #                         x0=default_parameters)
    with open("df_gp_res.pickle", "wb") as results_file:
        pickle.dump(optimizer, results_file)
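
# A minimal resume sketch (not part of the original example), assuming the
# pickle above was written successfully: the Optimizer can be reloaded later
# and the ask/tell loop continued from where it stopped.
import pickle

with open("df_gp_res.pickle", "rb") as fin:
    restored_optimizer = pickle.load(fin)
next_points = restored_optimizer.ask(n_points=3)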
    def __init__(self, num_workers=None, num_samples=None,
                 num_samples_best_eval=None, space=None,
                 train_eval_model: TrainEvalModelFactory = None,
                 params_converter=None):
        self.num_workers = num_workers
        self.num_samples = num_samples
        self.num_samples_best_eval = num_samples_best_eval

        self.optimizer = Optimizer(
            dimensions=space,
            random_state=1
        )
        self.res = None
        self.params_converter = params_converter
        self.train_eval_model = train_eval_model
        self.space = space

        self.best_model_loss = None
        self.best_model_params = None
        self.best_model_dir = None
        self.best_model = None

        os.makedirs('tensorboard', exist_ok=True)

        self.best_model_train_eval = {}
        self.best_model_validation_eval = {}
        self.best_model_test_eval = {}

        self.best_model_train_ll = None
        self.best_model_valid_ll = None
        self.best_model_test_ll = None
Example #6
    def prepare(self, pipeline_elements: list, maximize_metric: bool):

        self.hyperparameter_list = []
        self.maximize_metric = maximize_metric
        # build space
        space = []
        for pipe_element in pipeline_elements:
            if hasattr(pipe_element, "hyperparameters"):
                for name, value in pipe_element.hyperparameters.items():
                    # if we only have one value we do not need to optimize
                    if isinstance(value, list) and len(value) < 2:
                        self.constant_dictionary[name] = value[0]
                        continue
                    if isinstance(value,
                                  PhotonCategorical) and len(value.values) < 2:
                        self.constant_dictionary[name] = value.values[0]
                        continue
                    skopt_param = self._convert_PHOTON_to_skopt_space(
                        value, name)
                    if skopt_param is not None:
                        space.append(skopt_param)
        if len(space) == 0:
            logger.warning(
                "Did not find any hyperparameters to convert into skopt space")
            self.optimizer = None
        else:
            self.optimizer = Optimizer(
                space,
                "ET",
                acq_func=self.acq_func,
                acq_func_kwargs=self.acq_func_kwargs,
            )
        self.ask = self.ask_generator()
Example #7
 def __init__(self,
              oracle: BaseConditionalGenerationOracle,
              x: torch.Tensor,
              lr: float = 1.,
              base_estimator="gp",
              acq_func='gp_hedge',
              acq_optimizer="sampling",
              *args,
              **kwargs):
     super().__init__(oracle, x, *args, **kwargs)
     from skopt import Optimizer
     self._x.requires_grad_(True)
     self._lr = lr
     self._alpha_k = self._lr
     self._opt_result = None
     borders = []
     x_step = self._x_step
     if x_step is None:
         x_step = self._lr
     for xi in x.detach().cpu().numpy():
         borders.append((xi - x_step, xi + x_step))
     self._base_optimizer = Optimizer(borders,
                                      base_estimator=base_estimator,
                                      acq_func=acq_func,
                                      acq_optimizer=acq_optimizer,
                                      acq_optimizer_kwargs={"n_jobs": -1})
def test_same_set_of_points_ask(strategy, surrogate):
    """
    For n_points not None, tests whether two consecutive calls to ask
    return the same sets of points.

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """

    optimizer = Optimizer(base_estimator=surrogate(),
                          dimensions=[Real(-5.0, 10.0),
                                      Real(0.0, 15.0)],
                          acq_optimizer='sampling',
                          random_state=2)

    for i in range(n_steps):
        xa = optimizer.ask(n_points, strategy)
        xb = optimizer.ask(n_points, strategy)
        optimizer.tell(xa, [branin(v) for v in xa])
        assert_equal(xa, xb)  # check if the sets of points generated are equal
Example #9
def test_dict_list_space_representation():
    """
    Tests whether the conversion of the dictionary and list representation
    of a point from a search space works properly.
    """

    chef_space = {
        'Cooking time': (0, 1200),  # in minutes
        'Main ingredient': [
            'cheese', 'cherimoya', 'chicken', 'chard', 'chocolate', 'chicory'
        ],
        'Secondary ingredient': [
            'love', 'passion', 'dedication'
        ],
        'Cooking temperature': (-273.15, 10000.0)  # in Celsius
    }

    opt = Optimizer(dimensions=dimensions_aslist(chef_space))
    point = opt.ask()

    # check if the back transformed point and original one are equivalent
    assert_equal(
        point,
        point_aslist(chef_space, point_asdict(chef_space, point))
    )
 def __init__(self, configspace, **kwargs):
     super().__init__(
         configspace, reward_attribute=kwargs.get('reward_attribute'))
     self.hp_ordering = configspace.get_hyperparameter_names() # fix order of hyperparams in configspace.
     skopt_hpspace = []
     for hp in self.hp_ordering:
         hp_obj = configspace.get_hyperparameter(hp)
         hp_type = str(type(hp_obj)).lower() # type of hyperparam
         if 'integer' in hp_type:
             hp_dimension = Integer(low=int(hp_obj.lower), high=int(hp_obj.upper),name=hp)
         elif 'float' in hp_type:
              if hp_obj.log:  # log10-scale hyperparameter
                 hp_dimension = Real(low=float(hp_obj.lower), high=float(hp_obj.upper), prior='log-uniform', name=hp)
             else:
                 hp_dimension = Real(low=float(hp_obj.lower), high=float(hp_obj.upper), name=hp)
         elif 'categorical' in hp_type:
             hp_dimension = Categorical(hp_obj.choices, name=hp)
         elif 'ordinal' in hp_type:
             hp_dimension = Categorical(hp_obj.sequence, name=hp)
         else:
             raise ValueError("unknown hyperparameter type: %s" % hp)
         skopt_hpspace.append(hp_dimension)
     skopt_keys = {
         'base_estimator', 'n_random_starts', 'n_initial_points',
         'acq_func', 'acq_optimizer', 'random_state',  'model_queue_size',
         'acq_func_kwargs', 'acq_optimizer_kwargs'}
     skopt_kwargs = self._filter_skopt_kwargs(kwargs, skopt_keys)
     self.bayes_optimizer = Optimizer(
         dimensions=skopt_hpspace, **skopt_kwargs)
def test_constant_liar_runs(strategy, surrogate, acq_func):
    """
    Tests whether the optimizer runs properly during the random
    initialization phase and beyond

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """
    optimizer = Optimizer(base_estimator=surrogate(),
                          dimensions=[Real(-5.0, 10.0),
                                      Real(0.0, 15.0)],
                          acq_func=acq_func,
                          acq_optimizer='sampling',
                          random_state=0)

    # test arguments check
    assert_raises(ValueError, optimizer.ask, {"strategy": "cl_maen"})
    assert_raises(ValueError, optimizer.ask, {"n_points": "0"})
    assert_raises(ValueError, optimizer.ask, {"n_points": 0})

    for i in range(n_steps):
        x = optimizer.ask(n_points=n_points, strategy=strategy)
        # check if actually n_points was generated
        assert_equal(len(x), n_points)

        if "ps" in acq_func:
            optimizer.tell(x, [[branin(v), 1.1] for v in x])
        else:
            optimizer.tell(x, [branin(v) for v in x])
    def __init__(self, simulation=0):
        self.is_simulation = simulation
        self.exp_state = 0
        try:
            # resume from a previously saved optimizer state, if one exists
            with open('exp_data_incomplete.pkl', 'rb') as f:
                self.opt = pickle.load(f)
        except Exception:  # no saved state (or unreadable): start a fresh optimizer
            self.opt = Optimizer(
                base_estimator='GP',
                acq_func='LCB',
                acq_optimizer='auto',
                dimensions=[
                    (0.0,
                     4.0),  # range for param 1 (eg trajectory final height?)
                    (1.0,
                     5.0),  # range for param 2 (eg trajectory final pitch?)
                    (1.0, 5.0),  # range for param 3 (eg audio gain?)
                    (-2.0, 2.0)
                ],  # range for param 4 (eg vibrational state duration?)
                acq_func_kwargs={
                    'kappa': 5
                },  # prefer exploration; however, with higher dimensionality it naturally tends to diversify, so kappa could be decreased
                n_initial_points=10)
            # NB: pitching to 90 is problematic for the quaternion, debugging required; for now, the range should be [0, 89]

        if not self.is_simulation:
            BaseModule.__init__(self)
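
    # A checkpointing sketch (not part of the original snippet), assuming the
    # same filename as the resume path in __init__: persisting the optimizer
    # after each tell() lets an interrupted experiment be picked up there.
    def _save_state(self):
        with open('exp_data_incomplete.pkl', 'wb') as f:
            pickle.dump(self.opt, f)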
def evaluate_optimizer(surrogate, model, dataset, n_calls, random_state):
    """
    Evaluates some estimator for the task of optimization of parameters of some
    model, given limited number of model evaluations.

    Parameters
    ----------
    * `surrogate`:
        Estimator to use for optimization.
    * `model`: scikit-learn estimator.
        sklearn estimator used for parameter tuning.
    * `dataset`: str
        Name of dataset to train ML model on.
    * `n_calls`: int
        Budget of evaluations
    * `random_state`: seed
        Set the random number generator in numpy.

    Returns
    -------
    * `trace`: list of tuples
        (p, f(p), best), where p is a dictionary of the form "param name":value,
        and f(p) is performance achieved by the model for configuration p
        and best is the best value till that index.
        Such a list contains history of execution of optimization.
    """
    # seeding below is necessary for processes which fork at the same time,
    # so that the random numbers generated in each process are different
    np.random.seed(random_state)
    problem = MLBench(model, dataset)
    space = problem.space

    # initialization
    estimator = surrogate(random_state=random_state)
    dimensions_names = sorted(space)
    dimensions = [space[d][0] for d in dimensions_names]
    solver = Optimizer(dimensions, estimator, random_state=random_state)

    trace = []
    best_y = np.inf

    # optimization loop
    for i in range(n_calls):
        point_list = solver.ask()

        # convert list of dimension values to dictionary
        point_dct = dict(zip(dimensions_names, point_list))

        # the result of "evaluate" is accuracy / r^2, which is the more the better
        objective_at_point = -problem.evaluate(point_dct)

        if best_y > objective_at_point:
            best_y = objective_at_point

        # remember the point, objective pair
        trace.append((point_dct, objective_at_point, best_y))
        print("Evaluation no. " + str(i + 1))

        solver.tell(point_list, objective_at_point)
    return trace
Example #14
 def __init__(self, configspace, **kwargs):
     BaseSearcher.__init__(self, configspace)
     self.hp_ordering = configspace.get_hyperparameter_names()  # fix order of hyperparams in configspace.
     skopt_hpspace = []
     for hp in self.hp_ordering:
         hp_obj = configspace.get_hyperparameter(hp)
         hp_type = str(type(hp_obj)).lower()  # type of hyperparam
         if 'integer' in hp_type:
             hp_dimension = Integer(low=int(hp_obj.lower),
                                    high=int(hp_obj.upper),
                                    name=hp)
         elif 'float' in hp_type:
              if hp_obj.log:  # log10-scale hyperparameter
                 hp_dimension = Real(low=float(hp_obj.lower),
                                     high=float(hp_obj.upper),
                                     prior='log-uniform',
                                     name=hp)
             else:
                 hp_dimension = Real(low=float(hp_obj.lower),
                                     high=float(hp_obj.upper),
                                     name=hp)
         elif 'categorical' in hp_type:
             hp_dimension = Categorical(hp_obj.choices, name=hp)
         elif 'ordinal' in hp_type:
             hp_dimension = Categorical(hp_obj.sequence, name=hp)
         else:
             raise ValueError("unknown hyperparameter type: %s" % hp)
         skopt_hpspace.append(hp_dimension)
     self.bayes_optimizer = Optimizer(dimensions=skopt_hpspace, **kwargs)
def test_all_points_different(strategy, surrogate):
    """
    Tests whether the parallel optimizer always generates
    different points to evaluate.

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """
    optimizer = Optimizer(base_estimator=surrogate(),
                          dimensions=[Real(-5.0, 10.0),
                                      Real(0.0, 15.0)],
                          acq_optimizer='sampling',
                          random_state=1)

    tolerance = 1e-3  # minimum pairwise distance for points to be considered different
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)
        optimizer.tell(x, [branin(v) for v in x])
        distances = pdist(x)
        assert all(distances > tolerance)
Example #16
def setup_tune_scheduler():
    from ray.tune.suggest.skopt import SkOptSearch
    from ray.tune.suggest.suggestion import ConcurrencyLimiter
    from skopt import Optimizer

    exp_metrics = workload.exp_metric()

    search_space, dim_names = workload.create_skopt_space()
    algo = ConcurrencyLimiter(
        SkOptSearch(
            Optimizer(search_space),
            dim_names,
            **exp_metrics,
        ),
        81,
    )

    scheduler = FluidBandScheduler(
        max_res=81,
        reduction_factor=3,
        **exp_metrics,
    )
    return dict(
        search_alg=algo,
        scheduler=scheduler,
        trial_executor=MyRayTrialExecutor(),
        resources_per_trial=com.detect_baseline_resource(),
    )
Example #17
def job(loss):
    scorer = SklearnScorer(
        X, y, words, postfx,
        rules_apply=0.8,
        max_endings=75
    )

    space = {
        'alpha': (0.0001, 1.0, 'log-uniform'),
        'l1_ratio': (0.001, 0.999),
        'loss': [loss],
        'epsilon': (0.001, 10.0, 'log-uniform'),
        'threshold': (0.00001, 0.001, 'log-uniform'),
    }

    # point_aslist(space, space) returns the dimension definitions in sorted-key
    # order, so it can be passed to Optimizer as the search space
    opt = Optimizer(point_aslist(space, space))

    for i in range(128):
        p = opt.ask()
        p = point_asdict(space, p)

        f = scorer(p)

        opt.tell(point_aslist(space, p), f)
        print(f)
        print(i, scorer.best_obj, scorer.best_params)

    import json
    with open(loss + '.json', 'w') as fout:
        json.dump(scorer.result, fout, indent=2, sort_keys=True)
    def _set_search_alg(search_alg, search_alg_params, recipe, search_space):
        if search_alg:
            if not isinstance(search_alg, str):
                raise ValueError(f"search_alg should be of type str."
                                 f" Got {search_alg.__class__.__name__}")
            search_alg = search_alg.lower()
            if search_alg not in SEARCH_ALG_ALLOWED:
                raise ValueError(
                    f"search_alg must be one of {SEARCH_ALG_ALLOWED}. "
                    f"Got: {search_alg}")
            if search_alg == "skopt":
                from skopt import Optimizer
                opt_params = recipe.opt_params()
                optimizer = Optimizer(opt_params)
                search_alg_params.update(
                    dict(
                        optimizer=optimizer,
                        parameter_names=list(search_space.keys()),
                    ))
            if search_alg == "bayesopt":
                search_alg_params.update(
                    {"space": recipe.manual_search_space()})

            search_alg_params.update(dict(
                metric="reward_metric",
                mode="max",
            ))
            search_alg = create_searcher(search_alg, **search_alg_params)
        return search_alg
Example #19
 def get_optimizer(self, cpu_count) -> Optimizer:
     return Optimizer(
         self.hyperopt_space(),
         base_estimator="ET",
         acq_optimizer="auto",
         n_initial_points=30,
         acq_optimizer_kwargs={'n_jobs': cpu_count}
     )
Example #20
def test_purely_categorical_space():
    # Test reproduces the bug in #908; make sure it doesn't come back
    dims = [Categorical(['a', 'b', 'c']), Categorical(['A', 'B', 'C'])]
    optimizer = Optimizer(dims, n_initial_points=1, random_state=3)

    x = optimizer.ask()
    # before the fix this call raised an exception
    optimizer.tell(x, 1.)
Example #21
 def get_optimizer(self, cpu_count) -> Optimizer:
     return Optimizer(self.hyperopt_space(),
                      base_estimator="ET",
                      acq_optimizer="auto",
                      n_initial_points=INITIAL_POINTS,
                      acq_optimizer_kwargs={'n_jobs': cpu_count},
                      random_state=self.config.get('hyperopt_random_state',
                                                   None))
Example #22
    def __init__(self, space, **kwargs):
        super(BayesianOptimizer, self).__init__(space)

        self.optimizer = Optimizer(
            base_estimator=GaussianProcessRegressor(**kwargs),
            dimensions=convert_orion_space_to_skopt_space(space))

        self.strategy = "cl_min"
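
    # A usage sketch (not part of the original snippet; suggest() is a
    # hypothetical helper): the stored "cl_min" strategy is intended for
    # batched ask() calls on the underlying skopt Optimizer.
    def suggest(self, n_points=4):
        return self.optimizer.ask(n_points=n_points, strategy=self.strategy)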
Example #23
    def buildModel(self):
        if "cost" in self.metric:
            data = getResult(self.filename, dir='hyperparam_cost/')
        else:
            data = getResult(self.filename, dir='hyperparam/')

        X = list()
        Y = list()
        for type in self.types:
            for size in self.sizes:
                for num in self.number_of_nodes[size]:
                    # x = self.convertToDom([type, size, num])
                    x = type, size, num
                    # print(x)
                    X.append(list(x))
                    # print(x)
                    Y.append(self.getObjective(x))

        # print(X, Y)
        mse = list()
        ae = list()
        mae = list()
        for trials in data['experiments']:
            opt = Optimizer(
                self.domain,
                base_estimator=self.optimizer,
                n_random_starts=0,
                acq_optimizer="sampling",
                acq_func=self.acquisition_method,
                #acq_optimizer_kwargs={'n_points': 100}
            )
            conf = list()
            runtimes = list()
            for trial in trials:
                x = [
                    trial['params']['type'], trial['params']['size'],
                    trial['params']['num']
                ]
                # print(x)
                conf.append(self.convertToDom(x))
                runtimes.append(trial['value'])
                # opt.base_estimator_.fit(opt.space.transform([conf]), [trial['runtime']])
            opt.base_estimator_.fit(opt.space.transform(conf), runtimes)

            yTrue = list()
            yPred = list()
            for x, y in zip(X, Y):
                conf = self.convertToDom(x)
                # note: the second argument is taken as return_std (truthy), so
                # predict returns a (mean, std) pair and pred[0][0] is the mean
                pred = opt.base_estimator_.predict(opt.space.transform([conf]), [y])
                yTrue.append(y)
                yPred.append(pred[0][0])
            mse.append(mean_squared_error(yTrue, yPred))
            ae.append(mean_absolute_error(yTrue, yPred))
            mae.append(median_absolute_error(yTrue, yPred))
            # print(mse)
        rmse = np.sqrt(mse)
        return {'ae': ae, 'mae': mae, 'mse': mse, 'rmse': rmse}
Example #24
 def _init_optimizer(self, n_calls):
     return Optimizer(
         dimensions=dimensions_aslist(search_space=self.search_space),
         base_estimator=RandomForestRegressor(n_estimators=10),
         n_initial_points=int(n_calls * self.random_ratio),
         acq_func="EI",
         acq_optimizer="sampling",
         acq_optimizer_kwargs=dict(n_points=1000, n_jobs=-1),
         acq_func_kwargs=dict(xi=0.01, kappa=1.96))
Example #25
 def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
     return Optimizer(
         dimensions,
         base_estimator="ET",
         acq_optimizer="auto",
         n_initial_points=INITIAL_POINTS,
         acq_optimizer_kwargs={'n_jobs': cpu_count},
         random_state=self.random_state,
     )
def arms_optimizer(nbArms, est):
    return Optimizer(
        [
            list(range(nbArms))  # Categorical dimensions: arm index!
        ],
        est(),
        acq_optimizer="sampling",
        n_random_starts=3 * nbArms  # Sure ?
    )
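
# A minimal usage sketch (not part of the original snippet; pull_arm is a
# hypothetical function returning the observed reward for an arm): skopt
# minimizes, so the negated reward is fed back through tell().
def play_one_round(opt, pull_arm):
    arm = opt.ask()[0]               # ask() returns a one-element point: the arm index
    opt.tell([arm], -pull_arm(arm))  # minimize the negative reward
    return arm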
 def __init__(self, config, w_num, popsize, resample, search_space):
     self.popsize = popsize
     self.resample = resample
     self.optimizer = Optimizer(
         dimensions=[Real(search_space[0], search_space[1])] * w_num,
         random_state=1, # use the same seed for repeatability
         n_initial_points=self.popsize*self.resample, # if self.resample > 1, then we will continue ask-tell cycles self.resample times
          acq_optimizer_kwargs={'n_points': 10000}  # 'n_jobs' slows things down; 'noise' seems not to be used
     )
Example #28
    def __init__(self, pipeline_hyperparameter_ranges, random_seed=0):
        """Init SkOptTuner

        Arguments:
            pipeline_hyperparameter_ranges (dict): A set of hyperparameter ranges corresponding to a pipeline's parameters
            random_seed (int): The seed for the random number generator. Defaults to 0.
        """
        super().__init__(pipeline_hyperparameter_ranges, random_seed=random_seed)
        self.opt = Optimizer(self._search_space_ranges, "ET", acq_optimizer="sampling", random_state=random_seed)
 def __init__(self, space, samples, random_state=1):
     super().__init__(space)
     self._num_samples = samples
     self.optimizer = Optimizer(dimensions=space,
                                random_state=random_state,  # use the seed passed to __init__ rather than a hardcoded 1
                                base_estimator="GP",
                                acq_optimizer="auto",
                                n_initial_points=10)
     self.asked = 0
Example #30
    def __init__(self, pipeline_hyperparameter_ranges, random_state=0):
        """ Init SkOptTuner

        Arguments:
            pipeline_hyperparameter_ranges (dict): a set of hyperparameter ranges corresponding to a pipeline's parameters
            random_state (int, np.random.RandomState): The random state
        """
        super().__init__(pipeline_hyperparameter_ranges, random_state=random_state)
        self.opt = Optimizer(self._search_space_ranges, "ET", acq_optimizer="sampling", random_state=random_state)