    def go(self):
        # Normalize dimensions before deferring to the parent implementation
        self.dimensions = normalize_dimensions(self.dimensions)
        super().go()

    def __init__(self,
                 dimensions,
                 base_estimator='gp',
                 n_random_starts=None,
                 n_initial_points=10,
                 acq_func='gp_hedge',
                 acq_optimizer='auto',
                 random_state=None,
                 acq_func_kwargs=None,
                 acq_optimizer_kwargs=None):
        """This is nearly identical to :meth:`skopt.optimizer.optimizer.Optimizer.__init__`. It is recreated here to use the
        modified :class:`hyperparameter_hunter.space.Space`, rather than the original `skopt` version. This is not an ideal
        solution, and other options are being considered

        Parameters
        ----------
        dimensions: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        base_estimator: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        n_random_starts: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        n_initial_points: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        acq_func: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        acq_optimizer: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        random_state: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        acq_func_kwargs: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`
        acq_optimizer_kwargs: See :meth:`skopt.optimizer.optimizer.Optimizer.__init__`"""
        # TODO: Figure out way to override skopt Optimizer's use of skopt Space without having to rewrite __init__
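        # NOTE: `__repeated_ask_kwargs` is an addition of this subclass, not part of `skopt`'s
        #   `Optimizer`; presumably it records `ask` arguments so a request can be repeated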
        self.__repeated_ask_kwargs = {}
        self.rng = check_random_state(random_state)

        # Configure acquisition function - Store and create acquisition function set
        self.acq_func = acq_func
        self.acq_func_kwargs = acq_func_kwargs

        allowed_acq_funcs = ['gp_hedge', 'EI', 'LCB', 'PI', 'EIps', 'PIps']
        if self.acq_func not in allowed_acq_funcs:
            raise ValueError(
                f'Expected `acq_func` to be in {allowed_acq_funcs}, got {self.acq_func}'
            )

        # Treat hedging method separately
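        # `gp_hedge` does not commit to a single acquisition function; at each `ask`, one of EI,
        #   LCB, and PI is sampled with probabilities given by a softmax over the accumulated `gains_`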
        if self.acq_func == 'gp_hedge':
            self.cand_acq_funcs_ = ['EI', 'LCB', 'PI']
            self.gains_ = np.zeros(3)
        else:
            self.cand_acq_funcs_ = [self.acq_func]

        if acq_func_kwargs is None:
            acq_func_kwargs = dict()
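        # `eta` scales `gains_` in the `gp_hedge` softmax; larger values concentrate probability on
        #   the best-performing candidate acquisition function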
        self.eta = acq_func_kwargs.get('eta', 1.0)

        # Configure counters of points - Check `n_random_starts` deprecation first
        if n_random_starts is not None:
            warnings.warn(
                '`n_random_starts` will be removed in favour of `n_initial_points`',
                DeprecationWarning
            )
            n_initial_points = n_random_starts
        if n_initial_points < 0:
            raise ValueError(f'Expected `n_initial_points` >= 0, got {n_initial_points}')
        self._n_initial_points = n_initial_points
        self.n_initial_points_ = n_initial_points
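        # `_n_initial_points` counts down as observations arrive via `tell`; until it reaches 0,
        #   `ask` returns randomly sampled points rather than surrogate-guided suggestions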

        # Configure estimator - Build `base_estimator` from its name if given as a string
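        # `cook_estimator` maps names such as 'gp', 'rf', 'et', and 'gbrt' to configured regressors,
        #   seeded here from `self.rng` for reproducibility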
        if isinstance(base_estimator, str):
            base_estimator = cook_estimator(
                base_estimator, space=dimensions,
                random_state=self.rng.randint(0, np.iinfo(np.int32).max)
            )

        # Check if regressor
        if not is_regressor(base_estimator) and base_estimator is not None:
            raise ValueError(f'`base_estimator`={base_estimator} must be a regressor')

        # Treat per second acquisition function specially
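        # The '*ps' variants optimize improvement per second, so the estimator must also model
        #   evaluation time; wrapping it in `MultiOutputRegressor` provides that second output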
        is_multi_regressor = isinstance(base_estimator, MultiOutputRegressor)
        if 'ps' in self.acq_func and not is_multi_regressor:
            self.base_estimator_ = MultiOutputRegressor(base_estimator)
        else:
            self.base_estimator_ = base_estimator

        # Configure optimizer - Decide optimizer based on gradient information
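        # 'lbfgs' requires acquisition gradients, which `has_gradients` confirms for estimators
        #   like Gaussian processes; estimators without gradients fall back to candidate sampling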
        if acq_optimizer == 'auto':
            if has_gradients(self.base_estimator_):
                acq_optimizer = 'lbfgs'
            else:
                acq_optimizer = 'sampling'

        if acq_optimizer not in ['lbfgs', 'sampling']:
            raise ValueError(
                f'Expected `acq_optimizer` to be "lbfgs" or "sampling", got {acq_optimizer}'
            )
        if not has_gradients(self.base_estimator_) and acq_optimizer != 'sampling':
            raise ValueError(
                f'The regressor {type(base_estimator)} should run with `acq_optimizer`="sampling"'
            )
        self.acq_optimizer = acq_optimizer

        # Record other arguments
        if acq_optimizer_kwargs is None:
            acq_optimizer_kwargs = dict()

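        # `n_points` is the number of candidates sampled when minimizing the acquisition function;
        #   `n_restarts_optimizer` applies only when `acq_optimizer` is 'lbfgs'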
        self.n_points = acq_optimizer_kwargs.get('n_points', 10000)
        self.n_restarts_optimizer = acq_optimizer_kwargs.get('n_restarts_optimizer', 5)
        self.n_jobs = acq_optimizer_kwargs.get('n_jobs', 1)
        self.acq_optimizer_kwargs = acq_optimizer_kwargs

        # Configure search space - Normalize space if GP regressor
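        # GP kernels assume features on comparable scales, so `normalize_dimensions` rescales each
        #   dimension onto [0, 1] before the space is constructed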
        if isinstance(self.base_estimator_, GaussianProcessRegressor):
            dimensions = normalize_dimensions(dimensions)
        self.space = Space(dimensions)

        # Record categorical and non-categorical indices
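        # These index lists let later steps treat categorical and numeric dimensions differently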
        self._cat_inds = []
        self._non_cat_inds = []
        for ind, dim in enumerate(self.space.dimensions):
            if isinstance(dim, Categorical):
                self._cat_inds.append(ind)
            else:
                self._non_cat_inds.append(ind)

        # Initialize storage for optimization
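        # `models` collects the surrogate fit at each `tell`; `Xi` and `yi` hold the evaluated
        #   points and their corresponding objective values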
        self.models = []
        self.Xi = []
        self.yi = []

        # Initialize cache for `ask` method responses
        # This ensures that multiple calls to `ask` with `n_points` set return the same sets of points
        # Reset to {} at every call to `tell`
        self.cache_ = {}