Example #1
    def __init__(self, experiment, optimizer_params=None):
        """
        Initializes the random search optimizer.

        Parameters
        ----------
        experiment : Experiment
            The experiment representing the current state of the execution.
        optimizer_params : dict, optional
            Dictionary of the optimizer parameters. If None, default
            parameters will be assumed.
            Available parameters are:
            "random_state" : numpy.random.RandomState, optional
                The random state to use. See numpy random states.

        Raises
        ------
        ValueError
            Iff the experiment is not supported.
        """
        self._logger = logging_utils.get_logger(self)
        self._logger.debug(
            "Initializing random search. experiment is %s, "
            "optimizer_params %s", experiment, optimizer_params)
        if optimizer_params is None:
            optimizer_params = {}
        self.random_state = optimizer_params.get("random_state", None)
        self._logger.debug("Initialized random state to %s", self.random_state)
        Optimizer.__init__(self, experiment, optimizer_params)
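
A minimal, self-contained sketch of the defaulting pattern used above; the parameter dict here is illustrative only, and seeding the RandomState is what makes the search reproducible:

import numpy as np

# Sketch of the .get() defaulting in __init__ above: an absent key falls
# back to None; a seeded RandomState makes repeated runs propose the same
# candidate points.
optimizer_params = {"random_state": np.random.RandomState(42)}
random_state = optimizer_params.get("random_state", None)
if random_state is None:
    random_state = np.random.RandomState()
print(random_state.uniform(0, 1, size=3))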
Example #2
    def __init__(self, experiment, optimizer_params=None):
        """
        Initializes a Bayesian optimizer.

        Parameters
        ----------
        experiment : Experiment
            The experiment for which to optimize.
        optimizer_params : dict, optional
            Dictionary of the optimizer parameters. Available are:
            "initial_random_runs" : int, optional
                The number of initial random runs before using the GP. Default
                is 10.
            "random_state" : numpy random state, optional
                The numpy random state, or an object from which to initialize
                one. Default is None.
            "acquisition_hyperparams" : dict, optional
                Dictionary of acquisition-function hyperparameters.
            "num_gp_restarts" : int, optional
                GPy's optimization requires restarts to find a good solution;
                this parameter controls their number. Default is 10.
            "acquisition" : AcquisitionFunction, optional
                The acquisition function to use, given as either a class or an
                instance. Default is ExpectedImprovement.
            "kernel" : string, optional
                The name of the GP kernel to use. Default is "matern52".
            "kernel_params" : dict, optional
                Dictionary of kernel parameters. Default is {}.
            "num_precomputed" : int, optional
                The number of proposal points to keep precomputed so that
                multiple workers can be served quickly.
        """
        self._logger = get_logger(self)
        if optimizer_params is None:
            optimizer_params = {}
        self.initial_random_runs = optimizer_params.get(
            'initial_random_runs', self.initial_random_runs)
        self.random_state = check_random_state(
            optimizer_params.get('random_state', None))
        self.acquisition_hyperparams = optimizer_params.get(
            'acquisition_hyperparams', None)
        self.num_gp_restarts = optimizer_params.get(
            'num_gp_restarts', self.num_gp_restarts)
        # Accept either an AcquisitionFunction instance or a class to be
        # instantiated with the acquisition hyperparameters.
        if not isinstance(optimizer_params.get('acquisition'),
                          AcquisitionFunction):
            self.acquisition_function = optimizer_params.get(
                'acquisition', ExpectedImprovement)(self.acquisition_hyperparams)
        else:
            self.acquisition_function = optimizer_params.get("acquisition")
        self.kernel_params = optimizer_params.get("kernel_params", {})
        self.kernel = optimizer_params.get("kernel", "matern52")
        self.random_searcher = RandomSearch(experiment, optimizer_params)
        Optimizer.__init__(self, experiment, optimizer_params)
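
A self-contained sketch of the class-or-instance dispatch used for the "acquisition" parameter above; the stub classes below are stand-ins for illustration, not the real acquisition classes:

# Stand-in stubs; the real AcquisitionFunction and ExpectedImprovement live
# in the surrounding optimizer package.
class AcquisitionFunction(object):
    def __init__(self, hyperparams=None):
        self.hyperparams = hyperparams or {}

class ExpectedImprovement(AcquisitionFunction):
    pass

def resolve_acquisition(optimizer_params, hyperparams=None):
    candidate = optimizer_params.get("acquisition", ExpectedImprovement)
    if isinstance(candidate, AcquisitionFunction):
        return candidate             # already an instance: use as-is
    return candidate(hyperparams)    # a class: instantiate it

print(type(resolve_acquisition({})).__name__)  # ExpectedImprovement
print(type(resolve_acquisition(
    {"acquisition": ExpectedImprovement({"xi": 0.01})})).__name__)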
Example #3
    def __init__(self, experiment, optimizer_params=None):
        """
        Initializes the random search optimizer.

        Parameters
        ----------
        experiment : Experiment
            The experiment representing the current state of the execution.
        optimizer_params : dict, optional
            Dictionary of the optimizer parameters. If None, default
            parameters will be assumed.
            Available parameters are:
            "random_state" : numpy.random.RandomState, optional
                The random state to use. See numpy random states.

        Raises
        ------
        ValueError
            Iff the experiment is not supported.
        """
        if optimizer_params is None:
            optimizer_params = {}
        self.random_state = optimizer_params.get("random_state", None)
        Optimizer.__init__(self, experiment, optimizer_params)
Example #4
    def __init__(self, experiment, optimizer_params=None):
        """
        Initializes a bayesian optimizer.
        Parameters
        ----------
        experiment : Experiment
            The experiment for which to optimize.
        optimizer_arguments: dict of string keys
            Sets the possible arguments for this optimizer. Available are:
            "initial_random_runs" : int, optional
                The number of initial random runs before using the GP. Default
                is 10.
            "random_state" : scipy random state, optional
                The scipy random state or object to initialize one. Default is
                None.
            "acquisition_hyperparameters" : dict, optional
                dictionary of acquisition-function hyperparameters
            "num_gp_restarts" : int
                GPy's optimization requires restarts to find a good solution.
                This parameter controls this. Default is 10.
            "acquisition" : AcquisitionFunction
                The acquisition function to use. Default is
                ExpectedImprovement.
            "num_precomputed" : int
                The number of points that should be kept precomputed for faster
                multiple workers.
        """
        self._logger = get_logger(self)
        self._logger.debug(
            "Initializing Bayesian optimizer. Experiment is %s,"
            " optimizer_params %s", experiment, optimizer_params)
        if optimizer_params is None:
            optimizer_params = {}

        self.initial_random_runs = optimizer_params.get(
            'initial_random_runs', self.initial_random_runs)
        self.random_state = check_random_state(
            optimizer_params.get('random_state', None))
        self.acquisition_hyperparams = optimizer_params.get(
            'acquisition_hyperparams', None)
        self.num_gp_restarts = optimizer_params.get('num_gp_restarts',
                                                    self.num_gp_restarts)

        self._logger.debug(
            "Initialized relevant parameters. "
            "initial_random_runs is %s, random_state is %s, "
            "acquisition_hyperparams %s, num_gp_restarts %s",
            self.initial_random_runs, self.random_state,
            self.acquisition_hyperparams, self.num_gp_restarts)

        if not isinstance(optimizer_params.get('acquisition'),
                          AcquisitionFunction):
            self.acquisition_function = optimizer_params.get(
                "acquisition", ExpectedImprovement)
            self.acquisition_function = check_acquisition(
                acquisition=self.acquisition_function,
                acquisition_params=self.acquisition_hyperparams)
            self._logger.debug(
                "acquisition is not an AcquisitionFunction instance. Set "
                "it to %s", self.acquisition_function)
        else:
            self.acquisition_function = optimizer_params.get("acquisition")
            self._logger.debug(
                "Loaded acquisition function from "
                "optimizer_params. Is %s", self.acquisition_function)
        self.kernel_params = optimizer_params.get("kernel_params", {})
        self.kernel = optimizer_params.get("kernel", "matern52")

        self._logger.debug("Kernel details: Kernel is %s, kernel_params %s",
                           self.kernel, self.kernel_params)

        self.random_searcher = RandomSearch(experiment, optimizer_params)
        self._logger.debug("Initialized required RandomSearcher; is %s",
                           self.random_searcher)
        Optimizer.__init__(self, experiment, optimizer_params)
        self._logger.debug("Finished initializing bayOpt.")