Example #1
def gen_noise(dims, points, random_state=None):
    """
    Generates an ndarray representing random noise.

    This array has dims dimensions and points points per dimension. Each
    element is between 0 and 1.

    Parameters
    ----------
    dims : int
        The dimensionality of the noise.
    points : int
        The number of points per dimension.
    random_state : numpy RandomState, int, or None, optional
        The random state (or a seed from which to create one) used to
        generate the noise.

    Returns
    -------
    noise_gen : ndarray
        The ndarray containing the noise.
    """
    random_state = check_random_state(random_state)
    dimension_tuple = (points,) * dims
    noise_gen = random_state.rand(*dimension_tuple)
    return noise_gen
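
A minimal usage sketch (assuming check_random_state is the sklearn-style helper that accepts None, an int seed, or a numpy RandomState):

# Hedged usage sketch for gen_noise as defined above; the import is an
# assumption about where check_random_state comes from.
from sklearn.utils import check_random_state

noise = gen_noise(dims=2, points=4, random_state=42)
print(noise.shape)  # (4, 4): one axis of length `points` per dimension
assert ((noise >= 0) & (noise < 1)).all()  # rand() samples [0, 1)
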
Example #2
def gen_noise(dims, points, random_state=None):
    """
    Generates an ndarray representing random noise.

    This array has dims dimensions and points points per dimension. Each
    element is between 0 and 1.

    Parameters
    ----------
    dims : int
        The dimensionality of the noise.
    points : int
        The number of points per dimension.
    random_state : numpy RandomState, int, or None, optional
        The random state (or a seed from which to create one) used to
        generate the noise.

    Returns
    -------
    noise_gen : ndarray
        The ndarray containing the noise.
    """
    random_state = check_random_state(random_state)
    dimension_tuple = (points,) * dims
    noise_gen = random_state.rand(*dimension_tuple)
    return noise_gen
Example #3
    def __init__(self, optimizer_arguments=None):
        """
        Initializes a Bayesian optimizer.

        Parameters
        ----------
        optimizer_arguments : dict of string keys, optional
            Sets the possible arguments for this optimizer. Available are:
            "initial_random_runs" : int, optional
                The number of initial random runs before using the GP. Default
                is 10.
            "random_state" : numpy random state, optional
                The random state, or an object from which to initialize one.
                Default is None.
            "acquisition_hyperparams" : dict, optional
                Dictionary of acquisition-function hyperparameters.
            "num_gp_restarts" : int, optional
                GPy's optimization requires restarts to find a good solution;
                this parameter controls how many are done. Default is 10.
            "acquisition" : AcquisitionFunction, optional
                The acquisition function to use. Default is
                ExpectedImprovement.
            "num_precomputed" : int, optional
                The number of points kept precomputed so that multiple workers
                can be served faster. Default is 10.
        """
        self.logger = get_logger(self)
        if optimizer_arguments is None:
            optimizer_arguments = {}
        self.initial_random_runs = optimizer_arguments.get(
            'initial_random_runs', self.initial_random_runs)
        self.random_state = check_random_state(
            optimizer_arguments.get('random_state', None))
        self.acquisition_hyperparams = optimizer_arguments.get(
            'acquisition_hyperparams', None)
        self.num_gp_restarts = optimizer_arguments.get('num_gp_restarts',
                                                       self.num_gp_restarts)
        if not isinstance(optimizer_arguments.get('acquisition'),
                          AcquisitionFunction):
            self.acquisition_function = optimizer_arguments.get(
                'acquisition',
                ExpectedImprovement)(self.acquisition_hyperparams)
        else:
            self.acquisition_function = optimizer_arguments.get("acquisition")
        self.kernel_params = optimizer_arguments.get("kernel_params", {})
        self.kernel = optimizer_arguments.get("kernel", "matern52")
        self.random_searcher = RandomSearch(
            {"random_state": self.random_state})

        if mcmc_imported:
            self.mcmc = optimizer_arguments.get("mcmc", False)
        else:
            self.mcmc = False

        self.num_precomputed = optimizer_arguments.get('num_precomputed', 10)
        self.logger.info("Bayesian optimization initialized.")
Example #4
    def __init__(self, optimizer_arguments=None):
        """
        Initializes a Bayesian optimizer.

        Parameters
        ----------
        optimizer_arguments : dict of string keys, optional
            Sets the possible arguments for this optimizer. Available are:
            "initial_random_runs" : int, optional
                The number of initial random runs before using the GP. Default
                is 10.
            "random_state" : numpy random state, optional
                The random state, or an object from which to initialize one.
                Default is None.
            "acquisition_hyperparams" : dict, optional
                Dictionary of acquisition-function hyperparameters.
            "num_gp_restarts" : int, optional
                GPy's optimization requires restarts to find a good solution;
                this parameter controls how many are done. Default is 10.
            "acquisition" : AcquisitionFunction, optional
                The acquisition function to use. Default is
                ExpectedImprovement.
            "num_precomputed" : int, optional
                The number of points kept precomputed so that multiple workers
                can be served faster. Default is 10.
        """
        self.logger = get_logger(self)
        if optimizer_arguments is None:
            optimizer_arguments = {}
        self.initial_random_runs = optimizer_arguments.get(
            'initial_random_runs', self.initial_random_runs)
        self.random_state = check_random_state(
            optimizer_arguments.get('random_state', None))
        self.acquisition_hyperparams = optimizer_arguments.get(
            'acquisition_hyperparams', None)
        self.num_gp_restarts = optimizer_arguments.get(
            'num_gp_restarts', self.num_gp_restarts)
        if not isinstance(optimizer_arguments.get('acquisition'), AcquisitionFunction):
            self.acquisition_function = optimizer_arguments.get(
                'acquisition', ExpectedImprovement)(self.acquisition_hyperparams)
        else:
            self.acquisition_function = optimizer_arguments.get("acquisition")
        self.kernel_params = optimizer_arguments.get("kernel_params", {})
        self.kernel = optimizer_arguments.get("kernel", "matern52")
        self.random_searcher = RandomSearch({"random_state": self.random_state})

        if mcmc_imported:
            self.mcmc = optimizer_arguments.get("mcmc", False)
        else:
            self.mcmc = False

        self.num_precomputed = optimizer_arguments.get('num_precomputed', 10)
        self.logger.info("Bayesian optimization initialized.")
Example #5
def demo_branin(steps=50, random_steps=10, cv=5, disable_auto_plot=False):
    logging.basicConfig(level=logging.DEBUG)

    # fix the seed so all optimizers share the same random initialization
    random_state_rs = check_random_state(42)

    param_defs = {
        "x": MinMaxNumericParamDef(-5, 10),
        "y": MinMaxNumericParamDef(0, 15)
    }

    LAss = ValidationLabAssistant(cv=cv, disable_auto_plot=disable_auto_plot)
    LAss.init_experiment("RandomSearch",
                         "RandomSearch",
                         param_defs,
                         minimization=True,
                         optimizer_arguments={"random_state": random_state_rs})

    optimizers = ["RandomSearch", "BayOpt_EI"]
    optimizer_arguments = [{
        "random_state": random_state_rs
    }, {
        "initial_random_runs": random_steps
    }]

    # evaluate random search for random_steps steps; these evaluations later
    # seed the Bayesian optimizer
    for i in range(random_steps * cv):
        single_branin_evaluation_step(LAss, 'RandomSearch')

    # now clone the experiment for each remaining optimizer
    for j in range(1, len(optimizers)):
        LAss.clone_experiments_by_name(
            exp_name=optimizers[0],
            new_exp_name=optimizers[j],
            optimizer="BayOpt",
            optimizer_arguments=optimizer_arguments[j])
    logger.info("Random Initialization Phase Finished.")
    logger.info("Competitive Evaluation Phase starts now.")

    # from here on, advance all optimizers step by step
    for i in range(random_steps * cv, steps * cv):
        for optimizer in optimizers:
            single_branin_evaluation_step(LAss, optimizer)

    # plot results comparatively:
    # a very detailed plot containing all points
    LAss.plot_result_per_step(optimizers)

    # a plot showing only the evaluation of the best result
    LAss.plot_validation(optimizers)
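
single_branin_evaluation_step is not shown here. For reference, the Branin benchmark this demo optimizes is the standard test function sketched below; note that the demo's parameter ranges, x in [-5, 10] and y in [0, 15], are exactly its customary domain:

import math

def branin(x, y):
    # Standard Branin test function with the usual constants. Its global
    # minimum value is about 0.397887, attained e.g. at (math.pi, 2.275).
    a = 1.0
    b = 5.1 / (4 * math.pi ** 2)
    c = 5.0 / math.pi
    r = 6.0
    s = 10.0
    t = 1.0 / (8 * math.pi)
    return a * (y - b * x ** 2 + c * x - r) ** 2 + s * (1 - t) * math.cos(x) + s
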
Example #6
    def _gen_one_candidate(self):
        """
        Generates a single candidate.

        This is done by generating parameter values for each of the
        available parameters.

        Returns
        -------
        candidate : Candidate
            The generated candidate.
        """
        self.random_state = check_random_state(self.random_state)
        value_dict = {}
        for key, param_def in self._experiment.parameter_definitions.items():
            value_dict[key] = self._gen_param_val(param_def)
        return Candidate(value_dict)
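
_gen_param_val is not shown. For a MinMaxNumericParamDef it plausibly reduces to a uniform draw from the parameter's range; a hedged, self-contained sketch (the lower/upper attribute names are assumptions, not the library's API):

import numpy as np

class FakeMinMaxParamDef(object):
    # Stand-in for MinMaxNumericParamDef(-5, 10); attribute names assumed.
    lower, upper = -5.0, 10.0

def gen_param_val_sketch(param_def, random_state):
    # One uniform sample from the parameter's [lower, upper) range.
    return random_state.uniform(param_def.lower, param_def.upper)

rs = np.random.RandomState(42)
print(gen_param_val_sketch(FakeMinMaxParamDef, rs))  # a float in [-5, 10)
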
Example #7
    def _gen_one_candidate(self):
        """
        Generates a single candidate.

        This is done by generating parameter values for each of the
        available parameters.

        Returns
        -------
        candidate : Candidate
            The generated candidate.
        """
        self._logger.debug("Generating single candidate.")
        self.random_state = check_random_state(self.random_state)
        value_dict = {}
        for key, param_def in self._experiment.parameter_definitions.items():
            value_dict[key] = self._gen_param_val(param_def)
        generated_candidate = Candidate(value_dict)
        self._logger.debug("Generated candidate: %s", generated_candidate)
        return generated_candidate
Example #8
def demo_MNIST_MCMC(steps,
                    random_steps,
                    percentage,
                    cv,
                    plot_at_end=True,
                    disable_auto_plot=False):
    logging.basicConfig(level=logging.DEBUG)
    regressor = SVC(kernel="poly")
    param_defs = {
        "C": MinMaxNumericParamDef(0, 10),
        #"degree": FixedValueParamDef([1, 2, 3]),
        "gamma": MinMaxNumericParamDef(0, 1),
        "coef0": MinMaxNumericParamDef(0, 1)
    }

    random_state_rs = check_random_state(42)

    optimizer_names = ["RandomSearch", "BayOpt_EI", "BayOpt_EI_MCMC"]
    optimizers = ["RandomSearch", "BayOpt", "BayOpt"]
    optimizer_args = [{
        "random_state": random_state_rs
    }, {
        "initial_random_runs": random_steps
    }, {
        "initial_random_runs": random_steps,
        "mcmc": True
    }]

    evaluate_on_mnist(optimizer_names,
                      optimizers,
                      param_defs,
                      optimizer_args,
                      regressor,
                      cv,
                      percentage,
                      steps=steps * cv,
                      random_steps=random_steps,
                      plot_at_end=plot_at_end,
                      disable_auto_plot=disable_auto_plot)
Example #9
def demo_MNIST_LibSVM(steps,
                      random_steps,
                      percentage,
                      cv,
                      plot_at_end=True,
                      disable_auto_plot=False):
    logging.basicConfig(level=logging.DEBUG)

    param_defs = {
        "C": MinMaxNumericParamDef(0, 10),
        "gamma": MinMaxNumericParamDef(0, 1),
        "nu": MinMaxNumericParamDef(0, 1)
    }
    random_state_rs = check_random_state(42)

    regressor = libsvm()

    optimizer_names = ["RandomSearch", "BayOpt_EI"]
    optimizers = ["RandomSearch", "BayOpt"]
    optimizer_args = [{
        "random_state": random_state_rs
    }, {
        "initial_random_runs": random_steps
    }]

    evaluate_on_mnist(optimizer_names,
                      optimizers,
                      param_defs,
                      optimizer_args,
                      regressor,
                      cv,
                      percentage,
                      steps=steps,
                      random_steps=random_steps,
                      plot_at_end=plot_at_end,
                      disable_auto_plot=disable_auto_plot)
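
evaluate_on_mnist is not shown either; the three parallel lists (display names, optimizer identifiers, per-optimizer argument dicts) are presumably consumed pairwise, e.g. via zip. A hypothetical sketch of that consumption, not the library's code:

# Hypothetical sketch; the real pairing happens inside evaluate_on_mnist.
def init_experiments_sketch(optimizer_names, optimizers, optimizer_args):
    for name, optimizer, args in zip(optimizer_names, optimizers, optimizer_args):
        print("experiment %s -> optimizer %s with args %s"
              % (name, optimizer, args))

init_experiments_sketch(
    ["RandomSearch", "BayOpt_EI"],
    ["RandomSearch", "BayOpt"],
    [{"random_state": None}, {"initial_random_runs": 10}])
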
Example #10
    def _get_one_candidate(self, experiment):
        self.random_state = check_random_state(self.random_state)
        value_dict = {}
        for key, param_def in experiment.parameter_definitions.items():
            value_dict[key] = self._gen_param_val(param_def)
        return Candidate(value_dict)
Example #11
    def __init__(self, experiment, optimizer_params=None):
        """
        Initializes a Bayesian optimizer.

        Parameters
        ----------
        experiment : Experiment
            The experiment for which to optimize.
        optimizer_params : dict of string keys, optional
            Sets the possible arguments for this optimizer. Available are:
            "initial_random_runs" : int, optional
                The number of initial random runs before using the GP. Default
                is 10.
            "random_state" : scipy random state, optional
                The scipy random state or object to initialize one. Default is
                None.
            "acquisition_hyperparameters" : dict, optional
                dictionary of acquisition-function hyperparameters
            "num_gp_restarts" : int
                GPy's optimization requires restarts to find a good solution.
                This parameter controls this. Default is 10.
            "acquisition" : AcquisitionFunction
                The acquisition function to use. Default is
                ExpectedImprovement.
            "num_precomputed" : int
                The number of points that should be kept precomputed for faster
                multiple workers.
        """
        self._logger = get_logger(self)
        self._logger.debug(
            "Initializing bayesian optimizer. Experiment is %s,"
            " optimizer_params %s", experiment, optimizer_params)
        if optimizer_params is None:
            optimizer_params = {}

        self.initial_random_runs = optimizer_params.get(
            'initial_random_runs', self.initial_random_runs)
        self.random_state = check_random_state(
            optimizer_params.get('random_state', None))
        self.acquisition_hyperparams = optimizer_params.get(
            'acquisition_hyperparams', None)
        self.num_gp_restarts = optimizer_params.get('num_gp_restarts',
                                                    self.num_gp_restarts)

        self._logger.debug(
            "Initialized relevant parameters. "
            "initial_random_runs is %s, random_state is %s, "
            "acquisition_hyperparams %s, num_gp_restarts %s",
            self.initial_random_runs, self.random_state,
            self.acquisition_hyperparams, self.num_gp_restarts)

        if not isinstance(optimizer_params.get('acquisition'),
                          AcquisitionFunction):
            self.acquisition_function = optimizer_params.get(
                "acquisition", ExpectedImprovement)
            self.acquisition_function = check_acquisition(
                acquisition=self.acquisition_function,
                acquisition_params=self.acquisition_hyperparams)
            self._logger.debug(
                "acquisition is not an AcquisitionFunction instance; set "
                "it to %s", self.acquisition_function)
        else:
            self.acquisition_function = optimizer_params.get("acquisition")
            self._logger.debug(
                "Loaded acquisition function from "
                "optimizer_params: %s", self.acquisition_function)
        self.kernel_params = optimizer_params.get("kernel_params", {})
        self.kernel = optimizer_params.get("kernel", "matern52")

        self._logger.debug("Kernel details: Kernel is %s, kernel_params %s",
                           self.kernel, self.kernel_params)

        self.random_searcher = RandomSearch(experiment, optimizer_params)
        self._logger.debug("Initialized required RandomSearcher; is %s",
                           self.random_searcher)
        Optimizer.__init__(self, experiment, optimizer_params)
        self._logger.debug("Finished initializing bayOpt.")
Example #12
    def _get_one_candidate(self, experiment):
        self.random_state = check_random_state(self.random_state)
        value_dict = {}
        for key, param_def in experiment.parameter_definitions.items():
            value_dict[key] = self._gen_param_val(param_def)
        return Candidate(value_dict)