def __init__(self, R=60, eta=3, optimize_mode='maximize', exec_mode='parallelism'):
        """B = (s_max + 1)R"""
        super(Hyperband, self).__init__()
        self.R = R
        self.eta = eta
        self.brackets = dict()  # dict of Bracket
        self.generated_hyper_configs = []  # all the configs waiting for run
        self.completed_hyper_configs = []  # all the completed configs
        self.s_max = math.floor(math.log(self.R, self.eta) + _epsilon)
        self.curr_s = self.s_max
        self.curr_hb = 0
        self.exec_mode = exec_mode
        self.curr_bracket_id = None

        self.searchspace_json = None
        self.random_state = None
        self.optimize_mode = OptimizeMode(optimize_mode)

        # This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.
        # In this case, tuner increases self.credit to issue a trial config sometime later.
        self.credit = 0

        # record the latest parameter_id of the trial job trial_job_id.
        # if there is no running parameter_id, self.job_id_para_id_map[trial_job_id] == None
        # new trial job is added to this dict and finished trial job is removed from it.
        self.job_id_para_id_map = dict()
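For orientation, the docstring's B = (s_max + 1)R and the s_max expression above evaluate as follows with the default arguments; this is a standalone check, not part of the tuner:

import math

_epsilon = 1e-6
R, eta = 60, 3
s_max = math.floor(math.log(R, eta) + _epsilon)  # floor(log_3(60)) = 3
B = (s_max + 1) * R                              # total budget from the docstring: 4 * 60 = 240
print(s_max, B)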
Example 2
    def __init__(self, model, config_list, trainer, evaluator, dummy_input, criterion=torch.nn.CrossEntropyLoss(),
                 num_iterations=3, optimize_mode='maximize', base_algo='l1',
                 # SimulatedAnnealing related
                 start_temperature=100, stop_temperature=20, cool_down_rate=0.9, perturbation_magnitude=0.35,
                 # ADMM related
                 admm_num_iterations=30, admm_epochs_per_iteration=5, row=1e-4,
                 experiment_data_dir='./'):
        # original model
        self._model_to_prune = model
        self._base_algo = base_algo

        self._trainer = trainer
        self._criterion = criterion
        self._evaluator = evaluator
        self._dummy_input = dummy_input
        self._num_iterations = num_iterations
        self._optimize_mode = OptimizeMode(optimize_mode)

        # hyper parameters for SA algorithm
        self._start_temperature = start_temperature
        self._stop_temperature = stop_temperature
        self._cool_down_rate = cool_down_rate
        self._perturbation_magnitude = perturbation_magnitude

        # hyper parameters for ADMM algorithm
        self._admm_num_iterations = admm_num_iterations
        self._admm_epochs_per_iteration = admm_epochs_per_iteration
        self._row = row

        # overall pruning rate
        self._sparsity = config_list[0]['sparsity']

        self._experiment_data_dir = experiment_data_dir
        if not os.path.exists(self._experiment_data_dir):
            os.makedirs(self._experiment_data_dir)
Example 3
    def __init__(self,
                 algorithm_name,
                 optimize_mode='minimize',
                 parallel_optimize=False,
                 constant_liar_type='min'):
        """
        Parameters
        ----------
        algorithm_name : str
            algorithm_name includes "tpe", "random_search" and "anneal".
        optimize_mode : str
        parallel_optimize : bool
            For more details, see docs/en_US/Tuner/HyperoptTuner.md.
        constant_liar_type : str
            constant_liar_type can be "min", "max" or "mean".
            For more details, see docs/en_US/Tuner/HyperoptTuner.md.
        """
        self.algorithm_name = algorithm_name
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.json = None
        self.total_data = {}
        self.rval = None
        self.supplement_data_num = 0

        self.parallel = parallel_optimize
        if self.parallel:
            self.CL_rval = None
            self.constant_liar_type = constant_liar_type
            self.running_data = []
            self.optimal_y = None
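The constant-liar fields above only matter when parallel_optimize is enabled; the sketch below (with a hypothetical constant_liar helper, not the tuner's code) illustrates the general idea: still-running trials are temporarily assigned a fake objective value so the optimizer does not re-suggest the same region.

def constant_liar(observed_y, liar_type='min'):
    """Return the fake objective value assigned to still-running trials.

    observed_y is the list of results already reported; liar_type mirrors
    the tuner's constant_liar_type argument ('min', 'max', 'mean').
    """
    if not observed_y:
        return 0.0  # nothing observed yet; an arbitrary neutral value
    if liar_type == 'min':
        return min(observed_y)
    if liar_type == 'max':
        return max(observed_y)
    return sum(observed_y) / len(observed_y)  # 'mean'

# e.g. with results [0.8, 0.6, 0.9] and liar_type='min', pending trials are
# treated as if they had already returned 0.6.
print(constant_liar([0.8, 0.6, 0.9], 'min'))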
Example 4
    def __init__(self, model, config_list, evaluator, optimize_mode='maximize', base_algo='l1',
                 start_temperature=100, stop_temperature=20, cool_down_rate=0.9, perturbation_magnitude=0.35, experiment_data_dir='./'):
        # original model
        self._model_to_prune = copy.deepcopy(model)
        self._base_algo = base_algo

        super().__init__(model, config_list)

        self._evaluator = evaluator
        self._optimize_mode = OptimizeMode(optimize_mode)

        # hyper parameters for SA algorithm
        self._start_temperature = start_temperature
        self._current_temperature = start_temperature
        self._stop_temperature = stop_temperature
        self._cool_down_rate = cool_down_rate
        self._perturbation_magnitude = perturbation_magnitude

        # overall pruning rate
        self._sparsity = config_list[0]['sparsity']
        # pruning rates of the layers
        self._sparsities = None

        # init current performance & best performance
        self._current_performance = -np.inf
        self._best_performance = -np.inf
        self._best_config_list = []

        self._search_history = []

        self._experiment_data_dir = experiment_data_dir
        if not os.path.exists(self._experiment_data_dir):
            os.makedirs(self._experiment_data_dir)
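As a rough feel for the simulated-annealing hyperparameters above, the standalone sketch below counts how many cooling steps the defaults allow before the temperature falls below stop_temperature; it is illustrative only, not the pruner's actual search loop.

def count_sa_rounds(start_temperature=100, stop_temperature=20, cool_down_rate=0.9):
    """Count how many times T *= cool_down_rate runs before T <= stop_temperature."""
    temperature, rounds = start_temperature, 0
    while temperature > stop_temperature:
        temperature *= cool_down_rate
        rounds += 1
    return rounds

# With the defaults above: 100 * 0.9**k <= 20  =>  k >= log(0.2)/log(0.9) ~= 15.3,
# so the search performs 16 cooling steps.
print(count_sa_rounds())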
Example 5
    def __init__(self, optimize_mode="maximize", all_checkpoint_dir=None, population_size=10, factor=0.2,
                 resample_probability=0.25, fraction=0.2):
        self.optimize_mode = OptimizeMode(optimize_mode)
        if all_checkpoint_dir is None:
            all_checkpoint_dir = os.getenv('NNI_CHECKPOINT_DIRECTORY')
            logger.info("Checkpoint dir is set to %s by default.", all_checkpoint_dir)
        self.all_checkpoint_dir = all_checkpoint_dir
        self.population_size = population_size
        self.factor = factor
        self.resample_probability = resample_probability
        self.fraction = fraction
        # defined in trial code
        #self.perturbation_interval = perturbation_interval

        self.population = None
        self.pos = -1
        self.param_ids = []
        self.running = {}
        self.finished = []
        self.credit = 0
        self.finished_trials = 0
        self.epoch = 0

        self.searchspace_json = None
        self.space = None

        self.send_trial_callback = None

        logger.info('PBT tuner initialization')
Example 6
    def __init__(self,
                 model,
                 config_list,
                 short_term_fine_tuner,
                 evaluator,
                 optimize_mode='maximize',
                 base_algo='l1',
                 sparsity_per_iteration=0.05,
                 experiment_data_dir='./'):
        # models used for iterative pruning and evaluation
        self._model_to_prune = copy.deepcopy(model)
        self._base_algo = base_algo

        super().__init__(model, config_list)

        self._short_term_fine_tuner = short_term_fine_tuner
        self._evaluator = evaluator
        self._optimize_mode = OptimizeMode(optimize_mode)

        # hyper parameters for NetAdapt algorithm
        self._sparsity_per_iteration = sparsity_per_iteration

        # overall pruning rate
        self._sparsity = config_list[0]['sparsity']

        # config_list
        self._config_list_generated = []

        self._experiment_data_dir = experiment_data_dir
        if not os.path.exists(self._experiment_data_dir):
            os.makedirs(self._experiment_data_dir)

        self._tmp_model_path = os.path.join(self._experiment_data_dir,
                                            'tmp_model.pth')
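With sparsity_per_iteration as above, the number of NetAdapt iterations needed to reach the overall target sparsity can be estimated as follows (an illustrative back-of-the-envelope helper, not the pruner's code):

import math

def netadapt_iterations(target_sparsity, sparsity_per_iteration=0.05):
    """Iterations needed when each iteration removes another
    sparsity_per_iteration of the weights."""
    return math.ceil(target_sparsity / sparsity_per_iteration)

# e.g. config_list = [{'sparsity': 0.5}] with the default 0.05 per iteration
# gives 10 iterations.
print(netadapt_iterations(0.5))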
Example 7
 def __init__(self,
              optimize_mode="maximize",
              no_resampling=True,
              no_candidates=False,
              selection_num_starting_points=600,
              cold_start_num=10,
              exploration_probability=0.9):
     self.samples_x = []
     self.samples_y = []
     self.samples_y_aggregation = []
     self.total_data = []
     self.space = None
     self.no_resampling = no_resampling
     self.no_candidates = no_candidates
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.key_order = []
     self.cold_start_num = cold_start_num
     self.selection_num_starting_points = selection_num_starting_points
     self.exploration_probability = exploration_probability
     self.minimize_constraints_fun = None
     self.minimize_starting_points = None
     self.supplement_data_num = 0
      # The constraints of the parameters
     self.x_bounds = []
     # The type of parameters
     self.x_types = []
Example 8
    def __init__(self,
                 optimize_mode,
                 trials_per_update=20,
                 epochs_per_update=4,
                 minibatch_size=4,
                 ent_coef=0.0,
                 lr=3e-4,
                 vf_coef=0.5,
                 max_grad_norm=0.5,
                 gamma=0.99,
                 lam=0.95,
                 cliprange=0.2):
        """
        Initialization. The PPO model is not initialized here because the search space has not been received yet.

        Parameters
        ----------
        optimize_mode:         maximize or minimize
        trials_per_update:     number of trials to have for each model update
        epochs_per_update:     number of epochs to run for each model update
        minibatch_size:        minibatch size (number of trials) for the update
        ent_coef:              policy entropy coefficient in the optimization objective
        lr:                    learning rate of the model (lstm network), constant
        vf_coef:               value function loss coefficient in the optimization objective
        max_grad_norm:         gradient norm clipping coefficient
        gamma:                 discounting factor
        lam:                   advantage estimation discounting factor (lambda in the paper)
        cliprange:             cliprange in the PPO algorithm, constant
        """
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.model_config = ModelConfig()
        self.model = None
        self.search_space = None
        self.running_trials = {
        }  # key: parameter_id, value: actions/states/etc.
        self.inf_batch_size = trials_per_update  # number of trials to generate in one inference
        self.first_inf = True  # indicates whether this is the first time to infer new trials
        self.trials_result = [None for _ in range(self.inf_batch_size)
                              ]  # results of finished trials

        self.credit = 0  # record the unsatisfied trial requests
        self.param_ids = []
        self.finished_trials = 0
        self.chosen_arch_template = {}

        self.actions_spaces = None
        self.actions_to_config = None
        self.full_act_space = None
        self.trials_info = None

        self.all_trials = {
        }  # used to dedup the same trial, key: config, value: final result

        self.model_config.num_envs = self.inf_batch_size
        self.model_config.noptepochs = epochs_per_update
        self.model_config.nminibatches = minibatch_size

        self.send_trial_callback = None
        logger.info('=== finished PPOTuner initialization')
Example 9
 def __init__(self, optimize_mode="maximize", population_size=100, sample_size=25):
     super(RegularizedEvolutionTuner, self).__init__()
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.population_size = population_size
     self.sample_size = sample_size
     self.initial_population = deque()
     self.population = deque()
     self.history = {}
     self.search_space = None
     self._from_initial = {}  # whether the parameter is from initial population
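The population, sample_size and deque fields above follow the usual regularized (aging) evolution loop; below is a self-contained sketch of that loop on a toy objective, with hypothetical objective/mutate callables, not the tuner's implementation.

import random
from collections import deque

def regularized_evolution(objective, mutate, initial, population_size=100,
                          sample_size=25, cycles=1000):
    """Aging evolution: sample, pick the best as parent, mutate, drop the oldest."""
    population = deque()
    history = []
    while len(population) < population_size:
        candidate = random.choice(initial)
        population.append((candidate, objective(candidate)))
        history.append(population[-1])
    for _ in range(cycles):
        sample = random.sample(list(population), sample_size)
        parent = max(sample, key=lambda item: item[1])   # best of the sample
        child = mutate(parent[0])
        population.append((child, objective(child)))
        history.append(population[-1])
        population.popleft()                             # age out the oldest member
    return max(history, key=lambda item: item[1])

# toy usage: maximize -(x - 3)^2 by mutating a float
best = regularized_evolution(lambda x: -(x - 3) ** 2,
                             lambda x: x + random.gauss(0, 0.1),
                             initial=[0.0])
print(best)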
Example 10
    def __init__(
        self,
        task="cv",
        input_width=32,
        input_channel=3,
        n_output_node=10,
        algorithm_name="Bayesian",
        optimize_mode="maximize",
        path="model_path",
        verbose=True,
        beta=Constant.BETA,
        t_min=Constant.T_MIN,
        max_model_size=Constant.MAX_MODEL_SIZE,
        default_model_len=Constant.MODEL_LEN,
        default_model_width=Constant.MODEL_WIDTH,
    ):
        """
        Initializer of the NetworkMorphismTuner.
        """

        if not os.path.exists(path):
            os.makedirs(path)
        self.path = os.path.join(os.getcwd(), path)
        if task == "cv":
            self.generators = [CnnGenerator]
        elif task == "common":
            self.generators = [MlpGenerator]
        else:
            raise NotImplementedError(
                '{} task not supported in List ["cv","common"]'.format(task))

        self.n_classes = n_output_node
        self.input_shape = (input_width, input_width, input_channel)

        self.t_min = t_min
        self.beta = beta
        self.algorithm_name = algorithm_name
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.json = None
        self.total_data = {}
        self.verbose = verbose
        self.model_count = 0

        self.bo = BayesianOptimizer(self, self.t_min, self.optimize_mode,
                                    self.beta)
        self.training_queue = []
        self.descriptors = []
        self.history = []

        self.max_model_size = max_model_size
        self.default_model_len = default_model_len
        self.default_model_width = default_model_width

        self.search_space = dict()
Example 11
 def __init__(self, optimize_mode="maximize", config_dedup=False):
     self.logger = logger
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.total_data = {}
     self.optimizer = None
     self.smbo_solver = None
     self.first_one = True
     self.update_ss_done = False
     self.loguniform_key = set()
     self.categorical_dict = {}
     self.cs = None
     self.dedup = config_dedup
Example 12
    def __init__(self,
                 optimize_mode="maximize",
                 all_checkpoint_dir=None,
                 population_size=10,
                 factor=0.2,
                 resample_probability=0.25,
                 fraction=0.2):
        """
        Initialization

        Parameters
        ----------
        optimize_mode : str
            maximize or minimize
        all_checkpoint_dir : str
            directory to store training model checkpoint
        population_size : int
            number of trials for each epoch
        factor : float
            factor for perturbation
        resample_probability : float
            probability for resampling
        fraction : float
            fraction for selecting bottom and top trials
        """
        self.optimize_mode = OptimizeMode(optimize_mode)
        if all_checkpoint_dir is None:
            all_checkpoint_dir = os.getenv('NNI_CHECKPOINT_DIRECTORY')
            logger.info("Checkpoint dir is set to %s by default.",
                        all_checkpoint_dir)
        self.all_checkpoint_dir = all_checkpoint_dir
        self.population_size = population_size
        self.factor = factor
        self.resample_probability = resample_probability
        self.fraction = fraction
        # defined in trial code
        #self.perturbation_interval = perturbation_interval

        self.population = None
        self.pos = -1
        self.param_ids = []
        self.running = {}
        self.finished = []
        self.credit = 0
        self.finished_trials = 0
        self.epoch = 0

        self.searchspace_json = None
        self.space = None

        self.send_trial_callback = None

        logger.info('PBT tuner initialization')
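To make factor, resample_probability and fraction concrete: in a PBT-style explore step, the bottom fraction of trials copy a checkpoint from the top fraction and then perturb each hyperparameter. The sketch below (hypothetical explore helper, not the tuner's code) shows the usual perturbation rule.

import random

def explore(hyperparameters, search_values, factor=0.2, resample_probability=0.25):
    """PBT-style perturbation of a copied hyperparameter dict (illustrative sketch).

    With probability resample_probability a value is resampled from its
    search-space values, otherwise it is scaled by (1 - factor) or (1 + factor).
    """
    new_params = {}
    for name, value in hyperparameters.items():
        if random.random() < resample_probability:
            new_params[name] = random.choice(search_values[name])
        else:
            new_params[name] = value * random.choice([1 - factor, 1 + factor])
    return new_params

# e.g. perturb a learning rate and a momentum value
print(explore({'lr': 0.01, 'momentum': 0.9},
              {'lr': [0.001, 0.01, 0.1], 'momentum': [0.8, 0.9, 0.99]}))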
Example 13
    def __init__(self, optimize_mode='maximize', population_size=32):
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.population_size = population_size

        self.searchspace_json = None
        self.running_trials = {}
        self.num_running_trials = 0
        self.random_state = None
        self.population = None
        self.space = None
        self.credit = 0  # record the unsatisfied trial requests
        self.send_trial_callback = None
        self.param_ids = deque()
Example 14
 def __init__(self, s, s_max, eta, R, optimize_mode):
     self.bracket_id = s
     self.s_max = s_max
     self.eta = eta
     self.n = math.ceil((s_max + 1) * (eta**s) / (s + 1) - _epsilon) # pylint: disable=invalid-name
     self.r = R / eta**s                     # pylint: disable=invalid-name
     self.i = 0
     self.hyper_configs = []         # [ {id: params}, {}, ... ]
     self.configs_perf = []          # [ {id: [seq, acc]}, {}, ... ]
     self.num_configs_to_run = []    # [ n, n, n, ... ]
     self.num_finished_configs = []  # [ n, n, n, ... ]
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.no_more_trial = False
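Plugging the constructor arguments into the formulas for n and r above, the successive-halving rounds inside one bracket can be tabulated as follows (an illustrative check with s_max = 3, eta = 3, R = 60, not the Bracket class itself):

import math

_epsilon = 1e-6

def bracket_rounds(s, s_max=3, eta=3, R=60):
    """Successive-halving rounds inside bracket s (illustrative sketch)."""
    n = math.ceil((s_max + 1) * eta**s / (s + 1) - _epsilon)
    r = R / eta**s
    rounds = []
    for i in range(s + 1):
        # at round i, keep n_i configs and give each of them budget r_i
        n_i = math.floor(n / eta**i + _epsilon)
        r_i = r * eta**i
        rounds.append((n_i, r_i))
    return rounds

# bracket s=3 with R=60, eta=3 keeps roughly (27, 9, 3, 1) configs
# at budgets of about (2.2, 6.7, 20, 60)
print(bracket_rounds(3))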
Example 15
 def __init__(self, optimize_mode):
     """Constructor"""
     self.logger = logging.getLogger(
         self.__module__ + "." + self.__class__.__name__)
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.total_data = {}
     self.optimizer = None
     self.smbo_solver = None
     self.first_one = True
     self.update_ss_done = False
     self.loguniform_key = set()
     self.categorical_dict = {}
     self.cs = None
Example 16
 def __init__(self, algorithm_name, optimize_mode='minimize'):
     """
     Parameters
     ----------
     algorithm_name : str
         algorithm_name includes "tpe", "random_search" and "anneal".
     optimize_mode : str
     """
     self.algorithm_name = algorithm_name
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.json = None
     self.total_data = {}
     self.rval = None
     self.supplement_data_num = 0
Example 17
    def __init__(self,
                 optimize_mode="maximize",
                 no_resampling=True,
                 no_candidates=False,
                 selection_num_starting_points=600,
                 cold_start_num=10,
                 exploration_probability=0.9):
        """
        Parameters
        ----------
        optimize_mode : str
            optimize_mode is a string with two modes, "maximize" and "minimize".

        no_resampling : bool
            True or False. Should Metis consider re-sampling as part of the search strategy?
            If you are confident that the training dataset is noise-free, then you do not need re-sampling.

        no_candidates : bool
            True or False. Should Metis suggest parameters for the next benchmark?
            If you do not plan to do more benchmarks, Metis can skip this step.

        selection_num_starting_points : int
            How many times Metis should try to find the global optimum in the search space.
            The higher the number, the longer it takes to output the solution.

        cold_start_num : int
            Metis needs some trial results for a cold start. When the number of trial results is less than
            cold_start_num, Metis randomly samples hyper-parameters for the trials.

        exploration_probability : float
            The probability that Metis selects parameters by exploration instead of exploitation.
        """

        self.samples_x = []
        self.samples_y = []
        self.samples_y_aggregation = []
        self.total_data = []
        self.space = None
        self.no_resampling = no_resampling
        self.no_candidates = no_candidates
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.key_order = []
        self.cold_start_num = cold_start_num
        self.selection_num_starting_points = selection_num_starting_points
        self.exploration_probability = exploration_probability
        self.minimize_constraints_fun = None
        self.minimize_starting_points = None
        self.supplement_data_num = 0
        self.x_bounds = []
        self.x_types = []
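exploration_probability above amounts to a coin flip between exploring new regions and exploiting the surrogate model's current best guess; a generic sketch of that decision (not Metis internals):

import random

def choose_next_action(exploration_probability=0.9):
    """Decide whether the next suggestion comes from exploration or exploitation."""
    if random.random() < exploration_probability:
        return 'exploration'   # sample away from known-good regions
    return 'exploitation'      # optimize the surrogate model's prediction

# With the default 0.9, roughly 9 out of 10 suggestions are exploratory.
print(choose_next_action())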
Example 18
    def __init__(self,
                 optimize_mode='maximize',
                 min_budget=1,
                 max_budget=3,
                 eta=3,
                 min_points_in_model=None,
                 top_n_percent=15,
                 num_samples=64,
                 random_fraction=1 / 3,
                 bandwidth_factor=3,
                 min_bandwidth=1e-3,
                 config_space=None):
        super(BOHB, self).__init__()
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.min_budget = min_budget
        self.max_budget = max_budget
        self.eta = eta
        self.min_points_in_model = min_points_in_model
        self.top_n_percent = top_n_percent
        self.num_samples = num_samples
        self.random_fraction = random_fraction
        self.bandwidth_factor = bandwidth_factor
        self.min_bandwidth = min_bandwidth
        self.config_space = config_space

        # all the configs waiting for run
        self.generated_hyper_configs = []
        # all the completed configs
        self.completed_hyper_configs = []

        self.s_max = math.floor(
            math.log(self.max_budget / self.min_budget, self.eta) + _epsilon)
        # current bracket(s) number
        self.curr_s = self.s_max
        # This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.
        # In this case, tuner increases self.credit to issue a trial config sometime later.
        self.credit = 0
        self.brackets = dict()
        self.search_space = None
        # [key, value] = [parameter_id, parameter]
        self.parameters = dict()

        # config generator
        self.cg = None

        # record the latest parameter_id of the trial job trial_job_id.
        # if there is no running parameter_id, self.job_id_para_id_map[trial_job_id] == None
        # new trial job is added to this dict and finished trial job is removed from it.
        self.job_id_para_id_map = dict()
        # record the unsatisfied parameter request from trial jobs
        self.unsatisfied_jobs = []
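With the defaults min_budget=1, max_budget=3 and eta=3, the s_max expression above yields only two brackets; a quick standalone check:

import math

_epsilon = 1e-6
min_budget, max_budget, eta = 1, 3, 3
s_max = math.floor(math.log(max_budget / min_budget, eta) + _epsilon)
print(s_max)  # 1: only brackets s=1 and s=0 are generated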
Example 19
    def __init__(self, s, s_max, eta, max_budget, optimize_mode):
        self.s = s
        self.s_max = s_max
        self.eta = eta
        self.max_budget = max_budget
        self.optimize_mode = OptimizeMode(optimize_mode)

        self.n = math.ceil((s_max + 1) * eta**s / (s + 1) - _epsilon)
        self.r = max_budget / eta**s
        self.i = 0
        self.hyper_configs = []  # [ {id: params}, {}, ... ]
        self.configs_perf = []  # [ {id: [seq, acc]}, {}, ... ]
        self.num_configs_to_run = []  # [ n, n, n, ... ]
        self.num_finished_configs = []  # [ n, n, n, ... ]
        self.no_more_trial = False
Example 20
 def __init__(self,
              tuun_config,
              optimize_mode="maximize",
              initial_data=None):
     """
     Parameters
     ----------
     tuun_config : dict
         Config to specify Tuun options.
     optimize_mode : str
         Optimize mode, 'maximize' or 'minimize'.
     initial_data : dict
         Dictionary with keys x (list) and y (1D numpy ndarray).
     """
     self._set_tuun(tuun_config)
     self._set_data(initial_data)
     assert isinstance(tuun_config, dict)
     assert optimize_mode in ['minimize', 'maximize']
     self._optimize_mode = OptimizeMode(optimize_mode)
Example 21
    def __init__(self,
                 algorithm_name,
                 optimize_mode='minimize',
                 parallel_optimize=False,
                 constant_liar_type='min'):
        self.algorithm_name = algorithm_name
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.json = None
        self.total_data = {}
        self.rval = None
        self.supplement_data_num = 0

        self.parallel = parallel_optimize
        if self.parallel:
            self.CL_rval = None
            self.constant_liar_type = constant_liar_type
            self.running_data = []
            self.optimal_y = None
Example 22
 def __init__(self, optimize_mode="maximize"):
     """
     Parameters
     ----------
     optimize_mode : str
         Optimize mode, 'maximize' or 'minimize', by default 'maximize'
     """
     self.logger = logging.getLogger(
         self.__module__ + "." + self.__class__.__name__)
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.total_data = {}
     self.optimizer = None
     self.smbo_solver = None
     self.first_one = True
     self.update_ss_done = False
     self.loguniform_key = set()
     self.categorical_dict = {}
     self.cs = None
Example 23
    def __init__(self, R, eta=3, optimize_mode='maximize'):
        """B = (s_max + 1)R"""
        super(Hyperband, self).__init__()
        self.R = R  # pylint: disable=invalid-name
        self.eta = eta
        self.brackets = dict()  # dict of Bracket
        self.generated_hyper_configs = []  # all the configs waiting for run
        self.completed_hyper_configs = []  # all the completed configs
        self.s_max = math.floor(math.log(self.R, self.eta) + _epsilon)
        self.curr_s = self.s_max

        self.searchspace_json = None
        self.random_state = None
        self.optimize_mode = OptimizeMode(optimize_mode)

        # This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.
        # In this case, tuner increases self.credit to issue a trial config sometime later.
        self.credit = 0
Example 24
    def __init__(self, optimize_mode, population_size=32):
        """
        Parameters
        ----------
        optimize_mode : str
        population_size : int
            Initial population size. The larger the population size,
            the better the evolution performance.
        """
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.population_size = population_size

        self.trial_result = []
        self.searchspace_json = None
        self.total_data = {}
        self.random_state = None
        self.population = None
        self.space = None
Example 25
    def __init__(self, optimize_mode='maximize', feature_percent=0.6):
        """Initlization function
        count : 
        optimize_mode : contains "Maximize" or "Minimize" mode.
        search_space : define which features that tuner need to search
        feature_percent : @mengjiao
        default_space : @mengjiao 
        epoch_importance : @mengjiao
        estimate_sample_prob : @mengjiao
        """
        self.count = -1
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.search_space = None
        self.feature_percent = feature_percent
        self.default_space = []
        self.epoch_importance = []
        self.estimate_sample_prob = None

        logger.debug('init auto-fe done.')
Example 26
    def __init__(self,
                 optimize_mode,
                 trials_per_update=20,
                 epochs_per_update=4,
                 minibatch_size=4,
                 ent_coef=0.0,
                 lr=3e-4,
                 vf_coef=0.5,
                 max_grad_norm=0.5,
                 gamma=0.99,
                 lam=0.95,
                 cliprange=0.2):
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.model_config = ModelConfig()
        self.model = None
        self.search_space = None
        self.running_trials = {
        }  # key: parameter_id, value: actions/states/etc.
        self.inf_batch_size = trials_per_update  # number of trials to generate in one inference
        self.first_inf = True  # indicates whether this is the first time to infer new trials
        self.trials_result = [None for _ in range(self.inf_batch_size)
                              ]  # results of finished trials

        self.credit = 0  # record the unsatisfied trial requests
        self.param_ids = []
        self.finished_trials = 0
        self.chosen_arch_template = {}

        self.actions_spaces = None
        self.actions_to_config = None
        self.full_act_space = None
        self.trials_info = None

        self.all_trials = {
        }  # used to dedup the same trial, key: config, value: final result

        self.model_config.num_envs = self.inf_batch_size
        self.model_config.noptepochs = epochs_per_update
        self.model_config.nminibatches = minibatch_size

        self.send_trial_callback = None
        logger.info('Finished PPOTuner initialization')
Example 27
    def __init__(self,
                 optimize_mode='maximize',
                 min_budget=1,
                 max_budget=3,
                 eta=3,
                 min_points_in_model=None,
                 top_n_percent=15,
                 num_samples=64,
                 random_fraction=1 / 3,
                 bandwidth_factor=3,
                 min_bandwidth=1e-3):
        super(BOHB, self).__init__()
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.min_budget = min_budget
        self.max_budget = max_budget
        self.eta = eta
        self.min_points_in_model = min_points_in_model
        self.top_n_percent = top_n_percent
        self.num_samples = num_samples
        self.random_fraction = random_fraction
        self.bandwidth_factor = bandwidth_factor
        self.min_bandwidth = min_bandwidth

        # all the configs waiting for run
        self.generated_hyper_configs = []
        # all the completed configs
        self.completed_hyper_configs = []

        self.s_max = math.floor(
            math.log(self.max_budget / self.min_budget, self.eta) + _epsilon)
        # current bracket(s) number
        self.curr_s = self.s_max
        # This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.
        # In this case, tuner increases self.credit to issue a trial config sometime later.
        self.credit = 0
        self.brackets = dict()
        self.search_space = None
        # [key, value] = [parameter_id, parameter]
        self.parameters = dict()

        # config generator
        self.cg = None
Example 28
 def __init__(self, optimize_mode="maximize", config_dedup=False):
     """
     Parameters
     ----------
     optimize_mode : str
         Optimize mode, 'maximize' or 'minimize', by default 'maximize'
     config_dedup : bool
          If True, the tuner will not generate a configuration that has already been generated.
          If False, a configuration may be generated twice, but this is rare for a relatively large search space.
     """
     self.logger = logger
     self.optimize_mode = OptimizeMode(optimize_mode)
     self.total_data = {}
     self.optimizer = None
     self.smbo_solver = None
     self.first_one = True
     self.update_ss_done = False
     self.loguniform_key = set()
     self.categorical_dict = {}
     self.cs = None
     self.dedup = config_dedup
Example 29
    def __init__(self, optimize_mode="maximize", population_size=32):
        """
        Parameters
        ----------
        optimize_mode : str, default 'maximize'
        population_size : int
            Initial population size. The larger the population size,
            the better the evolution performance.
        """
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.population_size = population_size

        self.searchspace_json = None
        self.running_trials = {}
        self.num_running_trials = 0
        self.random_state = None
        self.population = None
        self.space = None
        self.credit = 0  # record the unsatisfied trial requests
        self.send_trial_callback = None
        self.param_ids = deque()
Example 30
    def __init__(self,
                 optimize_mode="maximize",
                 utility='ei',
                 kappa=5,
                 xi=0,
                 nu=2.5,
                 alpha=1e-6,
                 cold_start_num=10,
                 selection_num_warm_up=100000,
                 selection_num_starting_points=250):
        self._optimize_mode = OptimizeMode(optimize_mode)

        # utility function related
        self._utility = utility
        self._kappa = kappa
        self._xi = xi

        # target space
        self._space = None

        self._random_state = np.random.RandomState()

        # nu, alpha are GPR related params
        self._gp = GaussianProcessRegressor(kernel=Matern(nu=nu),
                                            alpha=alpha,
                                            normalize_y=True,
                                            n_restarts_optimizer=25,
                                            random_state=self._random_state)
        # num of random evaluations before GPR
        self._cold_start_num = cold_start_num

        # params for acq_max
        self._selection_num_warm_up = selection_num_warm_up
        self._selection_num_starting_points = selection_num_starting_points

        # num of imported data
        self._supplement_data_num = 0
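The GaussianProcessRegressor above is typically paired with an acquisition function that trades the predicted mean against uncertainty; the kappa argument suggests an upper-confidence-bound style choice. Below is a minimal, self-contained UCB sketch on a toy 1-D problem using scikit-learn and numpy; it illustrates the idea and is not GPTuner's actual acquisition code.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern

def ucb(gp, candidates, kappa=5):
    """Upper-confidence-bound acquisition: mean + kappa * std at each candidate."""
    mean, std = gp.predict(candidates, return_std=True)
    return mean + kappa * std

# toy 1-D example
rng = np.random.RandomState(0)
x_observed = rng.uniform(0, 1, size=(10, 1))
y_observed = np.sin(6 * x_observed).ravel()

gp = GaussianProcessRegressor(kernel=Matern(nu=2.5), alpha=1e-6, normalize_y=True)
gp.fit(x_observed, y_observed)

candidates = np.linspace(0, 1, 100).reshape(-1, 1)
best = candidates[np.argmax(ucb(gp, candidates))]
print(best)  # the next point a UCB strategy would evaluate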