Example #1
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner
from nni.retiarii import Sampler  # Retiarii sampler base class; path may vary across NNI versions


class TPESampler(Sampler):
    def __init__(self, optimize_mode='minimize'):
        self.tpe_tuner = HyperoptTuner('tpe', optimize_mode)
        self.cur_sample = None        # parameters suggested for the current model
        self.index = None             # position of the next choice to hand out
        self.total_parameters = {}    # model_id -> parameters, kept for result reporting

    def update_sample_space(self, sample_space):
        # Expose each candidate list as a 'choice' entry keyed by its position.
        search_space = {}
        for i, each in enumerate(sample_space):
            search_space[str(i)] = {'_type': 'choice', '_value': each}
        self.tpe_tuner.update_search_space(search_space)

    def generate_samples(self, model_id):
        # Ask TPE for one complete set of choices and reset the cursor.
        self.cur_sample = self.tpe_tuner.generate_parameters(model_id)
        self.total_parameters[model_id] = self.cur_sample
        self.index = 0

    def receive_result(self, model_id, result):
        # Report the trial metric so TPE can refine its density estimates.
        self.tpe_tuner.receive_trial_result(model_id,
                                            self.total_parameters[model_id],
                                            result)

    def choice(self, candidates, mutator, model, index):
        # Hand out the pre-generated choices one by one, in mutation order.
        chosen = self.cur_sample[str(self.index)]
        self.index += 1
        return chosen
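
For orientation, a rough driver loop for this sampler could look like the sketch below. train_and_evaluate is a placeholder, and in the real TPE strategy the choice() calls happen inside Retiarii's mutators rather than by hand:

sampler = TPESampler(optimize_mode='maximize')

# One candidate list per mutation point, registered up front.
sampler.update_sample_space([[16, 32, 64], ['relu', 'gelu']])

for model_id in range(10):
    sampler.generate_samples(model_id)        # draw one full TPE suggestion
    # Mutators would now call sampler.choice(...) once per mutation point,
    # consuming the pre-generated sample in order.
    metric = train_and_evaluate(model_id)     # placeholder evaluation
    sampler.receive_result(model_id, metric)  # report the metric back to TPE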
Example #2
from typing import Optional

from nni.retiarii import Sampler  # base class; path may vary across NNI versions


class TPESampler(Sampler):
    def __init__(self, optimize_mode='minimize'):
        # Import here instead of at module level to eliminate some
        # warning messages about dill.
        from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

        self.tpe_tuner = HyperoptTuner('tpe', optimize_mode)
        self.cur_sample: Optional[dict] = None
        self.index: Optional[int] = None
        self.total_parameters = {}

    def update_sample_space(self, sample_space):
        search_space = {}
        for i, each in enumerate(sample_space):
            search_space[str(i)] = {'_type': 'choice', '_value': each}
        self.tpe_tuner.update_search_space(search_space)

    def generate_samples(self, model_id):
        self.cur_sample = self.tpe_tuner.generate_parameters(model_id)
        self.total_parameters[model_id] = self.cur_sample
        self.index = 0

    def receive_result(self, model_id, result):
        self.tpe_tuner.receive_trial_result(model_id,
                                            self.total_parameters[model_id],
                                            result)

    def choice(self, candidates, mutator, model, index):
        # generate_samples() must have been called first, so both are set.
        assert isinstance(self.index, int) and isinstance(self.cur_sample, dict)
        chosen = self.cur_sample[str(self.index)]
        self.index += 1
        return chosen
Example #3
    def test_tpe(self):
        tuner_fn = lambda: HyperoptTuner("tpe")
        # NOTE: these types are ignored because they trigger
        # `tpe.py`, line 465, in adaptive_parzen_normal: assert prior_sigma > 0
        self.search_space_test_all(tuner_fn,
                                   ignore_types=[
                                       "uniform_equal", "qloguniform_equal",
                                       "loguniform_equal", "quniform_clip_2"
                                   ])
        self.import_data_test(tuner_fn)
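
Presumably the ignored types are search-space entries whose range collapses to a single point (a uniform with equal bounds, for instance), leaving TPE's Parzen estimator with a zero prior width. A hypothetical space of that shape:

# Hypothetical degenerate entry: lower == upper, so the prior width that
# adaptive_parzen_normal derives from the range is 0, which would trip the
# assert prior_sigma > 0 referenced in the NOTE above.
degenerate_space = {
    "x": {"_type": "uniform", "_value": [0.5, 0.5]},
}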
Example #4
# Import paths follow NNI 2.x and may differ in other versions;
# TaskConfig is provided by the surrounding benchmark harness.
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner
from nni.algorithms.hpo.evolution_tuner import EvolutionTuner
from nni.algorithms.hpo.smac_tuner import SMACTuner
from nni.algorithms.hpo.gp_tuner import GPTuner
from nni.algorithms.hpo.metis_tuner import MetisTuner
from nni.algorithms.hpo.hyperband_advisor import Hyperband
from nni.algorithms.hpo.bohb_advisor import BOHB


def get_tuner(config: TaskConfig):
    # Users may add their customized tuners here.
    tuner_type = config.framework_params['tuner_type']

    if tuner_type == 'tpe':
        return HyperoptTuner('tpe'), 'TPE Tuner'
    elif tuner_type == 'random_search':
        return HyperoptTuner('random_search'), 'Random Search Tuner'
    elif tuner_type == 'anneal':
        return HyperoptTuner('anneal'), 'Annealing Tuner'
    elif tuner_type == 'evolution':
        return EvolutionTuner(), 'Evolution Tuner'
    elif tuner_type == 'smac':
        return SMACTuner(), 'SMAC Tuner'
    elif tuner_type == 'gp':
        return GPTuner(), 'GP Tuner'
    elif tuner_type == 'metis':
        return MetisTuner(), 'Metis Tuner'
    elif tuner_type == 'hyperband':
        # Hyperband takes the maximum resource budget R when one is configured.
        if 'max_resource' in config.framework_params:
            tuner = Hyperband(R=config.framework_params['max_resource'])
        else:
            tuner = Hyperband()
        return tuner, 'Hyperband Advisor'
    elif tuner_type == 'bohb':
        if 'max_resource' in config.framework_params:
            tuner = BOHB(max_budget=config.framework_params['max_resource'])
        else:
            tuner = BOHB(max_budget=60)
        return tuner, 'BOHB Advisor'
    else:
        raise RuntimeError('The requested tuner type in framework.yaml is unavailable.')
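
A quick illustration of how this dispatcher might be driven. TaskConfig belongs to the surrounding benchmark harness, so the stand-in below only mimics the one field get_tuner reads:

class FakeTaskConfig:
    # Only framework_params matters to get_tuner; the real TaskConfig
    # carries much more benchmark state.
    framework_params = {'tuner_type': 'hyperband', 'max_resource': 81}

tuner, name = get_tuner(FakeTaskConfig())
print(name)  # -> 'Hyperband Advisor'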
Example #5
    def test_tuner_generate(self):
        for algorithm in ["tpe", "random_search", "anneal"]:
            tuner = HyperoptTuner(algorithm)
            choice_list = ["a", "b", 1, 2]
            tuner.update_search_space({
                "a": {
                    "_type": "randint",
                    "_value": [1, 3]
                },
                "b": {
                    "_type": "choice",
                    "_value": choice_list
                }
            })
            for k in range(30):
                # sample multiple times
                param = tuner.generate_parameters(k)
                print(param)
                self.assertIsInstance(param["a"], int)
                self.assertGreaterEqual(param["a"], 1)
                self.assertLessEqual(param["a"], 2)
                self.assertIn(param["b"], choice_list)
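
Note that NNI's randint samples from the half-open interval [lower, upper), which is why the assertions above only allow 1 and 2 for _value [1, 3]. To make 3 reachable, the upper bound would be raised by one; a minimal sketch:

tuner = HyperoptTuner("tpe")
tuner.update_search_space({
    "a": {"_type": "randint", "_value": [1, 4]},  # draws 1, 2, or 3
})
param = tuner.generate_parameters(0)
assert 1 <= param["a"] <= 3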
Example #6
# FIXME: For demonstration only. It should not be here.

from pathlib import Path

from nni.experiment import Experiment
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

tuner = HyperoptTuner('tpe')

search_space = {
    "dropout_rate": {"_type": "uniform", "_value": [0.5, 0.9]},
    "conv_size": {"_type": "choice", "_value": [2, 3, 5, 7]},
    "hidden_size": {"_type": "choice", "_value": [124, 512, 1024]},
    "batch_size": {"_type": "choice", "_value": [16, 32]},
    "learning_rate": {"_type": "choice", "_value": [0.0001, 0.001, 0.01, 0.1]}
}

experiment = Experiment(tuner, 'local')
experiment.config.experiment_name = 'test'
experiment.config.trial_concurrency = 2       # run two trials at a time
experiment.config.max_trial_number = 5        # stop after five trials
experiment.config.search_space = search_space
experiment.config.trial_command = 'python3 mnist.py'
experiment.config.trial_code_directory = Path(__file__).parent
experiment.config.training_service.use_active_gpu = True

experiment.run(8081)
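
Experiment.run blocks until the experiment finishes. A defensive variant (a sketch, not part of the source) wraps the call so the experiment is always torn down:

try:
    experiment.run(8081)  # web UI served at http://localhost:8081
finally:
    experiment.stop()     # shut down the NNI manager cleanly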
Example #7
    def test_anneal(self):
        tuner_fn = lambda: HyperoptTuner("anneal")
        self.search_space_test_all(tuner_fn)
        self.import_data_test(tuner_fn)
Example #8
    def test_random_search(self):
        tuner_fn = lambda: HyperoptTuner("random_search")
        self.search_space_test_all(tuner_fn)
        self.import_data_test(tuner_fn)
Example #9
from pathlib import Path

from nni.experiment import Experiment, RemoteMachineConfig
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

tuner = HyperoptTuner('tpe', optimize_mode='maximize')

search_space = {
    "learning_rate": {
        "_type": "loguniform",
        "_value": [1e-2, 20]
    },
    "output_dropout": {
        "_type": "uniform",
        "_value": [0.3, 0.6]
    },
    "hidden_dropout": {
        "_type": "uniform",
        "_value": [0.4, 0.6]
    },
    "input_dropout": {
        "_type": "uniform",
        "_value": [0.5, 0.7]
    },
    "embedding_dropout": {
        "_type": "uniform",
        "_value": [0, 0.5]
    },
    "weight_dropout": {
        "_type": "uniform",
        "_value": [0.5, 0.8]