Example #1
    def __init__(self, optimize_mode='minimize'):
        # Move import here to eliminate some warning messages about dill.
        from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

        self.tpe_tuner = HyperoptTuner('tpe', optimize_mode)
        self.cur_sample: Optional[dict] = None
        self.index: Optional[int] = None
        self.total_parameters = {}
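Only the constructor of this wrapper class is shown. As a purely hypothetical sketch of how the rest of it might delegate to the inner TPE tuner: update_search_space, generate_parameters, and receive_trial_result are the real NNI Tuner hooks, but the method bodies below are guesses based on the attributes set in __init__, not the original class.

    # Hypothetical continuation of the class above -- not the original code.
    def update_search_space(self, search_space):
        self.tpe_tuner.update_search_space(search_space)

    def generate_parameters(self, parameter_id, **kwargs):
        # Cache the latest sample, as the attributes in __init__ suggest.
        self.cur_sample = self.tpe_tuner.generate_parameters(parameter_id, **kwargs)
        self.index = parameter_id
        self.total_parameters[parameter_id] = self.cur_sample
        return self.cur_sample

    def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
        self.tpe_tuner.receive_trial_result(parameter_id, parameters, value, **kwargs)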
Example #2
def test_tpe(self):
    tuner_fn = lambda: HyperoptTuner("tpe")
    self.search_space_test_all(tuner_fn,
                               ignore_types=[
                                   "uniform_equal", "qloguniform_equal",
                                   "loguniform_equal", "quniform_clip_2"
                               ])
    # NOTE: these types are skipped because they trip
    # `tpe.py line 465, in adaptive_parzen_normal assert prior_sigma > 0`
    self.import_data_test(tuner_fn)
Example #3
# NNI 2.x module paths for the built-in tuners/advisors used below.
# HyperoptTuner's path appears in the other examples; the rest follow the
# same nni.algorithms.hpo layout -- adjust to your NNI version if needed.
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner
from nni.algorithms.hpo.evolution_tuner import EvolutionTuner
from nni.algorithms.hpo.smac_tuner import SMACTuner
from nni.algorithms.hpo.gp_tuner import GPTuner
from nni.algorithms.hpo.metis_tuner import MetisTuner
from nni.algorithms.hpo.hyperband_advisor import Hyperband
from nni.algorithms.hpo.bohb_advisor import BOHB


def get_tuner(config: TaskConfig):
    # Users may add their customized tuners here.
    # (TaskConfig is defined by the surrounding benchmark harness.)
    tuner_type = config.framework_params['tuner_type']

    if tuner_type == 'tpe':
        return HyperoptTuner('tpe'), 'TPE Tuner'

    elif tuner_type == 'random_search':
        return HyperoptTuner('random_search'), 'Random Search Tuner'

    elif tuner_type == 'anneal':
        return HyperoptTuner('anneal'), 'Annealing Tuner'

    elif tuner_type == 'evolution':
        return EvolutionTuner(), 'Evolution Tuner'

    elif tuner_type == 'smac':
        return SMACTuner(), 'SMAC Tuner'

    elif tuner_type == 'gp':
        return GPTuner(), 'GP Tuner'

    elif tuner_type == 'metis':
        return MetisTuner(), 'Metis Tuner'

    elif tuner_type == 'hyperband':
        if 'max_resource' in config.framework_params:
            tuner = Hyperband(R=config.framework_params['max_resource'])
        else:
            tuner = Hyperband()
        return tuner, 'Hyperband Advisor'

    elif tuner_type == 'bohb':
        if 'max_resource' in config.framework_params:
            tuner = BOHB(max_budget=config.framework_params['max_resource'])
        else:
            tuner = BOHB(max_budget=60)
        return tuner, 'BOHB Advisor'

    else:
        raise RuntimeError('The requested tuner type in framework.yaml is unavailable.')
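Every simple branch above just maps a string key to a constructor and a display name, so the chain can also be table-driven. A sketch of that refactor follows; the SIMPLE_TUNERS mapping and get_simple_tuner name are my own, not part of the original benchmark code, and Hyperband/BOHB would keep their own branches since they take parameters.

# Hypothetical alternative: table-driven dispatch instead of the if/elif chain.
# Constructors are wrapped in lambdas so a tuner is only built on demand.
SIMPLE_TUNERS = {
    'tpe': (lambda: HyperoptTuner('tpe'), 'TPE Tuner'),
    'random_search': (lambda: HyperoptTuner('random_search'), 'Random Search Tuner'),
    'anneal': (lambda: HyperoptTuner('anneal'), 'Annealing Tuner'),
    'evolution': (lambda: EvolutionTuner(), 'Evolution Tuner'),
    'smac': (lambda: SMACTuner(), 'SMAC Tuner'),
    'gp': (lambda: GPTuner(), 'GP Tuner'),
    'metis': (lambda: MetisTuner(), 'Metis Tuner'),
}

def get_simple_tuner(config):
    try:
        factory, name = SIMPLE_TUNERS[config.framework_params['tuner_type']]
    except KeyError:
        raise RuntimeError('The requested tuner type in framework.yaml is unavailable.')
    return factory(), name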
Example #4
def test_tuner_generate(self):
    for algorithm in ["tpe", "random_search", "anneal"]:
        tuner = HyperoptTuner(algorithm)
        choice_list = ["a", "b", 1, 2]
        tuner.update_search_space({
            "a": {
                "_type": "randint",
                "_value": [1, 3]
            },
            "b": {
                "_type": "choice",
                "_value": choice_list
            }
        })
        for k in range(30):
            # Sample multiple times. NNI's randint upper bound is
            # exclusive, so "a" can only be 1 or 2.
            param = tuner.generate_parameters(k)
            print(param)
            self.assertIsInstance(param["a"], int)
            self.assertGreaterEqual(param["a"], 1)
            self.assertLessEqual(param["a"], 2)
            self.assertIn(param["b"], choice_list)
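generate_parameters() alone never updates TPE's model; the tuner only adapts once results are reported back through receive_trial_result(). A minimal closed-loop sketch against the same Tuner interface, where the quadratic objective and the "x" search space are invented for illustration:

# Hypothetical closed loop: generate, evaluate, report back so TPE can adapt.
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

tuner = HyperoptTuner('tpe', optimize_mode='minimize')
tuner.update_search_space({
    "x": {"_type": "uniform", "_value": [-5.0, 5.0]}
})

for pid in range(20):
    params = tuner.generate_parameters(pid)
    loss = (params["x"] - 1.0) ** 2          # toy objective, minimum at x = 1
    tuner.receive_trial_result(pid, params, loss)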
Example #5
# FIXME: For demonstration only. It should not be here

from pathlib import Path

from nni.experiment import Experiment
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

tuner = HyperoptTuner('tpe')

search_space = {
    "dropout_rate": { "_type": "uniform", "_value": [0.5, 0.9] },
    "conv_size": { "_type": "choice", "_value": [2, 3, 5, 7] },
    "hidden_size": { "_type": "choice", "_value": [124, 512, 1024] },
    "batch_size": { "_type": "choice", "_value": [16, 32] },
    "learning_rate": { "_type": "choice", "_value": [0.0001, 0.001, 0.01, 0.1] }
}

experiment = Experiment(tuner, 'local')
experiment.config.experiment_name = 'test'
experiment.config.trial_concurrency = 2
experiment.config.max_trial_number = 5
experiment.config.search_space = search_space
experiment.config.trial_command = 'python3 mnist.py'
experiment.config.trial_code_directory = Path(__file__).parent
experiment.config.training_service.use_active_gpu = True

experiment.run(8081)
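run() blocks until the trial budget is exhausted, but the web UI on port 8081 keeps serving until the experiment is shut down. A hedged follow-up using the NNI 2.x Experiment.stop() method, where the input() pause is just one way to keep the UI inspectable:

# Keep the web UI up for inspection, then shut the experiment down.
input('Experiment finished; press Enter to stop the web UI...')
experiment.stop()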
Example #6
def test_anneal(self):
    tuner_fn = lambda: HyperoptTuner("anneal")
    self.search_space_test_all(tuner_fn)
    self.import_data_test(tuner_fn)
Example #7
def test_random_search(self):
    tuner_fn = lambda: HyperoptTuner("random_search")
    self.search_space_test_all(tuner_fn)
    self.import_data_test(tuner_fn)
Example #8
def __init__(self, optimize_mode='minimize'):
    self.tpe_tuner = HyperoptTuner('tpe', optimize_mode)
    self.cur_sample = None
    self.index = None
    self.total_parameters = {}
Example #9
from pathlib import Path

from nni.experiment import Experiment, RemoteMachineConfig
from nni.algorithms.hpo.hyperopt_tuner import HyperoptTuner

tuner = HyperoptTuner('tpe', optimize_mode='maximize')

search_space = {
    "learning_rate": {
        "_type": "loguniform",
        "_value": [1e-2, 20]
    },
    "output_dropout": {
        "_type": "uniform",
        "_value": [0.3, 0.6]
    },
    "hidden_dropout": {
        "_type": "uniform",
        "_value": [0.4, 0.6]
    },
    "input_dropout": {
        "_type": "uniform",
        "_value": [0.5, 0.7]
    },
    "embedding_dropout": {
        "_type": "uniform",
        "_value": [0, 0.5]
    },
    "weight_dropout": {
        "_type": "uniform",
        "_value": [0.5, 0.8]