Example #1
0
def optimize(scenario, run, forest=False, seed=8, ratio=0.8):
    """Run SMAC on *scenario* and return the cost of the final incumbent.

    Parameters
    ----------
    scenario : SMAC ``Scenario`` exposing a config space (``cs``) and
        an instance ``feature_array``.
    run : target-algorithm callable, passed to SMAC as ``tae_runner``.
    forest : bool
        If True use ``ForestSearch`` as the acquisition-function
        optimizer, otherwise SMAC's default
        ``InterleavedLocalAndRandomSearch``.
    seed : int
        RNG seed used for both the surrogate model and SMAC itself.
    ratio : float
        Forwarded to ``ForestSearch`` (only used when ``forest`` is True).

    Returns
    -------
    The cost element (index 1) of a TAE run of the incumbent
    configuration.
    """
    types, bounds = get_types(scenario.cs, scenario.feature_array)
    rfr = RandomForestWithInstances(types=types, bounds=bounds,
                                    instance_features=scenario.feature_array,
                                    seed=seed)
    ei = EI(model=rfr)
    if forest:
        optimizer = ForestSearch(ei, scenario.cs, ratio=ratio)
    else:
        optimizer = InterleavedLocalAndRandomSearch(ei, scenario.cs)

    # Unique output directory per run: mode tag + seed + timestamp.
    scenario.output_dir = "%s_%s_%d_%lf" % ("./logs/run_", "forest_" if forest else "random_", seed, time.time())
    smac = SMAC(
        scenario=scenario,
        rng=np.random.RandomState(seed),
        model=rfr,
        acquisition_function=ei,
        acquisition_function_optimizer=optimizer,
        tae_runner=run,
    )

    # Read the incumbent from the solver even if optimize() is
    # interrupted. Binding optimize()'s return value was a dead store
    # before -- the finally-branch always overwrote it -- so the
    # assignment is dropped.
    try:
        smac.optimize()
    finally:
        incumbent = smac.solver.incumbent

    return smac.get_tae_runner().run(incumbent, 1)[1]
Example #2
0
 def test_get_runhistory_and_trajectory_and_tae_runner(self):
     """Accessors raise until populated; the TAE runner comes back as-is."""
     tae = ExecuteTAFuncDict(lambda x: x**2)
     smac = SMAC(tae_runner=tae, scenario=self.scenario)

     # Before any optimization both accessors must refuse to answer.
     with self.assertRaises(ValueError):
         smac.get_runhistory()
     with self.assertRaises(ValueError):
         smac.get_trajectory()

     # Once the attributes are set, the getters hand them back verbatim.
     smac.trajectory = 'dummy'
     self.assertEqual(smac.get_trajectory(), 'dummy')
     smac.runhistory = 'dummy'
     self.assertEqual(smac.get_runhistory(), 'dummy')

     # The runner object given at construction is returned unchanged.
     self.assertEqual(smac.get_tae_runner(), tae)
Example #3
0
 def bayesian_optimize(self):
     """Conduct Bayesian optimization on the hyperparameters, starting at current values."""
     # Guard clause: these algorithms expose nothing to tune.
     if self.algorithm in ['GNB', 'Perceptron']:
         return self

     space = ConfigurationSpace()
     space.add_hyperparameters(list(getattr(util, self.algorithm + '_range')(self.hyperparameters).values()))

     # Evaluation budget; kNN gets a smaller one (presumably because its
     # search space shrinks when k is pinned at 1 -- verify).
     if self.algorithm == 'kNN':
         budget = 3 if self.hyperparameters['k'] == 1 else 5
     else:
         budget = 10

     scenario = Scenario({'run_obj': 'quality', 'runcount-limit': budget, 'cs': space, 'deterministic': 'true', 'memory_limit': None})
     smac = SMAC(scenario=scenario, rng=np.random.RandomState(100), tae_runner=self.error_function)

     # Read the incumbent from the solver even if optimize() is interrupted.
     try:
         incumbent = smac.optimize()
     finally:
         incumbent = smac.solver.incumbent

     self.error = smac.get_tae_runner().run(incumbent, 1)[1]
     self.hyperparameters = incumbent.get_dictionary()
     self.bayesian_optimized = True
     return self
# Scenario describing the optimization objective and budget.
scenario = Scenario({"run_obj": "quality",   # we optimize quality (alternative runtime)
                     "runcount-limit": 50,  # maximum number of function evaluations
                     "cs": cs,               # configuration space
                     "deterministic": "true",
                     "memory_limit": 3072,   # adapt this to reasonable value for your hardware
                     "shared_model": True,
                     "input_psmac_dirs": "smac3-output*"
                     })

# To optimize, we pass the function to the SMAC-object
smac = SMAC(scenario=scenario, rng=np.random.RandomState(42),
            tae_runner=rf_from_cfg)

# Example call of the function with default values
# It returns: Status, Cost, Runtime, Additional Infos
def_value = smac.get_tae_runner().run(cs.get_default_configuration(), 1)[1]
print("Value for default configuration: %.2f" % (def_value))

# Start optimization; read the incumbent from the solver even when
# optimize() is interrupted. Binding optimize()'s return value was a
# dead store before -- the finally-branch always overwrote it.
try:
    smac.optimize()
finally:
    incumbent = smac.solver.incumbent

# NOTE(review): bare attribute access has no effect here -- likely
# notebook residue meant to display the incumbent's values; confirm
# and remove.
incumbent._values

# Record run metadata with MLflow: which city this run targets and
# which model family ("rf" = random forest) was used.
mlflow.log_param("city",city)
mlflow.log_param("model","rf")
# Accumulator for a comma-separated feature-name string, filled by the
# loop that follows.
all_feats = ""
for elem in features:
    if all_feats == "":