def get_search_space():
    config = ConfigSpace()
    max_depth = GridHyperparameter(name='max_depth', values=[3, 5])
    config.add_hyper([max_depth])
    return config.get_hypers()
def get_search_space():
    config = ConfigSpace()
    n_neighbors = GridHyperparameter(name='n_neighbors', values=[3, 5, 7, 10])
    config.add_hyper([n_neighbors])
    return config.get_hypers()
def get_search_space():
    config = ConfigSpace()
    n_estimator_list = GridHyperparameter(name='n_estimators', values=[100, 300, 500])
    config.add_hyper([n_estimator_list])
    return config.get_hypers()
def get_search_space():
    config = ConfigSpace()
    c_list = UniformHyperparameter(name="C", low=0.1, high=10, size=3)
    # dual = CategoryHyperparameter(name="dual", categories=[True, False])
    # grid = GridHyperparameter(name="C", values=[10, 100])
    config.add_hyper([c_list])
    return config.get_hypers()
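# UniformHyperparameter(name="C", low=0.1, high=10, size=3) presumably asks the tuner
# for three candidate values drawn from the range [0.1, 10]. Purely as an illustration
# of that idea (not the library's actual implementation), candidates could be drawn
# with the standard library; the helper below is hypothetical.
import random

def draw_uniform(low, high, size, seed=0):
    """Return `size` values sampled uniformly from [low, high] (illustrative only)."""
    rng = random.Random(seed)
    return [rng.uniform(low, high) for _ in range(size)]

# e.g. three candidate values for C
print(draw_uniform(0.1, 10, 3))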
def get_search_space():
    config = ConfigSpace()
    fit_intercept = CategoryHyperparameter(name="fit_intercept", categories=[True, False])
    # grid = GridHyperparameter(name="C", values=[1, 2, 3])
    config.add_hyper([fit_intercept])
    # config.get_hypers()
    return config.get_hypers()
def get_search_space():
    config = ConfigSpace()
    n_estimators = GridHyperparameter(name='n_estimators', values=[100, 300, 500])
    learning_rate = UniformHyperparameter(name='learning_rate', low=0.01, high=1.0, size=2)
    config.add_hyper([n_estimators, learning_rate])
    return config.get_hypers()
def get_search_space():
    config = ConfigSpace()
    # Also search over the base estimator: using a decision tree here often improves results in practice.
    base_estimators = GridHyperparameter(name='base_estimator', values=[None, DTRegressor()])
    n_estimators = GridHyperparameter(name='n_estimators', values=[50, 70, 100])
    learning_rate = UniformHyperparameter(name='learning_rate', low=0.01, high=1.0, size=2)
    config.add_hyper([base_estimators, n_estimators, learning_rate])
    return config.get_hypers()
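# The snippets above only declare the search space; how it is expanded depends on the
# tuner consuming config.get_hypers(). As a rough sketch of the grid-search idea only
# (not the library's actual mechanism), fixed value lists such as n_estimators and
# learning_rate can be enumerated with itertools.product. The helper below is hypothetical.
from itertools import product

def enumerate_grid(grid):
    """Yield one dict per combination of the given value lists (illustrative only)."""
    names = list(grid)
    for combo in product(*(grid[name] for name in names)):
        yield dict(zip(names, combo))

# Example: the boosting-style space above, with learning_rate reduced to two fixed
# points for the sake of the sketch.
for candidate in enumerate_grid({'n_estimators': [100, 300, 500],
                                 'learning_rate': [0.01, 1.0]}):
    print(candidate)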