def test_fit(self):
    """Run a small AutoXGBRegressor search on array data and validate the result.

    Checks that the best model's searched hyperparameters fall inside the
    recipe's ranges and that every recipe key appears in the best config.
    """
    regressor = AutoXGBRegressor(cpus_per_trial=2,
                                 name="auto_xgb_regressor",
                                 tree_method='hist')
    data, validation_data = get_data()
    regressor.fit(data=data,
                  validation_data=validation_data,
                  search_space=create_XGB_recipe(),
                  n_sampling=4,
                  epochs=1,
                  metric="mae")

    best_model = regressor.get_best_model()
    # Searched values must lie within the ranges declared by the recipe.
    assert 5 <= best_model.model.n_estimators <= 10
    assert 2 <= best_model.model.max_depth <= 5

    best_config = regressor.get_best_config()
    for key in create_XGB_recipe().keys():
        assert key in best_config
def test_data_creator(self):
    """Run an AutoXGBRegressor search fed by data-creator callables.

    Builds a search space that samples a 2-element "features" subset on top
    of the model search space, then verifies the best config covers every
    search-space key and keeps exactly two features.
    """
    train_data_creator, val_data_creator = get_data_creators()
    regressor = AutoXGBRegressor(cpus_per_trial=2,
                                 name="auto_xgb_regressor",
                                 tree_method='hist')
    model_search_space = get_xgb_search_space()
    # todo: change to hp.choice_n
    search_space = {
        "features": hp.sample_from(
            lambda spec: np.random.choice(["f1", "f2", "f3"], size=2))
    }
    search_space.update(model_search_space)
    regressor.fit(data=train_data_creator,
                  epochs=1,
                  validation_data=val_data_creator,
                  metric="logloss",
                  metric_mode="min",
                  search_space=search_space,
                  n_sampling=2)

    best_config = regressor.get_best_config()
    for key in search_space:
        assert key in best_config
    # The sampled feature subset must retain exactly two columns.
    assert len(best_config["features"]) == 2