def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The Bernoulli NB configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/bernoulli_nb.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.naive_bayes.BernoulliNB',
                                        seed)

    # the smoothing parameter is a non-negative float
    # I will limit it to 1000 and put it on a logarithmic scale. (SF)
    # Please adjust that, if you know a proper range, this is just a guess.
    alpha = ConfigSpace.UniformFloatHyperparameter(
        name='alpha', lower=1e-2, upper=100, default_value=1, log=True)
    fit_prior = ConfigSpace.CategoricalHyperparameter(
        name='fit_prior', choices=[True, False], default_value=True)

    return ConfigSpaceWrapper(cs, [alpha, fit_prior], None)
def prepare_classifier(configuration_space_wrapper: ConfigSpaceWrapper,
                       task: openml.tasks.OpenMLTask,
                       run_defaults: bool):
    """
    Builds a scikit-learn classifier (or pipeline) for the given OpenML task,
    optionally sampling a random configuration from the search space.

    Parameters
    ----------
    configuration_space_wrapper: ConfigSpaceWrapper
        Wrapper around the configuration space describing the model
    task: openml.tasks.OpenMLTask
        The OpenML task the classifier will be run on
    run_defaults: bool
        If True, the classifier keeps its default hyperparameter values;
        otherwise a random configuration is sampled and applied

    Returns
    -------
    classifier
        The prepared scikit-learn classifier or pipeline
    """
    config_space = configuration_space_wrapper.assemble()

    dataset = task.get_dataset()
    qualities = dataset.qualities
    logging.info('Obtained task %d (%s); %s attributes; %s observations' % (
        task.task_id, dataset.name,
        qualities['NumberOfFeatures'], qualities['NumberOfInstances']))

    # obtain deserialized classifier
    nominal_indices = dataset.get_features_by_type('nominal',
                                                   [task.target_name])
    numeric_indices = dataset.get_features_by_type('numeric',
                                                   [task.target_name])
    if configuration_space_wrapper.wrapped_in_pipeline:
        classifier = sklearnbot.sklearn.as_pipeline(config_space,
                                                    numeric_indices,
                                                    nominal_indices)
    else:
        classifier = sklearnbot.sklearn.as_estimator(config_space, False)

    # sample configuration and set hyperparameters
    if run_defaults:
        logging.info('Running default configuration')
    else:
        configuration = config_space.sample_configuration(1)
        logging.info('Configuration: %s' % configuration.get_dictionary())
        classifier.set_params(**configuration.get_dictionary())
    return classifier
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The SVM configuration space based on the search space from auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/libsvm_svc.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.svm.SVC', seed)

    C = ConfigSpace.UniformFloatHyperparameter(
        name='C', lower=0.03125, upper=32768, log=True, default_value=1.0)
    kernel = ConfigSpace.CategoricalHyperparameter(
        name='kernel', choices=['rbf', 'poly', 'sigmoid'],
        default_value='rbf')
    degree = ConfigSpace.UniformIntegerHyperparameter(
        name='degree', lower=1, upper=5, default_value=3)
    gamma = ConfigSpace.UniformFloatHyperparameter(
        name='gamma', lower=3.0517578125e-05, upper=8, log=True,
        default_value=0.1)
    coef0 = ConfigSpace.UniformFloatHyperparameter(
        name='coef0', lower=-1, upper=1, default_value=0)
    shrinking = ConfigSpace.CategoricalHyperparameter(
        name='shrinking', choices=[True, False], default_value=True)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, default_value=1e-3, log=True)
    max_iter = ConfigSpace.UnParametrizedHyperparameter('max_iter', -1)

    # degree only matters for the polynomial kernel;
    # coef0 only for poly and sigmoid kernels
    degree_active = ConfigSpace.EqualsCondition(degree, kernel, 'poly')
    coef0_active = ConfigSpace.InCondition(coef0, kernel,
                                           ['poly', 'sigmoid'])

    return ConfigSpaceWrapper(
        cs,
        [C, kernel, degree, gamma, coef0, shrinking, tol, max_iter],
        [degree_active, coef0_active])
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    k-NN search space based on a best effort using the scikit-learn
    implementation.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.neighbors.KNeighborsClassifier', seed)

    n_neighbors = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name='n_neighbors', lower=1, upper=20, default_value=5)
    weights = ConfigSpace.hyperparameters.CategoricalHyperparameter(
        name='weights', choices=['uniform', 'distance'],
        default_value='uniform')
    algorithm = ConfigSpace.hyperparameters.CategoricalHyperparameter(
        name='algorithm', choices=['auto', 'ball_tree', 'kd_tree', 'brute'],
        default_value='auto')
    leaf_size = ConfigSpace.UniformIntegerHyperparameter(
        name='leaf_size', lower=1, upper=50, default_value=1)
    p = ConfigSpace.UniformIntegerHyperparameter(
        name='p', lower=1, upper=5, default_value=2)
    # NOTE(review): 'wminkowski', 'seuclidean' and 'mahalanobis' require
    # additional metric parameters (w / V / VI) in scikit-learn — confirm
    # the downstream estimator supplies them before sampling these choices.
    metric = ConfigSpace.hyperparameters.CategoricalHyperparameter(
        name='metric',
        choices=[
            'euclidean', 'manhattan', 'chebyshev', 'minkowski',
            'wminkowski', 'seuclidean', 'mahalanobis'
        ],
        default_value='minkowski')

    # leaf_size is only used by the tree-based neighbour searches
    leaf_size_active = ConfigSpace.InCondition(leaf_size, algorithm,
                                               ['ball_tree', 'kd_tree'])

    return ConfigSpaceWrapper(
        cs,
        [n_neighbors, weights, algorithm, leaf_size, p, metric],
        [leaf_size_active])
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The random forest configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/random_forest.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.ensemble.RandomForestClassifier', seed)

    # TODO: parameterize the number of estimators?
    n_estimators = ConfigSpace.Constant(name='n_estimators', value=100)
    criterion = ConfigSpace.CategoricalHyperparameter(
        name='criterion', choices=['gini', 'entropy'], default_value='gini')
    max_features = ConfigSpace.UniformFloatHyperparameter(
        name='max_features', lower=0., upper=1., default_value=0.5)
    min_samples_split = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_split', lower=2, upper=20, default_value=2)
    min_samples_leaf = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_leaf', lower=1, upper=20, default_value=1)
    min_weight_fraction_leaf = ConfigSpace.UnParametrizedHyperparameter(
        name='min_weight_fraction_leaf', value=0.)
    bootstrap = ConfigSpace.CategoricalHyperparameter(
        name='bootstrap', choices=[True, False], default_value=True)

    # max_depth and max_leaf_nodes are deliberately left out of the space
    # (their None defaults cannot be expressed as hyperparameters here)
    return ConfigSpaceWrapper(
        cs,
        [
            n_estimators, criterion, max_features, min_samples_split,
            min_samples_leaf, min_weight_fraction_leaf, bootstrap
        ],
        None)
def run_optimizer_on_task(task_id: int,
                          configuration_space_wrapper: ConfigSpaceWrapper,
                          output_dir: str,
                          upload_and_delete: bool) \
        -> typing.Tuple[bool, typing.Optional[int], typing.Optional[str]]:
    """
    Runs a search-CV optimizer on a single OpenML task and stores (and
    optionally uploads) the resulting run.

    Parameters
    ----------
    task_id: int
        The OpenML task id to run the optimizer on
    configuration_space_wrapper: ConfigSpaceWrapper
        Wrapper around the configuration space defining the search space
    output_dir: str
        Directory under which the run is stored locally
    upload_and_delete: bool
        If True, the run is published to the OpenML server and the local
        copy is removed afterwards

    Returns
    -------
    success: bool
        Whether the run completed without a handled OpenML error
    run_id: Optional[int]
        The id of the run (only set by the server after publishing)
    local_run_dir: Optional[str]
        The local directory holding the run (None once it has been deleted)
    """
    local_run_dir = os.path.join(output_dir, str(task_id), str(uuid.uuid4()))
    try:
        # obtain task
        task = openml.tasks.get_task(task_id)
        configuration_space = configuration_space_wrapper.assemble()
        data_name = task.get_dataset().name
        data_qualities = task.get_dataset().qualities
        data_tuple = (task.task_id, data_name,
                      data_qualities['NumberOfFeatures'],
                      data_qualities['NumberOfInstances'])
        logging.info('Obtained task %d (%s); %s attributes; %s observations'
                     % data_tuple)

        # obtain deserialized classifier
        nominal_indices = task.get_dataset().get_features_by_type(
            'nominal', [task.target_name])
        numeric_indices = task.get_dataset().get_features_by_type(
            'numeric', [task.target_name])

        # obtain prepared classifier
        optimizer = sklearnbot.sklearn.as_search_cv(configuration_space,
                                                    nominal_indices,
                                                    numeric_indices)

        # invoke OpenML run
        run = openml.runs.run_model_on_task(optimizer, task)
        score = run.get_metric_fn(sklearn.metrics.accuracy_score)
        # reuse data_name fetched above instead of hitting the dataset again
        logging.info('Task %d - %s; Accuracy: %0.2f'
                     % (task_id, data_name, score.mean()))
        run.to_filesystem(local_run_dir, store_model=False)
        if upload_and_delete:
            run = run.publish()
            shutil.rmtree(local_run_dir)
            local_run_dir = None
        return True, run.run_id, local_run_dir
    except (openml.exceptions.OpenMLServerException,
            openml.exceptions.OpenMLRunsExistError):
        # the two handlers were byte-identical; merged into one clause
        traceback.print_exc()
        return False, None, local_run_dir
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The adaboost configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/adaboost.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.ensemble.AdaBoostClassifier', seed,
        meta={"base_estimator": DecisionTreeClassifier(random_state=0)})

    n_estimators = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name="n_estimators", lower=50, upper=500, default_value=50,
        log=False)
    learning_rate = ConfigSpace.hyperparameters.UniformFloatHyperparameter(
        name="learning_rate", lower=0.01, upper=2, default_value=0.1,
        log=True)
    algorithm = ConfigSpace.hyperparameters.CategoricalHyperparameter(
        name="algorithm", choices=["SAMME.R", "SAMME"],
        default_value="SAMME.R")
    # tunes the depth of the decision-tree base estimator via the
    # double-underscore parameter path
    max_depth = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name="base_estimator__max_depth", lower=1, upper=10, default_value=1,
        log=False)

    return ConfigSpaceWrapper(
        cs, [n_estimators, learning_rate, algorithm, max_depth], None)
def get_hyperparameter_search_space(seed: int) -> ConfigSpaceWrapper:
    """
    The decision tree configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/decision_tree.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace("sklearn.tree.DecisionTreeClassifier",
                                        seed)

    criterion = ConfigSpace.CategoricalHyperparameter(
        name='criterion', choices=['gini', 'entropy'], default_value='gini')
    min_samples_split = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_split', lower=2, upper=20, default_value=2)
    min_samples_leaf = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_leaf', lower=1, upper=20, default_value=1)
    min_weight_fraction_leaf = ConfigSpace.Constant(
        name='min_weight_fraction_leaf', value=0.0)
    max_features = ConfigSpace.UnParametrizedHyperparameter(
        name='max_features', value=1.0)
    min_impurity_decrease = ConfigSpace.UnParametrizedHyperparameter(
        'min_impurity_decrease', 0.0)

    # TODO: max_leaf_nodes one can only be tuned once config space allows
    # for this.
    return ConfigSpaceWrapper(
        cs,
        [
            criterion, max_features, min_samples_split, min_samples_leaf,
            min_weight_fraction_leaf, min_impurity_decrease
        ],
        None)
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    Gradient Boosting search space based on a best effort using the
    scikit-learn implementation. Note that for state of the art performance,
    other packages, such as xgboost, could be preferred.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.ensemble.GradientBoostingClassifier', seed)

    # fixed to deviance, as exponential requires two classes
    # FIX: renamed from 'gradientboostingclassifier__loss' — every other
    # hyperparameter in this space (and the sibling spaces) uses a bare,
    # unprefixed name, and the prefixed form made the space inconsistent.
    loss = ConfigSpace.hyperparameters.Constant(name='loss', value='deviance')
    # JvR: changed after conversation with AM on 2019-01-17
    learning_rate = ConfigSpace.hyperparameters.UniformFloatHyperparameter(
        name='learning_rate', lower=0.00001, upper=0.1, default_value=0.0001,
        log=True)
    n_estimators = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name='n_estimators', lower=64, upper=2048, default_value=100,
        log=True)
    # NOTE(review): scikit-learn requires subsample in (0, 1]; lower=0.0 can
    # sample an invalid value — confirm intended bound.
    subsample = ConfigSpace.UniformFloatHyperparameter(
        name='subsample', lower=0.0, upper=1.0, default_value=1.0)
    criterion = ConfigSpace.hyperparameters.CategoricalHyperparameter(
        name='criterion', choices=['friedman_mse', 'mse', 'mae'])
    min_samples_split = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name='min_samples_split', lower=2, upper=20, default_value=2)
    min_samples_leaf = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name='min_samples_leaf', lower=1, upper=20, default_value=1)
    # TODO: upper bound?
    min_weight_fraction_leaf = ConfigSpace.hyperparameters.UniformFloatHyperparameter(
        name='min_weight_fraction_leaf', lower=0.0, upper=0.5,
        default_value=0.0)
    # JvR: changed after conversation with AM on 2019-01-17
    max_depth = ConfigSpace.hyperparameters.UniformIntegerHyperparameter(
        name='max_depth', lower=1, upper=32, default_value=3)
    # TODO: upper bound?
    min_impurity_decrease = ConfigSpace.hyperparameters.UniformFloatHyperparameter(
        name='min_impurity_decrease', lower=0.0, upper=1.0,
        default_value=0.0)
    # NOTE(review): max_features as a fraction must be > 0 in scikit-learn;
    # lower=0.0 / default 0.0 look suspect — confirm intended bound.
    max_features = ConfigSpace.hyperparameters.UniformFloatHyperparameter(
        name='max_features', lower=0.0, upper=1.0, default_value=0.0)
    validation_fraction = ConfigSpace.UniformFloatHyperparameter(
        name='validation_fraction', lower=0, upper=1, default_value=0.1)
    n_iter_no_change = ConfigSpace.UniformIntegerHyperparameter(
        name='n_iter_no_change', lower=1, upper=2048, default_value=200)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, default_value=1e-4, log=True)

    hyperparameters = [
        loss,
        learning_rate,
        n_estimators,
        subsample,
        criterion,
        min_samples_split,
        min_samples_leaf,
        min_weight_fraction_leaf,
        max_depth,
        min_impurity_decrease,
        max_features,
        validation_fraction,
        n_iter_no_change,
        tol,
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, None)
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The random forest configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/sgd.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.linear_model.SGDClassifier',
                                        seed)

    loss = ConfigSpace.CategoricalHyperparameter(
        name='loss',
        choices=[
            'hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'
        ],
        default_value='log')
    penalty = ConfigSpace.CategoricalHyperparameter(
        name='penalty', choices=['l1', 'l2', 'elasticnet'],
        default_value='l2')
    alpha = ConfigSpace.UniformFloatHyperparameter(
        name='alpha', lower=1e-7, upper=1e-1, log=True, default_value=0.0001)
    l1_ratio = ConfigSpace.UniformFloatHyperparameter(
        name='l1_ratio', lower=1e-9, upper=1, log=True, default_value=0.15)
    # fit_intercept = ConfigSpace.UnParametrizedHyperparameter(name='fit_intercept', value=True)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, log=True, default_value=1e-4)
    epsilon = ConfigSpace.UniformFloatHyperparameter(
        name='epsilon', lower=1e-5, upper=1e-1, default_value=1e-4, log=True)
    learning_rate = ConfigSpace.CategoricalHyperparameter(
        name='learning_rate', choices=['optimal', 'invscaling', 'constant'],
        default_value='invscaling')
    eta0 = ConfigSpace.UniformFloatHyperparameter(
        name='eta0', lower=1e-7, upper=1e-1, default_value=0.01, log=True)
    power_t = ConfigSpace.UniformFloatHyperparameter(
        'power_t', 1e-5, 1, default_value=0.5)
    average = ConfigSpace.CategoricalHyperparameter(
        name='average', choices=[False, True], default_value=False)

    # TODO MF: add passive/aggressive here, although not properly documented?
    hyperparameters = [
        loss,
        penalty,
        alpha,
        l1_ratio,
        # fit_intercept,
        tol,
        epsilon,
        learning_rate,
        eta0,
        power_t,
        average,
    ]

    # l1_ratio only matters for the elastic-net penalty
    l1_ratio_active = ConfigSpace.EqualsCondition(l1_ratio, penalty,
                                                  'elasticnet')
    epsilon_active = ConfigSpace.EqualsCondition(epsilon, loss,
                                                 'modified_huber')
    power_t_active = ConfigSpace.EqualsCondition(power_t, learning_rate,
                                                 'invscaling')
    # eta0 is only relevant if learning_rate!='optimal' according to code
    # https://github.com/scikit-learn/scikit-learn/blob/0.19.X/sklearn/
    # linear_model/sgd_fast.pyx#L603
    eta0_active = ConfigSpace.InCondition(eta0, learning_rate,
                                          ['invscaling', 'constant'])

    return ConfigSpaceWrapper(
        cs, hyperparameters,
        [l1_ratio_active, epsilon_active, power_t_active, eta0_active])
def get_hyperparameter_search_space(seed: int) -> ConfigSpaceWrapper:
    """
    The extra trees configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/extra_trees.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace(
        'sklearn.ensemble.ExtraTreesClassifier', seed)

    # TODO: parameterize the number of estimators?
    # FIX: renamed from 'extratreesclassifier__n_estimators' — every other
    # hyperparameter in this space (and e.g. the random forest space) uses a
    # bare, unprefixed name; the prefixed form made the space inconsistent.
    n_estimators = ConfigSpace.Constant(name='n_estimators', value=100)
    criterion = ConfigSpace.CategoricalHyperparameter(
        name='criterion', choices=['gini', 'entropy'], default_value='gini')
    # The maximum number of features used in the forest is calculated as
    # m^max_features, where m is the total number of features, and
    # max_features is the hyperparameter specified below. The default is 0.5,
    # which yields sqrt(m) features as max_features in the estimator. This
    # corresponds with Geurts' heuristic.
    max_features = ConfigSpace.UniformFloatHyperparameter(
        name='max_features', lower=0., upper=1., default_value=0.5)
    # max_depth = ConfigSpace.UnParametrizedHyperparameter(name='extratreesclassifier__max_depth', value=None)
    min_samples_split = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_split', lower=2, upper=20, default_value=2)
    min_samples_leaf = ConfigSpace.UniformIntegerHyperparameter(
        name='min_samples_leaf', lower=1, upper=20, default_value=1)
    min_weight_fraction_leaf = ConfigSpace.UnParametrizedHyperparameter(
        name='min_weight_fraction_leaf', value=0.)
    # max_leaf_nodes = ConfigSpace.UnParametrizedHyperparameter(name='max_leaf_nodes', value=None)
    min_impurity_decrease = ConfigSpace.UnParametrizedHyperparameter(
        name='min_impurity_decrease', value=0.0)
    bootstrap = ConfigSpace.CategoricalHyperparameter(
        'bootstrap', [True, False], default_value=False)

    hyperparameters = [
        n_estimators,
        criterion,
        max_features,
        # max_depth,
        min_samples_split,
        min_samples_leaf,
        min_weight_fraction_leaf,
        # max_leaf_nodes,
        min_impurity_decrease,
        bootstrap
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, None)
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    Neural Network search space based on a best effort using the scikit-learn
    implementation. Note that for state of the art performance, other
    packages could be preferred.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.neural_network.MLPClassifier',
                                        seed)

    hidden_layer_sizes = ConfigSpace.UniformIntegerHyperparameter(
        name='hidden_layer_sizes', lower=32, upper=2048, default_value=2048)
    activation = ConfigSpace.CategoricalHyperparameter(
        name='activation', choices=['identity', 'logistic', 'tanh', 'relu'],
        default_value='relu')
    solver = ConfigSpace.CategoricalHyperparameter(
        name='solver', choices=['lbfgs', 'sgd', 'adam'], default_value='adam')
    alpha = ConfigSpace.UniformFloatHyperparameter(
        name='alpha', lower=1e-5, upper=1e-1, log=True, default_value=1e-4)
    batch_size = ConfigSpace.UniformIntegerHyperparameter(
        name='batch_size', lower=32, upper=4096, default_value=200)
    learning_rate = ConfigSpace.CategoricalHyperparameter(
        name='learning_rate', choices=['constant', 'invscaling', 'adaptive'],
        default_value='constant')
    learning_rate_init = ConfigSpace.UniformFloatHyperparameter(
        name='learning_rate_init', lower=1e-5, upper=1e-1, log=True,
        default_value=1e-04)
    # TODO: Sensible range??
    power_t = ConfigSpace.UniformFloatHyperparameter(
        name='power_t', lower=1e-5, upper=1, log=True, default_value=0.5)
    max_iter = ConfigSpace.UniformIntegerHyperparameter(
        name='max_iter', lower=64, upper=1024, default_value=200)
    shuffle = ConfigSpace.CategoricalHyperparameter(
        name='shuffle', choices=[True, False], default_value=True)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, default_value=1e-4, log=True)
    # TODO: log-scale?
    momentum = ConfigSpace.UniformFloatHyperparameter(
        name='momentum', lower=0, upper=1, default_value=0.9)
    nesterovs_momentum = ConfigSpace.CategoricalHyperparameter(
        name='nesterovs_momentum', choices=[True, False], default_value=True)
    early_stopping = ConfigSpace.CategoricalHyperparameter(
        name='early_stopping', choices=[True, False], default_value=True)
    validation_fraction = ConfigSpace.UniformFloatHyperparameter(
        name='validation_fraction', lower=0, upper=1, default_value=0.1)
    beta_1 = ConfigSpace.UniformFloatHyperparameter(
        name='beta_1', lower=0, upper=1, default_value=0.9)
    beta_2 = ConfigSpace.UniformFloatHyperparameter(
        name='beta_2', lower=0, upper=1, default_value=0.999)
    n_iter_no_change = ConfigSpace.UniformIntegerHyperparameter(
        name='n_iter_no_change', lower=1, upper=1024, default_value=200)

    hyperparameters = [
        hidden_layer_sizes,
        activation,
        solver,
        alpha,
        batch_size,
        learning_rate,
        learning_rate_init,
        power_t,
        max_iter,
        shuffle,
        tol,
        momentum,
        nesterovs_momentum,
        early_stopping,
        validation_fraction,
        beta_1,
        beta_2,
        n_iter_no_change,
    ]

    # several hyperparameters only apply to specific solvers / settings
    batch_size_condition = ConfigSpace.InCondition(batch_size, solver,
                                                   ['sgd', 'adam'])
    learning_rate_init_condition = ConfigSpace.InCondition(
        learning_rate_init, solver, ['sgd', 'adam'])
    power_t_condition = ConfigSpace.EqualsCondition(power_t, solver, 'sgd')
    shuffle_condition = ConfigSpace.InCondition(shuffle, solver,
                                                ['sgd', 'adam'])
    tol_condition = ConfigSpace.InCondition(tol, learning_rate,
                                            ['constant', 'invscaling'])
    momentum_condition = ConfigSpace.EqualsCondition(momentum, solver, 'sgd')
    # nesterovs_momentum is active only for the sgd solver AND momentum > 0
    nesterovs_momentum_condition_solver = ConfigSpace.EqualsCondition(
        nesterovs_momentum, solver, 'sgd')
    nesterovs_momentum_condition_momentum = ConfigSpace.GreaterThanCondition(
        nesterovs_momentum, momentum, 0)
    nesterovs_momentum_conjunction = ConfigSpace.AndConjunction(
        nesterovs_momentum_condition_solver,
        nesterovs_momentum_condition_momentum)
    early_stopping_condition = ConfigSpace.InCondition(early_stopping, solver,
                                                       ['sgd', 'adam'])
    validation_fraction_condition = ConfigSpace.EqualsCondition(
        validation_fraction, early_stopping, True)
    beta_1_condition = ConfigSpace.EqualsCondition(beta_1, solver, 'adam')
    beta_2_condition = ConfigSpace.EqualsCondition(beta_2, solver, 'adam')
    n_iter_no_change_condition_solver = ConfigSpace.InCondition(
        n_iter_no_change, solver, ['sgd', 'adam'])

    conditions = [
        batch_size_condition,
        learning_rate_init_condition,
        power_t_condition,
        shuffle_condition,
        tol_condition,
        momentum_condition,
        nesterovs_momentum_conjunction,
        early_stopping_condition,
        validation_fraction_condition,
        beta_1_condition,
        beta_2_condition,
        n_iter_no_change_condition_solver,
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, conditions)