def __init__(self):
    """Define the hyperparameter search space for tf.train.AdagradDAOptimizer."""
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        # Bounds of an int param should be ints; 1e9 is a float literal,
        # so use 10**9 to keep the range integral.
        HyperParameter.int_param('global_step', (0, 10**9)),
        HyperParameter.float_param(
            'initial_gradient_squared_accumulator_value', (1e-6, 1.)),
        HyperParameter.float_param('l1_regularization_strength', (0., 1.)),
        HyperParameter.float_param('l2_regularization_strength', (0., 1.)),
        HyperParameter.categorical_param('use_locking', (True, False)),
    ]
    model_initializer = tf.train.AdagradDAOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    """Define the hyperparameter search space for sklearn.svm.NuSVC."""
    space = [
        HyperParameter.float_param('nu', (5e-3, 1)),
        HyperParameter.categorical_param('kernel', ('poly', 'rbf', 'sigmoid')),
        HyperParameter.int_param('degree', (2, 5)),  # only relevant for the 'poly' kernel
        # Lower bound is 2**-15, a common libsvm-style gamma floor.
        HyperParameter.float_param('gamma', (3.0517578125e-05, 8)),
        HyperParameter.float_param('coef0', (-1, 1.)),
        # HyperParameter.categorical_param('shrinking', (True, False)),
        HyperParameter.float_param('tol', (1e-5, 1e-1)),
    ]
    super().__init__(space, sklearn.svm.NuSVC)
def __init__(self):
    """Define the hyperparameter search space for tf.train.FtrlOptimizer."""
    # All three regularization strengths share the same search interval.
    reg_range = (0., 2.)
    space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('learning_rate_power', (-2., -1e-6)),
        HyperParameter.float_param('initial_accumulator_value', (0., 2.)),
        HyperParameter.float_param('l1_regularization_strength', reg_range),
        HyperParameter.float_param('l2_regularization_strength', reg_range),
        HyperParameter.categorical_param('use_locking', (True, False)),
        HyperParameter.float_param(
            'l2_shrinkage_regularization_strength', reg_range),
    ]
    super().__init__(space, tf.train.FtrlOptimizer)
def __init__(self):
    """Define the hyperparameter search space for sklearn.ensemble.RandomForestClassifier."""
    hp_space = [
        HyperParameter.int_param('n_estimators', (10, 1000)),
        # NOTE(review): sklearn requires max_depth >= 1 (or None); 0 is
        # presumably a sentinel the search framework maps to None -- confirm.
        HyperParameter.int_param('max_depth', (0, 40)),
        # sklearn requires an integer min_samples_split >= 2; sampling 1
        # would raise ValueError, so the lower bound starts at 2.
        HyperParameter.int_param('min_samples_split', (2, 100)),
        HyperParameter.int_param('min_samples_leaf', (1, 100)),
        HyperParameter.categorical_param('max_features', ('sqrt', 'log2', None)),
        # NOTE(review): sklearn requires max_leaf_nodes >= 2 (or None); -1
        # looks like a None sentinel handled by the framework -- confirm.
        HyperParameter.int_param('max_leaf_nodes', (-1, 100)),
        HyperParameter.float_param('min_impurity_decrease', (0.0, 100.0)),
    ]
    initializer = sklearn.ensemble.RandomForestClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    """Define the hyperparameter search space for XGBClassifier."""
    # Local aliases keep this long space definition readable.
    fp = HyperParameter.float_param
    ip = HyperParameter.int_param
    cp = HyperParameter.categorical_param
    space = [
        # Core tree/boosting knobs.
        ip('max_depth', (0, 60)),
        fp('learning_rate', (0, 1)),
        ip('n_estimators', (50, 1000)),
        cp('booster', ('gbtree', 'gblinear', 'dart')),
        fp('gamma', (0, 10000)),
        ip('min_child_weight', (0, 100)),
        ip('max_delta_step', (0, 10)),
        fp('subsample', (0.1, 1)),
        fp('colsample_bytree', (0.1, 1)),
        fp('colsample_bylevel', (0.1, 1)),
        fp('reg_alpha', (0.0, 1e4)),
        fp('reg_lambda', (0.0, 1e4)),
        cp('tree_method',
           ('exact', 'approx', 'hist', 'gpu_exact', 'gpu_hist')),
        fp('sketch_eps', (0.003, 1)),
        cp('grow_policy', ('depthwise', 'lossguide')),
        ip('max_leaves', (0, 100)),
        ip('max_bin', (20, 2000)),
        # Presumably 'dart'-booster-specific parameters -- see XGBoost docs.
        cp('sample_type', ('uniform', 'weighted')),
        cp('normalize_type', ('tree', 'forest')),
        fp('rate_drop', (0, 1)),
        fp('skip_drop', (0, 1)),
        # Presumably 'gblinear'-booster-specific parameters -- see XGBoost docs.
        cp('updater', ('shotgun', 'coord_descent')),
        cp('feature_selector',
           ('cyclic', 'shuffle', 'random', 'greedy', 'thrifty')),
    ]
    super().__init__(space, XGBClassifier)