def __init__(self):
    hp_space = [
        HyperParameter.float_param('alpha', (0.0, 10.0)),
        HyperParameter.float_param('binarize', (0.0, 1.0))
    ]
    initializer = sklearn.naive_bayes.BernoulliNB
    super().__init__(hp_space, initializer)
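# Note: every __init__ in this section relies on a HyperParameter factory and a common
# base wrapper whose definitions are not shown here. The sketch below is a minimal,
# hypothetical reconstruction -- the names, signatures, and uniform-sampling behaviour
# are assumptions for illustration, not the repository's actual implementation -- of what
# float_param / int_param / categorical_param and super().__init__(hp_space, initializer)
# are presumed to provide.
import random


class HyperParameter:
    """Hypothetical descriptor of one tunable parameter: a name plus its domain."""

    def __init__(self, name, domain, kind):
        self.name = name
        self.domain = domain  # (low, high) bounds or a tuple of categories
        self.kind = kind      # 'float', 'int', or 'categorical'

    @classmethod
    def float_param(cls, name, bounds):
        return cls(name, bounds, 'float')

    @classmethod
    def int_param(cls, name, bounds):
        return cls(name, bounds, 'int')

    @classmethod
    def categorical_param(cls, name, choices):
        return cls(name, choices, 'categorical')

    def sample(self):
        # Uniform sampling inside the declared domain; a real search strategy
        # (grid, random, Bayesian, ...) would replace this.
        if self.kind == 'float':
            return random.uniform(*self.domain)
        if self.kind == 'int':
            return random.randint(*self.domain)
        return random.choice(self.domain)


class ModelWrapper:
    """Hypothetical base class that the __init__ methods in this section delegate to."""

    def __init__(self, hp_space, initializer):
        self.hp_space = hp_space        # list of HyperParameter objects
        self.initializer = initializer  # estimator/optimizer class, not an instance

    def sample_model(self):
        # Draw one value per hyperparameter and instantiate the wrapped class.
        params = {hp.name: hp.sample() for hp in self.hp_space}
        return self.initializer(**params)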
def __init__(self):
    hp_space = [
        HyperParameter.float_param('reg_param', (0., 1.)),
        HyperParameter.float_param('tol', (1e-5, 1e-3))
    ]
    initializer = sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.GradientDescentOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.int_param('n_estimators', (50, 500)),
        HyperParameter.float_param('learning_rate', (0.1, 2.)),
        HyperParameter.categorical_param('algorithm', ('SAMME', 'SAMME.R')),
    ]
    initializer = sklearn.ensemble.AdaBoostClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('alpha', (1e-2, 100.0)),
        HyperParameter.categorical_param('fit_prior', (True, False))
        # HyperParameter.float_param('binarize', (0.0, 1.0))
    ]
    initializer = sklearn.naive_bayes.BernoulliNB
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.int_param('n_neighbors', (1, 100)),
        HyperParameter.categorical_param('weights', ('uniform', 'distance')),
        HyperParameter.categorical_param('p', (1, 2))
    ]
    initializer = sklearn.neighbors.KNeighborsClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.int_param('n_estimators', (5, 100)),
        HyperParameter.float_param('max_samples', (0.0, 1.0)),
        # HyperParameter.float_param('max_features', (0.0, 1.0))
    ]
    initializer = sklearn.ensemble.BaggingClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('momentum', (0., 1.)),
        HyperParameter.categorical_param('use_locking', (True, False)),
        HyperParameter.categorical_param('use_nesterov', (True, False))
    ]
    model_initializer = tf.train.MomentumOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('initial_accumulator_value', (1e-6, 2.)),
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.AdagradOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('l1_regularization_strength', (0., 2.)),
        HyperParameter.float_param('l2_regularization_strength', (0., 2.)),
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.ProximalGradientDescentOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('alpha', (1e-2, 1e2)),
        HyperParameter.int_param('max_iter', (1000, 10000)),
        HyperParameter.float_param('tol', (1e-4, 1e-2)),
        HyperParameter.categorical_param('solver', ('svd', 'cholesky', 'lsqr', 'sparse_cg', 'sag', 'saga'))
    ]
    initializer = sklearn.linear_model.RidgeClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('beta1', (0., 1. - 1e-6)),
        HyperParameter.float_param('beta2', (0., 1. - 1e-6)),
        HyperParameter.float_param('epsilon', (1e-6, 0.1)),
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.AdamOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('C', (0.1, 10)),
        HyperParameter.int_param('max_iter', (1000, 10000)),
        HyperParameter.float_param('tol', (1e-4, 1e-2)),
        HyperParameter.categorical_param('loss', ('hinge', 'squared_hinge'))
    ]
    initializer = sklearn.linear_model.PassiveAggressiveClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.categorical_param('penalty', (None, 'l2', 'l1', 'elasticnet')),
        HyperParameter.float_param('alpha', (1e-5, 1e-3)),
        HyperParameter.int_param('max_iter', (1000, 10000)),
        HyperParameter.float_param('tol', (1e-4, 1e-2)),
        HyperParameter.float_param('eta0', (0.1, 10))
    ]
    initializer = sklearn.linear_model.Perceptron
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('C', (1e-5, 10)),
        HyperParameter.categorical_param('fit_intercept', (True,)),
        HyperParameter.float_param('tol', (1e-5, 1e-1)),
        HyperParameter.categorical_param('loss', ('hinge', 'squared_hinge')),
        HyperParameter.categorical_param('average', (False, True))
    ]
    initializer = sklearn.linear_model.PassiveAggressiveClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('radius', (1e-2, 1e3)),
        HyperParameter.categorical_param('weights', ('uniform', 'distance')),
        HyperParameter.categorical_param('algorithm', ('ball_tree', 'kd_tree', 'brute')),
        HyperParameter.int_param('leaf_size', (3, 100)),
        HyperParameter.int_param('p', (1, 10))
    ]
    initializer = sklearn.neighbors.RadiusNeighborsClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.categorical_param('penalty', ('l1', 'l2')),
        HyperParameter.categorical_param('loss', ('hinge', 'squared_hinge')),
        HyperParameter.categorical_param('dual', (True, False)),
        HyperParameter.float_param('tol', (1e-6, 1e-1)),
        HyperParameter.float_param('C', (0.03125, 32768))
    ]
    initializer = sklearn.svm.LinearSVC
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('rho', (1e-6, 1.)),  # recheck needed
        HyperParameter.float_param('epsilon', (1e-6, 0.1)),  # wider range is allowed but not suggested
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.AdadeltaOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('tol', (1e-6, 1e-1)),
        HyperParameter.float_param('C', (1e-2, 1e2)),
        HyperParameter.categorical_param('solver', ('newton-cg', 'lbfgs', 'sag')),
        HyperParameter.int_param('max_iter', (100, 1000)),
        HyperParameter.categorical_param('multi_class', ('ovr', 'multinomial'))
    ]
    initializer = sklearn.linear_model.LogisticRegression
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.categorical_param('criterion', ('gini', 'entropy')),
        HyperParameter.int_param('max_depth', (1, 40)),
        HyperParameter.int_param('min_samples_split', (2, 20)),
        HyperParameter.int_param('min_samples_leaf', (1, 20)),
        HyperParameter.categorical_param('max_features', ('sqrt', 'log2', None)),
        # HyperParameter.int_param('max_leaf_nodes', (2, 100)),
        # HyperParameter.float_param('min_impurity_decrease', (0., 100.)),
    ]
    model_initializer = sklearn.tree.DecisionTreeClassifier
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('alpha', (1e-2, 100.0))
    ]
    initializer = sklearn.naive_bayes.MultinomialNB
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.int_param('max_iter_predict', (10, 1000))
    ]
    initializer = sklearn.gaussian_process.GaussianProcessClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('var_smoothing', (1e-10, 1e-8))
    ]
    initializer = sklearn.naive_bayes.GaussianNB
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.int_param('n_estimators', (5, 1000)),
        HyperParameter.categorical_param('criterion', ('gini', 'entropy')),
        HyperParameter.int_param('max_depth', (2, 40)),
        HyperParameter.int_param('min_samples_split', (2, 100)),
        HyperParameter.int_param('min_samples_leaf', (1, 100)),
        HyperParameter.categorical_param('max_features', ('sqrt', 'log2', None)),
        HyperParameter.int_param('max_leaf_nodes', (2, 100)),
        HyperParameter.float_param('min_impurity_decrease', (0.0, 100.0))
    ]
    initializer = sklearn.ensemble.ExtraTreesClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.categorical_param('loss', ('hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron')),
        HyperParameter.categorical_param('penalty', ('none', 'l2', 'l1', 'elasticnet')),
        HyperParameter.float_param('alpha', (1e-5, 1e-3)),
        HyperParameter.float_param('l1_ratio', (0.0, 1.0)),
        HyperParameter.int_param('max_iter', (1000, 10000)),
        HyperParameter.float_param('tol', (1e-4, 1e-2)),
        HyperParameter.categorical_param('learning_rate', ('constant', 'optimal', 'invscaling', 'adaptive')),
        HyperParameter.float_param('eta0', (0.0, 10.0)),
        HyperParameter.float_param('power_t', (0.05, 1))
    ]
    initializer = sklearn.linear_model.SGDClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('nu', (5e-3, 1)),
        HyperParameter.categorical_param('kernel', ('linear', 'poly', 'rbf', 'sigmoid')),
        HyperParameter.int_param('degree', (1, 30)),
        HyperParameter.float_param('gamma', (1e-5, 10)),
        HyperParameter.float_param('coef0', (0., 100.)),
        HyperParameter.categorical_param('shrinking', (True, False)),
        HyperParameter.float_param('tol', (1e-5, 1e-1))
    ]
    model_initializer = sklearn.svm.NuSVC
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('learning_rate_power', (-2., -1e-6)),
        HyperParameter.float_param('initial_accumulator_value', (0., 2.)),
        HyperParameter.float_param('l1_regularization_strength', (0., 2.)),
        HyperParameter.float_param('l2_regularization_strength', (0., 2.)),
        HyperParameter.categorical_param('use_locking', (True, False)),
        HyperParameter.float_param('l2_shrinkage_regularization_strength', (0., 2.)),
    ]
    model_initializer = tf.train.FtrlOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.float_param('decay', (0., 1.)),
        HyperParameter.float_param('momentum', (0., 1.)),
        HyperParameter.float_param('epsilon', (1e-6, 0.1)),
        HyperParameter.categorical_param('use_locking', (True, False)),
        HyperParameter.categorical_param('centered', (True, False)),
    ]
    model_initializer = tf.train.RMSPropOptimizer
    super().__init__(hp_space, model_initializer)
def __init__(self):
    hp_space = [
        HyperParameter.categorical_param('n_estimators', (100,)),
        HyperParameter.categorical_param('criterion', ('gini', 'entropy')),
        HyperParameter.int_param('min_samples_split', (2, 20)),
        HyperParameter.int_param('min_samples_leaf', (1, 20)),
        HyperParameter.float_param('max_features', (0., 1.)),
        HyperParameter.categorical_param('bootstrap', (True, False)),
    ]
    initializer = sklearn.ensemble.RandomForestClassifier
    super().__init__(hp_space, initializer)
def __init__(self):
    hp_space = [
        HyperParameter.float_param('learning_rate', (1e-6, 0.15)),
        HyperParameter.int_param('global_step', (0, 1e9)),
        HyperParameter.float_param('initial_gradient_squared_accumulator_value', (1e-6, 1.)),
        HyperParameter.float_param('l1_regularization_strength', (0., 1.)),
        HyperParameter.float_param('l2_regularization_strength', (0., 1.)),
        HyperParameter.categorical_param('use_locking', (True, False))
    ]
    model_initializer = tf.train.AdagradDAOptimizer
    super().__init__(hp_space, model_initializer)
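# A hedged usage sketch, assuming the hypothetical HyperParameter/ModelWrapper classes
# sketched near the top of this section: declare a small search space, sample one
# configuration, and fit/score the resulting sklearn estimator. Everything except the
# sklearn calls is an assumption made for illustration.
import sklearn.datasets
import sklearn.ensemble
import sklearn.model_selection

X, y = sklearn.datasets.load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = sklearn.model_selection.train_test_split(
    X, y, random_state=0)

# Search space mirroring a subset of the RandomForestClassifier __init__ above.
wrapper = ModelWrapper(
    hp_space=[
        HyperParameter.categorical_param('criterion', ('gini', 'entropy')),
        HyperParameter.int_param('min_samples_split', (2, 20)),
        HyperParameter.int_param('min_samples_leaf', (1, 20)),
        HyperParameter.categorical_param('bootstrap', (True, False)),
    ],
    initializer=sklearn.ensemble.RandomForestClassifier,
)

model = wrapper.sample_model()   # one randomly sampled RandomForestClassifier
model.fit(X_train, y_train)
print(model.score(X_test, y_test))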