Example 1
import torch
import torch.nn as nn

import hypertune as ht


# Net and get_data are assumed to be defined elsewhere in this module;
# a minimal sketch of Net follows after this example
def tune():
    X, y = get_data()

    too = torch.optim.Adam, torch.optim.Adadelta, torch.optim.Adagrad, torch.optim.ASGD
    to = ht.CategoricalParameter('torch_optimizer', options=too)
    eta = ht.ContinuousParameter('eta', lower_bound=1e-10, upper_bound=1e-1)
    mi = ht.DiscreteParameter('max_iter', lower_bound=1e2, upper_bound=1e4)

    hl1 = ht.DiscreteParameter('', lower_bound=10, upper_bound=100)
    hl2 = ht.DiscreteParameter('', lower_bound=10, upper_bound=100)
    hls = ht.TupleParameter('hidden_layer_sizes', values=(hl1, hl2))

    tp1 = ht.CategoricalParameter('', options=(nn.Linear, ))
    tp2 = ht.CategoricalParameter('', options=(nn.Linear, ))
    tp3 = ht.CategoricalParameter('', options=(nn.Linear, ))
    top = ht.TupleParameter('topology', values=(tp1, tp2, tp3))

    hypers = [to, eta, mi, hls, top]

    tuner = ht.HyperTune(algorithm=Net,
                         parameters=hypers,
                         train_func=Net.fit,
                         objective_func=Net.mse,
                         train_func_args=(X, y),
                         objective_func_args=(X, y),
                         max_evals=100,
                         maximize=False,
                         num_replications=1)

    tuner.tune()
    print(tuner.get_results())
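Example 1 (and Example 7 below) assumes a small PyTorch regressor Net and a get_data loader that are not shown. A minimal sketch of what they could look like, matching the hyperparameter names used above (the actual implementations in the source repository may differ):

import numpy as np
import torch
import torch.nn as nn


class Net:
    """Minimal sketch of the interface the examples assume."""

    def __init__(self, torch_optimizer, eta, max_iter,
                 hidden_layer_sizes, topology):
        # one layer per entry in `topology`, sized by the input width,
        # the tuned `hidden_layer_sizes`, and a single output
        sizes = (3, *hidden_layer_sizes, 1)
        layers = [layer(sizes[i], sizes[i + 1])
                  for i, layer in enumerate(topology)]
        self._model_ = nn.Sequential(*layers)
        self._loss_func_ = nn.MSELoss()
        self._opt_ = torch_optimizer(self._model_.parameters(), lr=eta)
        self._max_iter_ = int(max_iter)

    def fit(self, X, y):
        X = torch.as_tensor(X, dtype=torch.float32)
        y = torch.as_tensor(y, dtype=torch.float32).reshape(-1, 1)
        for _ in range(self._max_iter_):
            self._opt_.zero_grad()
            self._loss_func_(self._model_(X), y).backward()
            self._opt_.step()

    def predict(self, X):
        with torch.no_grad():
            return self._model_(torch.as_tensor(X, dtype=torch.float32))

    def mse(self, X, y):
        y = torch.as_tensor(y, dtype=torch.float32).reshape(-1, 1)
        return self._loss_func_(y, self.predict(X)).item()


def get_data():
    # stand-in loader: random regression data with 3 features
    X = np.random.rand(10**3, 3)
    y = np.random.rand(10**3)
    return X, y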
Example 2
    def test_continuous_param(self):
        c = ht.ContinuousParameter('c', lower_bound=0, upper_bound=0)
        self.assertEqual(c.shape, 1)

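        # rand() is a test helper (not shown here) that returns an array
        # of raw values in [-1, 1]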
        r = rand()
        v = c.get_val(r)
        self.assertEqual(v, 0)

        c = ht.ContinuousParameter('c', lower_bound=0, upper_bound=1)
        r = rand()
        v = c.get_val(r)
        exp = (r[0] + 1) / 2
        self.assertEqual(v, exp)

        lb, ub = -100, 500
        c = ht.ContinuousParameter('c', lower_bound=lb, upper_bound=ub)
        r = rand()
        v = c.get_val(r)
        exp = ((r[0] + 1) / 2) * (ub - lb) + lb
        self.assertEqual(v, exp)
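Reading the assertions above: get_val receives a raw value r that appears to live in [-1, 1] and maps it linearly onto [lower_bound, upper_bound], i.e. v = ((r + 1) / 2) * (ub - lb) + lb; with degenerate bounds (0, 0) every raw value collapses to 0.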
Example 3
    def test_categorical_param(self):
        c = ht.CategoricalParameter('c', options=('a', 'b'))
        self.assertEqual(c.name, 'c')
        self.assertEqual(c.shape, 1)

        v = c.get_val([-1])
        self.assertEqual(v, 'a')

        co = ht.ContinuousParameter('co', lower_bound=0, upper_bound=1)
        con = ht.ConstantParameter('con', value=100)
        c = ht.CategoricalParameter('c', options=('a', co, con))
        self.assertEqual(c.shape, 2)
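The final assertion hints at how shapes compose (my reading of the test, not documented behavior): a categorical consumes one raw slot to pick among its options, and any option that is itself a tunable parameter adds its own slots, so ('a', co, con) needs 1 + 1 = 2, with the constant contributing none.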
Example 4
    def test_import(self):
        print('go')

        a = ht.ContinuousParameter('a', lower_bound=0, upper_bound=1)
        hypers = [a]

        gs = ht.optimizers.GridSearch(depth=1, resolution=0.1)
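        # A is a dummy algorithm class defined elsewhere in the test module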
        tuner = ht.HyperTune(algorithm=A,
                             parameters=hypers,
                             optimizer=gs,
                             train_func=A.fit,
                             objective_func=A.acc,
                             max_evals=100,
                             maximize=False,
                             num_replications=1)

        results = tuner.tune()
        print(results)
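This test also shows the search strategy being swapped via the optimizer argument; GridSearch(depth=1, resolution=0.1) presumably sweeps a over [0, 1] in steps of 0.1 (the meaning of depth is not shown in these examples).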
Example 5
    def test_tuple_param(self):
        t = ht.TupleParameter('t', values=(1, 1, 1))
        self.assertEqual(t.shape, 0)

        c = ht.ContinuousParameter('c', lower_bound=0, upper_bound=1)
        d = ht.DiscreteParameter('d', lower_bound=0, upper_bound=1)
        co = ht.ConstantParameter('co', value='world')
        t = ht.TupleParameter('t', values=(c, d, co))
        self.assertEqual(t.shape, 2)

        r = [-1, 0.5]
        v = t.get_val(r)
        exp = (0.0, 1, 'world')
        self.assertEqual(v, exp)

        t = ht.TupleParameter('t', values=(c, {'z': -1}, d))
        r = [-1, 0.5]
        v = t.get_val(r)
        exp = (0.0, {'z': -1}, 1)
        self.assertEqual(v, exp)
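Taken together, the assertions show that a TupleParameter's shape counts only its tunable members (literals, constants, and plain objects such as the dict consume no raw slots), and that get_val consumes the raw vector left to right while passing everything else through unchanged.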
Example 6
from sklearn.neural_network import MLPRegressor
import hypertune as ht
import numpy as np

# make an example dataset
p = 75
X = np.random.rand(100, 3)
y = np.random.rand(100)
X_train, X_test, y_train, y_test = X[:p], X[p:], y[:p], y[p:]

# define the target hyperparameters
activation = ht.CategoricalParameter('activation',
                                     options=('identity', 'logistic', 'tanh',
                                              'relu'))
learning_rate_init = ht.ContinuousParameter('learning_rate_init',
                                            lower_bound=10**-5,
                                            upper_bound=0.1)
max_iter = ht.DiscreteParameter('max_iter', lower_bound=500, upper_bound=10**3)

hl1 = ht.DiscreteParameter('', lower_bound=50, upper_bound=250)
hl2 = ht.DiscreteParameter('', lower_bound=100, upper_bound=250)
hl3 = ht.DiscreteParameter('', lower_bound=1, upper_bound=100)
hidden_layer_sizes = ht.TupleParameter('hidden_layer_sizes',
                                       values=(hl1, hl2, hl3))
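# sub-parameters nested in a TupleParameter can be left unnamed (name '');
# presumably only the tuple's own name is passed on to the estimator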

hypers = [
    activation, learning_rate_init, max_iter, hidden_layer_sizes
]

# define a Hypertune object
tuner = ht.HyperTune(algorithm=MLPRegressor,
                     parameters=hypers,
                     train_func=MLPRegressor.fit,
                     # the snippet breaks off after the first argument; the
                     # remaining ones follow the pattern of Example 8, and
                     # MLPRegressor.score as the objective is an assumption
                     objective_func=MLPRegressor.score,
                     train_func_args=(X_train, y_train),
                     objective_func_args=(X_test, y_test),
                     max_evals=50,
                     maximize=True)
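As in the other examples, the search would then be run with:

results = tuner.tune()
print(results)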
Example 7
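# tail of the Net class definition (cf. the sketch after Example 1);
# mse is the objective handed to HyperTune below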
    def mse(self, X, y):
        y_hat = self.predict(X)
        return self._loss_func_(y, y_hat).item()


# make an example dataset
p = 750
X = np.random.rand(10**4, 3)
y = np.random.rand(10**4)
X_train, X_test, y_train, y_test = X[:p], X[p:], y[:p], y[p:]

# define the target hyperparameters
too = torch.optim.Adam, torch.optim.Adadelta, torch.optim.Adagrad, torch.optim.ASGD
to = ht.CategoricalParameter('torch_optimizer', options=too)
eta = ht.ContinuousParameter('eta', lower_bound=1e-10, upper_bound=1e-1)
mi = ht.DiscreteParameter('max_iter', lower_bound=1e2, upper_bound=1e4)
hl1 = ht.DiscreteParameter('', lower_bound=10, upper_bound=100)
hl2 = ht.DiscreteParameter('', lower_bound=10, upper_bound=100)
hls = ht.TupleParameter('hidden_layer_sizes', values=(hl1, hl2))
tp1 = ht.CategoricalParameter('', options=(nn.Linear, ))
tp2 = ht.CategoricalParameter('', options=(nn.Linear, ))
tp3 = ht.CategoricalParameter('', options=(nn.Linear, ))
top = ht.TupleParameter('topology', values=(tp1, tp2, tp3))
hypers = [to, eta, mi, hls, top]

# define a Hypertune object
tuner = ht.HyperTune(algorithm=Net,
                     parameters=hypers,
                     train_func=Net.fit,
                     objective_func=Net.mse,
                     # the snippet breaks off here; the remaining arguments
                     # are reconstructed from the identical call in Example 1
                     train_func_args=(X_train, y_train),
                     objective_func_args=(X_test, y_test),
                     max_evals=100,
                     maximize=False,
                     num_replications=1)
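Running the search then mirrors Examples 1 and 8:

results = tuner.tune()
print(results)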
Example 8
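# presumably the tail of Perceptron.mse, the objective used below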
        return mse


def aux_objective_func(algo, X, y):
    _y = np.where(y >= 0.0, 1, -1)
    y_hat = np.where(algo.predict(X) >= 0.0, 1, -1)
    return np.mean(_y == y_hat)


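# keep only iris classes 0 and 1 and relabel to -1/+1 so the perceptron's
# sign-based predictions line up with aux_objective_func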
X, y = datasets.iris(return_splits=False)
X = X[y != 2]
y = y[y != 2]
y[y == 0] = -1
X_train, X_test, y_train, y_test = datasets.split(X, y)

alpha = ht.ContinuousParameter('alpha', lower_bound=10**-10, upper_bound=0.01)
epochs = ht.DiscreteParameter('epochs', lower_bound=200, upper_bound=10**4)

hypers = [alpha, epochs]

tuner = ht.HyperTune(algorithm=Perceptron,
                     parameters=hypers,
                     train_func=Perceptron.fit,
                     objective_func=Perceptron.mse,
                     train_func_args=(X_train, y_train),
                     objective_func_args=(X_test, y_test),
                     max_evals=50,
                     maximize=False)

results = tuner.tune()
print(results)