# Example 1
def test_early_stopping_simple():
    """Early stopping must halt the tuner after exactly 5 evaluated params.

    The same contract is checked twice: once with the default optimizer and
    once with the 'Random' optimizer.
    """
    param_dict = dict(x=range(-10, 10))

    def objfunc(p_list):
        # Simple convex objective: x**2 for each candidate in the batch.
        return [p['x']**2 for p in p_list]

    def early_stop(results):
        # Fix: return an explicit bool instead of True-or-implicit-None.
        return len(results['params_tried']) >= 5

    config = dict(num_iteration=20,
                  initial_random=1,
                  early_stopping=early_stop)

    tuner = Tuner(param_dict, objfunc, conf_dict=config)
    results = tuner.minimize()
    assert (len(results['params_tried']) == 5)

    # The same early-stopping behavior must hold for random search.
    config = dict(num_iteration=20,
                  initial_random=1,
                  early_stopping=early_stop,
                  optimizer='Random')

    tuner = Tuner(param_dict, objfunc, conf_dict=config)
    results = tuner.minimize()
    assert (len(results['params_tried']) == 5)
# Example 2
def test_celery_scheduler():
    """Exercise the celery scheduler decorator using mocked async results."""
    try:
        import celery
    except ModuleNotFoundError:
        # celery is an optional dependency; skip silently when absent.
        return

    # search space
    param_space = dict(x=range(-10, 10))

    class SquareResult:
        """Mimics a celery AsyncResult whose value is x**2."""

        def __init__(self, x):
            self.x = x

        def objective(self):
            return self.x * self.x

        def get(self, timeout=None):
            return self.objective()

    @scheduler.celery(n_jobs=2)
    def objective(x):
        return SquareResult(x)

    tuner = Tuner(param_space, objective)
    # n_jobs must be reflected in the tuner's batch size.
    assert tuner.config.batch_size == 2

    results = tuner.minimize()
    assert abs(results['best_params']['x']) <= 0.1

    class ShiftedResult:
        """Mimics an AsyncResult for (x - 5)**2 that times out for x < -8."""

        def __init__(self, x):
            self.x = x

        def objective(self):
            return (self.x - 5) * (self.x - 5)

        def get(self, timeout=None):
            if self.x < -8:
                raise celery.exceptions.TimeoutError("timeout")
            return self.objective()

    @scheduler.celery(n_jobs=1)
    def objective(x):
        return ShiftedResult(x)

    # Timeouts for part of the space must not prevent finding the optimum.
    tuner = Tuner(param_space, objective)
    results = tuner.minimize()

    assert abs(results['best_params']['x'] - 5) <= 0.1
# Example 3
def test_convex():
    """The tuner should locate the minimum of a 2-D convex bowl near (0, 0)."""
    param_dict = {
        'x': range(-100, 10),
        'y': range(-10, 20),
    }

    x_opt = 0
    y_opt = 0

    def objfunc(args_list):
        # Scaled paraboloid, evaluated for every candidate in the batch.
        return [(p['x'] ** 2 + p['y'] ** 2) / 1e4 for p in args_list]

    tuner = Tuner(param_dict, objfunc)
    results = tuner.minimize()

    print('best hyper parameters:', results['best_params'])
    print('best Accuracy:', results['best_objective'])

    # Both coordinates must land within 3 of the known optimum.
    assert abs(results['best_params']['x'] - x_opt) <= 3
    assert abs(results['best_params']['y'] - y_opt) <= 3
# Example 4
def test_config():
    """Minimizing with the 'Random' optimizer should still land near (0, 0)."""
    param_dict = {
        'x': range(-10, 10),
        'y': range(-10, 10),
    }

    x_opt = 0
    y_opt = 0

    def objfunc(args_list):
        # Batch-evaluate the scaled paraboloid objective.
        return [(p['x'] ** 2 + p['y'] ** 2) / 1e4 for p in args_list]

    def check(results, error_msg):
        # Both coordinates must be within 3 of the known optimum.
        assert abs(results['best_params']['x'] - x_opt) <= 3, error_msg
        assert abs(results['best_params']['y'] - y_opt) <= 3, error_msg

    tuner = Tuner(param_dict, objfunc, conf_dict=dict(optimizer='Random'))
    check(tuner.minimize(), 'error while minimizing random')
# Example 5
def main():
    """Run the tuner on the module-level search space and sanity-check results."""
    # Initialize and run the tuner in one expression.
    results = Tuner(param_space, objective).minimize()
    best_params = results["best_params"]
    best_objective = results["best_objective"]
    print(f'Optimal value of parameters: {best_params} and objective: {best_objective}')
    # The known optimum is x == 0 with objective value 0.
    assert abs(best_params['x'] - 0) < 1
    assert abs(best_objective - 0) < 1
# Example 6
def main():
    """Minimize the module-level objective and verify it reaches roughly 0."""
    tuner = Tuner(param_space, objective)
    outcome = tuner.minimize()

    print('best parameters:', outcome['best_params'])
    print('best accuracy:', outcome['best_objective'])

    # The best objective found should be close to the known minimum of 0.
    assert abs(outcome['best_objective'] - 0) < 1
# Example 7
def test_custom_scheduler():
    """A custom scheduler with n_jobs=2 must deliver batches of exactly 2."""
    # search space
    param_space = dict(x=range(-10, 10))

    @scheduler.custom(n_jobs=2)
    def objective(params):
        # The scheduler is expected to hand over exactly n_jobs candidates.
        assert len(params) == 2
        return [candidate['x'] * candidate['x'] for candidate in params]

    tuner = Tuner(param_space, objective, dict(initial_random=2))
    results = tuner.minimize()

    # The minimum of x**2 over the space is at x == 0.
    assert abs(results['best_params']['x'] - 0) <= 0.1
# Example 8
def test_early_stopping_complex():
    """Stop the tuner when no improvement is seen for a fixed wall-clock window."""
    param_dict = dict(x=range(-10, 10))

    def objfunc(p_list):
        # Each batch takes 2 seconds and returns one random value duplicated
        # across candidates, so no genuine improvement is ever observed.
        time.sleep(2)
        return [np.random.uniform()] * len(p_list)

    class Context:
        # Mutable class-level state shared across early_stop invocations.
        previous_best = None
        previous_best_time = None
        min_improvement_secs = 1
        objective_variation = 1

    def early_stop(results):
        ctx = Context
        current_best = results['best_objective']
        now = time.time()

        if ctx.previous_best is None:
            # First call: record the baseline and keep going.
            ctx.previous_best = current_best
            ctx.previous_best_time = now
            return False

        no_improvement = current_best <= ctx.previous_best + ctx.objective_variation
        stale = now - ctx.previous_best_time > ctx.min_improvement_secs
        if no_improvement and stale:
            print("no improvement in %d seconds: stopping early." %
                  ctx.min_improvement_secs)
            return True

        # Improvement observed: reset the baseline.
        ctx.previous_best = current_best
        ctx.previous_best_time = now
        return False

    config = dict(num_iteration=20,
                  initial_random=1,
                  early_stopping=early_stop)

    tuner = Tuner(param_dict, objfunc, conf_dict=config)
    results = tuner.minimize()
    # With 2s per batch and a 1s staleness window, exactly 3 batches run.
    assert (len(results['params_tried']) == 3)