Example #1
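All three examples share the same imports and helper functions from the pyshac test suite. A plausible reconstruction of the imports; the pyshac module paths are an assumption based on the package layout:

import os
import shutil

import numpy as np

# assumed module paths for the hyperparameter and engine APIs
import pyshac.config.hyperparameters as hp
from pyshac.core.managed import tf_engine as engine
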
def test_shac_simple_custom_basepath():
    total_budget = 50
    batch_size = 5
    objective = 'max'

    params = get_hyperparameter_list()
    h = hp.HyperParameterList(params)

    shac = engine.KerasSHAC(h,
                            total_budget=total_budget,
                            max_gpu_evaluators=1,
                            num_batches=batch_size,
                            objective=objective,
                            save_dir='custom')

    assert shac.total_classifiers == min(max(batch_size - 1, 1), 18)
    assert shac._per_classifier_budget == 10
    assert shac.num_workers == 10
    assert len(shac.classifiers) == 0
    assert len(shac.dataset) == 0

    # run generation and evaluation sequentially for easier debugging
    shac.num_parallel_generators = 1
    shac.num_parallel_evaluators = 1

    # training
    shac.fit(evaluation_simple_keras_tf)

    assert len(shac.classifiers) <= shac.total_classifiers
    assert os.path.exists('custom/datasets/dataset.csv')
    assert os.path.exists('custom/classifiers/classifiers.pkl')

    # Serialization
    shac.save_data()

    # Restore with a different batch size
    shac2 = engine.KerasSHAC(None,
                             total_budget=total_budget,
                             max_gpu_evaluators=0,
                             num_batches=10,
                             objective=objective,
                             save_dir='custom')

    shac2.restore_data()

    # test that restore_data() raises no error when the save files are missing
    shutil.rmtree('custom/')

    shac2.dataset = None
    shac2.classifiers = None
    shac2.restore_data()
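
The helpers get_hyperparameter_list, evaluation_simple and evaluation_simple_keras_tf are defined elsewhere in the test module. A minimal sketch of the first two, assuming pyshac's DiscreteHyperParameter and UniformContinuousHyperParameter classes; the parameter names and values are hypothetical:

def get_hyperparameter_list():
    # hypothetical set of parameters; the real test module defines its own
    return [hp.DiscreteHyperParameter('h1', [0, 1, 2]),
            hp.DiscreteHyperParameter('h2', [3, 4, 5, 6]),
            hp.UniformContinuousHyperParameter('h3', 7, 10)]

def evaluation_simple(worker_id, params):
    # the call sites above pass (worker_id, parameter dict) and average the
    # returned scalars, so any deterministic numeric score will do here
    return sum(params.values())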
Example #2
def test_shac_simple_early_stop():
    total_budget = 100
    batch_size = 20
    objective = 'max'

    params = get_hyperparameter_list()
    h = hp.HyperParameterList(params)

    shac = engine.KerasSHAC(h,
                            total_budget=total_budget,
                            max_gpu_evaluators=0,
                            num_batches=batch_size,
                            objective=objective)

    assert shac.total_classifiers == min(max(batch_size - 1, 1), 18)
    assert shac._per_classifier_budget == 5
    assert shac.num_workers == 5
    assert len(shac.classifiers) == 0
    assert len(shac.dataset) == 0

    # run generation and evaluation sequentially for easier debugging
    shac.num_parallel_generators = 1
    shac.num_parallel_evaluators = 1

    # training with early stopping: the run should halt before any classifier is trained
    shac.fit(evaluation_simple_keras_tf, early_stop=True, skip_cv_checks=True)
    assert len(shac.classifiers) == 0
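
For reference, a hypothetical sketch of the Keras evaluation function used by these tests; the (session, worker_id, parameters) signature expected by the managed Keras engine, as well as the model itself, are assumptions:

def evaluation_simple_keras_tf(session, worker_id, params):
    import tensorflow as tf
    # train a throwaway model so the engine exercises its evaluation path,
    # then score the sampled hyperparameters with a deterministic value
    model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(2,))])
    model.compile(optimizer='adam', loss='mse')
    model.fit(np.random.random((16, 2)), np.random.random((16, 1)),
              epochs=1, verbose=0)
    return params['h1'] + params['h2'] + params['h3']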
Example #3
def test_shac_simple():
    total_budget = 100
    batch_size = 5
    objective = 'max'

    params = get_hyperparameter_list()
    h = hp.HyperParameterList(params)

    shac = engine.KerasSHAC(h, total_budget=total_budget, max_gpu_evaluators=1,
                            num_batches=batch_size, objective=objective)

    assert shac.total_classifiers == min(max(batch_size - 1, 1), 18)
    assert shac._per_classifier_budget == 20
    assert shac.num_workers == 20
    assert len(shac.classifiers) == 0
    assert len(shac.dataset) == 0

    # run generation and evaluation sequentially for easier debugging
    shac.num_parallel_generators = 1
    shac.num_parallel_evaluators = 1

    print("Evaluating before training")
    np.random.seed(0)
    random_samples = shac.predict(num_batches=16, num_workers_per_batch=1)  # random sample predictions

    random_eval = [evaluation_simple(0, sample) for sample in random_samples]
    random_mean = np.mean(random_eval)

    print()

    # training
    shac.fit(evaluation_simple_keras_tf)

    assert len(shac.classifiers) <= shac.total_classifiers
    assert os.path.exists('shac/datasets/dataset.csv')
    assert os.path.exists('shac/classifiers/classifiers.pkl')

    print()
    print("Evaluating after training")
    np.random.seed(0)
    predictions = shac.predict(num_batches=16, num_workers_per_batch=1)
    pred_evals = [evaluation_simple(0, pred) for pred in predictions]
    pred_mean = np.mean(pred_evals)

    print()
    print("Random mean : ", random_mean)
    print("Predicted mean : ", pred_mean)

    assert random_mean < pred_mean

    # Serialization
    shac.save_data()

    # Restore with a different batch size
    shac2 = engine.KerasSHAC(None, total_budget=total_budget, max_gpu_evaluators=0,
                             num_batches=10, objective=objective)

    shac2.restore_data()

    np.random.seed(0)
    predictions = shac2.predict(num_batches=10, num_workers_per_batch=1)
    pred_evals = [evaluation_simple(0, pred) for pred in predictions]
    pred_mean = np.mean(pred_evals)

    print()
    print("Random mean : ", random_mean)
    print("Predicted mean : ", pred_mean)

    assert random_mean < pred_mean

    # test that restore_data() raises no error when the save files are missing
    shutil.rmtree('shac/')

    shac2.dataset = None
    shac2.classifiers = None
    shac2.restore_data()