Example #1
def test_gp_mcmc_fit(tuning_job_state):
    def tuning_job_state_mcmc(X, Y) -> TuningJobState:
        Y = [dictionarize_objective(y) for y in Y]

        return TuningJobState(
            HyperparameterRanges_Impl(
                HyperparameterRangeContinuous('x', -4., 4., LinearScaling())),
            [CandidateEvaluation(x, y) for x, y in zip(X, Y)], [], [])

    _set_seeds(0)

    def f(x):
        return 0.1 * np.power(x, 3)

    X = np.concatenate(
        (np.random.uniform(-4., -1., 10), np.random.uniform(1., 4., 10)))
    Y = f(X)
    X_test = np.sort(np.random.uniform(-1., 1., 10))

    X = [(x, ) for x in X]
    X_test = [(x, ) for x in X_test]

    tuning_job_state = tuning_job_state_mcmc(X, Y)
    # check that fitting runs end-to-end
    random_seed = 0
    gpmodel = default_gpmodel_mcmc(tuning_job_state,
                                   random_seed,
                                   mcmc_config=DEFAULT_MCMC_CONFIG)
    model = GPMXNetModel(tuning_job_state,
                         DEFAULT_METRIC,
                         random_seed,
                         gpmodel,
                         fit_parameters=True,
                         num_fantasy_samples=20)

    X = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X]
    predictions = model.predict(np.array(X))

    # The MCMC model returns one (mean, std) pair per posterior sample of the
    # GP hyperparameters; average them into a single predictive mean and std.
    Y_std_list = [stds for means, stds in predictions]
    Y_mean_list = [means for means, stds in predictions]
    Y_mean = np.mean(Y_mean_list, axis=0)
    Y_std = np.mean(Y_std_list, axis=0)

    assert np.all(np.abs(Y_mean - Y) < 1e-1), \
        "in a noiseless setting, the GP posterior mean should closely match the outputs at the training points"

    X_test = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X_test]

    predictions_test = model.predict(np.array(X_test))
    Y_std_test_list = [stds for means, stds in predictions_test]
    Y_std_test = np.mean(Y_std_test_list, axis=0)
    assert np.max(Y_std) < np.min(Y_std_test), \
        "Standard deviation at unobserved points should be larger than at observed ones"
Example #2
def test_gp_fit(tuning_job_state):
    _set_seeds(0)
    X = [
        (0.0, 0.0),
        (1.0, 0.0),
        (0.0, 1.0),
        (1.0, 1.0),
    ]
    Y = [np.sum(x) * 10.0 for x in X]

    # check that fitting runs end-to-end
    random_seed = 0
    gpmodel = default_gpmodel(tuning_job_state,
                              random_seed,
                              optimization_config=DEFAULT_OPTIMIZATION_CONFIG)
    model = GPMXNetModel(tuning_job_state,
                         DEFAULT_METRIC,
                         random_seed,
                         gpmodel,
                         fit_parameters=True,
                         num_fantasy_samples=20)

    X = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X]
    Y_mean, Y_std = model.predict(np.array(X))[0]

    assert np.all(np.abs(Y_mean - Y) < 1e-1), \
        "in a noiseless setting, the GP posterior mean should closely match the outputs at the training points"

    X_test = [
        (0.2, 0.2),
        (0.4, 0.2),
        (0.1, 0.9),
        (0.5, 0.5),
    ]
    X_test = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X_test]

    Y_mean_test, Y_std_test = model.predict(np.array(X_test))[0]
    assert np.min(Y_std) < np.min(Y_std_test), \
        "Standard deviation at unobserved points should be larger than at observed ones"
def fit_predict_ours(data: dict,
                     random_seed: int,
                     optimization_config: OptimizationConfig,
                     test_intermediates: Optional[dict] = None) -> dict:
    # Create surrogate model
    num_dims = len(data['ss_limits'])
    _gpmodel = GaussianProcessRegression(
        kernel=Matern52(num_dims, ARD=True),
        mean=ZeroMeanFunction(),  # Instead of ScalarMeanFunction
        optimization_config=optimization_config,
        random_seed=random_seed,
        test_intermediates=test_intermediates)
    model = GPMXNetModel(data['state'],
                         DEFAULT_METRIC,
                         random_seed,
                         _gpmodel,
                         fit_parameters=True,
                         num_fantasy_samples=20)
    model_params = model.get_params()
    print('Hyperparameters: {}'.format(model_params))
    # Prediction
    means, stddevs = model.predict(data['test_inputs'])[0]
    return {'means': means, 'stddevs': stddevs}
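fit_predict_ours only reads data['ss_limits'] (via len), data['state'], and data['test_inputs']. A hedged usage sketch, reusing the same classes as the snippets above (imports omitted here as in those snippets; the contents of the 'ss_limits' entries are an assumption, since only their number is used):

# Usage sketch only; 'ss_limits' entries are illustrative placeholders.
hp_ranges = HyperparameterRanges_Impl(
    HyperparameterRangeContinuous('x', -4., 4., LinearScaling()))
X_train = [(x,) for x in np.random.uniform(-4., 4., 20)]
Y_train = [dictionarize_objective(0.1 * x[0] ** 3) for x in X_train]
state = TuningJobState(
    hp_ranges,
    [CandidateEvaluation(x, y) for x, y in zip(X_train, Y_train)], [], [])
X_test = [(x,) for x in np.linspace(-4., 4., 50)]
data = {
    'ss_limits': [(-4., 4.)],  # one entry per hyperparameter (assumed format)
    'state': state,
    'test_inputs': np.array([hp_ranges.to_ndarray(x) for x in X_test]),
}
result = fit_predict_ours(data, random_seed=0,
                          optimization_config=DEFAULT_OPTIMIZATION_CONFIG)
print(result['means'].shape, result['stddevs'].shape)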