Example #1
def _construct_models(X, Y, metric, do_mcmc, with_pending):
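    """
    Build GP surrogate models for `metric` on a toy 2-d state with continuous
    hyperparameters x, y in [0, 1]: one MAP-fitted model, plus an MCMC-fitted
    one if `do_mcmc` is set. If `with_pending` is set, two pending evaluations
    are added to the state, to be handled by the surrogate via fantasy samples.
    """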
    pending_candidates = []
    if with_pending:
        pending_candidates = [PendingEvaluation((0.5, 0.5)), PendingEvaluation((0.2, 0.2))]
    state = TuningJobState(
        HyperparameterRanges_Impl(
            HyperparameterRangeContinuous('x', 0.0, 1.0, LinearScaling()),
            HyperparameterRangeContinuous('y', 0.0, 1.0, LinearScaling()),
        ),
        [
            CandidateEvaluation(x, y) for x, y in zip(X, Y)
        ],
        [],
        pending_candidates
    )
    random_seed = 0

    gpmodel = default_gpmodel(
        state, random_seed=random_seed,
        optimization_config=DEFAULT_OPTIMIZATION_CONFIG)
    result = [GaussProcSurrogateModel(
        state, metric, random_seed, gpmodel, fit_parameters=True,
        num_fantasy_samples=20)]
    if do_mcmc:
        gpmodel_mcmc = default_gpmodel_mcmc(
            state, random_seed=random_seed,
            mcmc_config=DEFAULT_MCMC_CONFIG)
        result.append(
            GaussProcSurrogateModel(
                state, metric, random_seed, gpmodel_mcmc,
                fit_parameters=True, num_fantasy_samples=20))
    return result


def test_gp_mcmc_fit(tuning_job_state):
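    """
    Fit a GP surrogate with MCMC hyperparameter sampling to noiseless data from
    f(x) = 0.1 * x^3, observed only outside (-1, 1). The posterior mean should
    closely reproduce the training targets, and the predictive standard
    deviation should be larger on the unobserved interval (-1, 1).
    """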
    def tuning_job_state_mcmc(X, Y) -> TuningJobState:
        Y = [dictionarize_objective(y) for y in Y]

        return TuningJobState(
            HyperparameterRanges_Impl(
                HyperparameterRangeContinuous('x', -4., 4., LinearScaling())),
            [CandidateEvaluation(x, y) for x, y in zip(X, Y)], [], [])

    _set_seeds(0)

    def f(x):
        return 0.1 * np.power(x, 3)

    # Training inputs lie outside (-1, 1); X_test below falls inside, away from the data
    X = np.concatenate(
        (np.random.uniform(-4., -1., 10), np.random.uniform(1., 4., 10)))
    Y = f(X)
    X_test = np.sort(np.random.uniform(-1., 1., 10))

    X = [(x, ) for x in X]
    X_test = [(x, ) for x in X_test]

    tuning_job_state = tuning_job_state_mcmc(X, Y)
    # Check that fitting the GP hyperparameters via MCMC sampling runs end to end
    random_seed = 0
    gpmodel = default_gpmodel_mcmc(tuning_job_state,
                                   random_seed,
                                   mcmc_config=DEFAULT_MCMC_CONFIG)
    model = GaussProcSurrogateModel(tuning_job_state,
                                    DEFAULT_METRIC,
                                    random_seed,
                                    gpmodel,
                                    fit_parameters=True,
                                    num_fantasy_samples=20)

    # Encode candidates into the internal ndarray representation expected by the model
    X = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X]
    predictions = model.predict(np.array(X))

    # With MCMC, predict() returns one prediction per hyperparameter sample; average over them
    Y_std_list = [p['std'] for p in predictions]
    Y_mean_list = [p['mean'] for p in predictions]
    Y_mean = np.mean(Y_mean_list, axis=0)
    Y_std = np.mean(Y_std_list, axis=0)

    assert np.all(np.abs(Y_mean - Y) < 1e-1), \
        "In a noiseless setting, the GP posterior mean should closely match the targets at the training points"

    X_test = [tuning_job_state.hp_ranges.to_ndarray(x) for x in X_test]

    predictions_test = model.predict(np.array(X_test))
    Y_std_test_list = [p['std'] for p in predictions_test]
    Y_std_test = np.mean(Y_std_test_list, axis=0)
    assert np.max(Y_std) < np.min(Y_std_test), \
        "The predictive standard deviation at unobserved points should be larger than at observed ones"


def default_models() -> List[GaussProcSurrogateModel]:
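    """
    Construct a MAP-fitted and an MCMC-fitted GP surrogate model on a 2-d state
    where the four corners of [0, 1]^2 are each evaluated three times, with
    objective value 10 * (x + y).
    """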
    X = [
        (0.0, 0.0),
        (1.0, 0.0),
        (0.0, 1.0),
        (1.0, 1.0),
        (0.0, 0.0),  # the same evaluations are added multiple times to force the GP to unlearn its prior
        (1.0, 0.0),
        (0.0, 1.0),
        (1.0, 1.0),
        (0.0, 0.0),
        (1.0, 0.0),
        (0.0, 1.0),
        (1.0, 1.0),
    ]
    Y = [dictionarize_objective(np.sum(x) * 10.0) for x in X]

    state = TuningJobState(
        HyperparameterRanges_Impl(
            HyperparameterRangeContinuous('x', 0.0, 1.0, LinearScaling()),
            HyperparameterRangeContinuous('y', 0.0, 1.0, LinearScaling()),
        ),
        [CandidateEvaluation(x, y) for x, y in zip(X, Y)],
        [],
        [],
    )
    random_seed = 0

    gpmodel = default_gpmodel(state,
                              random_seed=random_seed,
                              optimization_config=DEFAULT_OPTIMIZATION_CONFIG)

    gpmodel_mcmc = default_gpmodel_mcmc(state,
                                        random_seed=random_seed,
                                        mcmc_config=DEFAULT_MCMC_CONFIG)

    return [
        GaussProcSurrogateModel(state,
                                DEFAULT_METRIC,
                                random_seed,
                                gpmodel,
                                fit_parameters=True,
                                num_fantasy_samples=20),
        GaussProcSurrogateModel(state,
                                DEFAULT_METRIC,
                                random_seed,
                                gpmodel_mcmc,
                                fit_parameters=True,
                                num_fantasy_samples=20)
    ]
Example #4
def default_models(metric, do_mcmc=True) -> List[GaussProcSurrogateModel]:
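    """
    Construct GP surrogate models on the four corners of [0, 1]^2, with targets
    depending on `metric`: 10 * (x + y) for DEFAULT_METRIC, or a cost of
    1 + 2 * x (increasing in the first hyperparameter) for DEFAULT_COST_METRIC.
    Returns a MAP-fitted model and, if `do_mcmc` is set, an MCMC-fitted one.
    """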
    X = [
        (0.0, 0.0),
        (1.0, 0.0),
        (0.0, 1.0),
        (1.0, 1.0),
    ]
    if metric == DEFAULT_METRIC:
        Y = [dictionarize_objective(np.sum(x) * 10.0) for x in X]
    elif metric == DEFAULT_COST_METRIC:
        # Increasing the first hp increases cost
        Y = [{metric: 1.0 + x[0] * 2.0}
             for x in X]
    else:
        raise ValueError(f"{metric} is not a valid metric")

    state = TuningJobState(
        HyperparameterRanges_Impl(
            HyperparameterRangeContinuous('x', 0.0, 1.0, LinearScaling()),
            HyperparameterRangeContinuous('y', 0.0, 1.0, LinearScaling()),
        ),
        [
            CandidateEvaluation(x, y) for x, y in zip(X, Y)
        ],
        [], []
    )
    random_seed = 0

    gpmodel = default_gpmodel(
        state, random_seed=random_seed,
        optimization_config=DEFAULT_OPTIMIZATION_CONFIG)
    result = [GaussProcSurrogateModel(
        state, metric, random_seed, gpmodel, fit_parameters=True,
        num_fantasy_samples=20)]
    if do_mcmc:
        gpmodel_mcmc = default_gpmodel_mcmc(
            state, random_seed=random_seed,
            mcmc_config=DEFAULT_MCMC_CONFIG)
        result.append(
            GaussProcSurrogateModel(
                state, metric, random_seed, gpmodel_mcmc,
                fit_parameters=True, num_fantasy_samples=20))
    return result