Code Example #1
def test_warping_with_multidimension_and_arbitrary_parameters():
    X = mx.nd.array([[0., 1., 0.], [1., 2., 1.], [2., 0., 2.]],
                    dtype=DATA_TYPE)

    dimension = 3

    # We transform only the columns {0,2} of the 3-dimensional data X
    input_range = (0., 2.)
    warping = Warping(index_to_range={
        0: input_range,
        2: input_range
    },
                      dimension=dimension)

    assert len(warping.transformations) == dimension

    warping.collect_params().initialize()

    # We change the warping parameters of the first dimension only
    w0 = warping.transformations[0]
    w0.encoding.set(w0.warping_internal, [2., 0.5])

    w2 = warping.transformations[2]
    w2_parameters = w2.encoding.get(mx.nd, w2.warping_internal.data())

    # The parameters of w2 should be the default ones (as there was no set operations)
    np.testing.assert_almost_equal(w2_parameters.asnumpy(), np.ones(2))

    # print(warping(X).asnumpy())
    # for name, p  in warping.collect_params().items():
    #     print(name, p.data().asnumpy())

    # With parameters [2., 0.5], the warping is given by x => 1. - sqrt(1. - x^2)
    def expected_warping(x):
        return 1. - np.sqrt(1. - x * x)

    expected_column0 = expected_warping(
        np.array([NUMERICAL_JITTER, 0.5, 1. - NUMERICAL_JITTER])).reshape(
            (-1, 1))
    expected_column1 = np.array([1., 2., 0.]).reshape((-1, 1))
    expected_column2 = np.array([NUMERICAL_JITTER, 0.5,
                                 1. - NUMERICAL_JITTER]).reshape((-1, 1))

    np.testing.assert_almost_equal(
        warping(X).asnumpy(),
        np.hstack([expected_column0, expected_column1, expected_column2]))
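A note on the formula asserted above: if Warping implements a Kumaraswamy-style transform w(x) = 1 - (1 - x^a)^b (an assumption about its internals, not shown in this snippet), then the parameters [2., 0.5] set on w0 reduce exactly to x => 1. - sqrt(1. - x^2). A minimal numpy check of that reduction:

import numpy as np

def kumaraswamy_warping(x, a, b):
    # Sketch only: assumed functional form of the warping, w(x) = 1 - (1 - x^a)^b
    return 1. - (1. - x ** a) ** b

x = np.linspace(0.01, 0.99, 50)
# With (a, b) = (2., 0.5) the transform collapses to 1 - sqrt(1 - x^2), as the test expects
np.testing.assert_almost_equal(kumaraswamy_warping(x, a=2., b=0.5),
                               1. - np.sqrt(1. - x * x))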
Code Example #2
def build_kernel(state: TuningJobState,
                 do_warping: bool = False) -> KernelFunction:
    dims, warping_ranges = dimensionality_and_warping_ranges(state.hp_ranges)
    kernel = Matern52(dims, ARD=True)
    if do_warping:
        return WarpedKernel(kernel=kernel,
                            warping=Warping(dims, warping_ranges))
    else:
        return kernel
Code Example #3
def test_gp_regression_with_warping():
    def f(x):
        return np.sin(3 * np.log(x))

    np.random.seed(7)

    L, U = -5., 12.
    input_range = (2.**L, 2.**U)

    x_train = np.sort(2.**np.random.uniform(L, U, 250))
    x_test = np.sort(2.**np.random.uniform(L, U, 500))
    y_train = f(x_train)
    y_test = f(x_test)
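    # f(x) = sin(3 * log(x)) oscillates evenly in log(x), but x itself spans
    # 2^-5 .. 2^12, so in the raw input space the oscillations are strongly
    # compressed towards small x; this non-stationarity is what the input
    # warping is meant to absorb.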

    # to mx.nd
    y_train_mx_nd = mx.nd.array(y_train)
    x_train_mx_nd = mx.nd.array(x_train)
    x_test_mx_nd = mx.nd.array(x_test)

    kernels = [
        Matern52(dimension=1),
        WarpedKernel(kernel=Matern52(dimension=1),
                     warping=Warping(dimension=1,
                                     index_to_range={0: input_range}))
    ]

    models = [
        GaussianProcessRegression(kernel=k, random_seed=0) for k in kernels
    ]
    train_errors, test_errors = [], []

    for model in models:

        model.fit(x_train_mx_nd, y_train_mx_nd)

        mu_train, var_train = model.predict(x_train_mx_nd)[0]
        mu_test, var_test = model.predict(x_test_mx_nd)[0]

        # back to np.array
        mu_train = mu_train.asnumpy()
        mu_test = mu_test.asnumpy()
        # var_train = var_train.asnumpy()
        # var_test = var_test.asnumpy()

        train_errors.append(np.mean(np.abs((mu_train - y_train))))
        test_errors.append(np.mean(np.abs((mu_test - y_test))))

    # The two models have similar performance on training points
    np.testing.assert_almost_equal(train_errors[0], train_errors[1], decimal=4)

    # As expected, the model with warping largely outperforms the model without
    assert test_errors[1] < 0.1 * test_errors[0]

def test_set_gp_hps():
    mean = ScalarMeanFunction()
    kernel = Matern52(dimension=1)
    warping = Warping(dimension=1, index_to_range={0: (-4., 4.)})
    warped_kernel = WarpedKernel(kernel=kernel, warping=warping)
    likelihood = MarginalLikelihood(kernel=warped_kernel,
                                    mean=mean,
                                    initial_noise_variance=1e-6)
    likelihood.initialize(ctx=mx.cpu(), force_reinit=True)
    likelihood.hybridize()
    hp_values = np.array([1e-2, 1.0, 0.5, 0.3, 0.2, 1.1])
    _set_gp_hps(hp_values, likelihood)
    np.testing.assert_array_almost_equal(hp_values, _get_gp_hps(likelihood))

def test_get_gp_hps():
    mean = ScalarMeanFunction()
    kernel = Matern52(dimension=1)
    warping = Warping(dimension=1, index_to_range={0: (-4., 4.)})
    warped_kernel = WarpedKernel(kernel=kernel, warping=warping)
    likelihood = MarginalLikelihood(kernel=warped_kernel,
                                    mean=mean,
                                    initial_noise_variance=1e-6)
    likelihood.initialize(ctx=mx.cpu(), force_reinit=True)
    likelihood.hybridize()
    hp_values = _get_gp_hps(likelihood)
    # The order of the hps is: noise, mean, covariance scale, bandwidth, warping a, warping b
    np.testing.assert_array_almost_equal(
        hp_values, np.array([1e-6, 0.0, 1.0, 1.0, 1.0, 1.0]))
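To make that ordering concrete, the flat vector can be paired with descriptive labels. The names below are illustrative only (they are not identifiers from the library), and likelihood is assumed to be constructed as in test_get_gp_hps above:

hp_labels = ['noise_variance', 'mean_value', 'covariance_scale',
             'kernel_bandwidth', 'warping_power_a', 'warping_power_b']
for label, value in zip(hp_labels, _get_gp_hps(likelihood)):
    print(label, value)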

def resource_kernel_factory(
        name: str, kernel_x: KernelFunction, mean_x: gluon.HybridBlock,
        max_metric_value: float) -> (KernelFunction, gluon.HybridBlock):
    """
    Given kernel function kernel_x and mean function mean_x over config x,
    create kernel and mean functions over (x, r), where r is the resource
    attribute (nonnegative scalar, usually in [0, 1]).

    :param name: Selects resource kernel type
    :param kernel_x: Kernel function over configs x
    :param mean_x: Mean function over configs x
    :param max_metric_value: Maximum value the metric can attain (only used
        by the exponential decay kernels below)
    :return: res_kernel, res_mean, both over (x, r)
    """
    if name == 'matern52':
        res_kernel = Matern52(dimension=kernel_x.dimension + 1, ARD=True)
        res_mean = mean_x
    elif name == 'matern52-res-warp':
        # Warping on resource dimension (last one)
        dim_x = kernel_x.dimension
        res_warping = Warping(dimension=dim_x + 1,
                              index_to_range={dim_x: (0., 1.)})
        res_kernel = WarpedKernel(kernel=Matern52(dimension=dim_x + 1,
                                                  ARD=True),
                                  warping=res_warping)
        res_mean = mean_x
    else:
        if name == 'exp-decay-sum':
            delta_fixed_value = 0.0
        elif name == 'exp-decay-combined':
            delta_fixed_value = None
        elif name == 'exp-decay-delta1':
            delta_fixed_value = 1.0
        else:
            raise AssertionError("name = '{}' not supported".format(name))
        res_kernel = ExponentialDecayResourcesKernelFunction(
            kernel_x,
            mean_x,
            gamma_init=0.5 * max_metric_value,
            delta_fixed_value=delta_fixed_value,
            max_metric_value=max_metric_value)
        res_mean = ExponentialDecayResourcesMeanFunction(kernel=res_kernel)

    return res_kernel, res_mean
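A hedged usage sketch for the factory above, reusing the Matern52 and ScalarMeanFunction classes from the earlier snippets; the argument values are illustrative:

# Build a kernel/mean pair over (x, r) for a 2-dimensional config space,
# with input warping applied to the resource dimension r in [0, 1].
kernel_x = Matern52(dimension=2, ARD=True)
mean_x = ScalarMeanFunction()
res_kernel, res_mean = resource_kernel_factory(
    'matern52-res-warp', kernel_x=kernel_x, mean_x=mean_x,
    max_metric_value=1.0)
# res_kernel now operates on 3-dimensional inputs (x_1, x_2, r)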

def build_kernel():
    return WarpedKernel(kernel=Matern52(dimension=1),
                        warping=Warping(dimension=1,
                                        index_to_range={0: (-4., 4.)}))
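As in the regression test earlier, the kernel returned here can be passed straight to the GP wrapper; a one-line sketch:

model = GaussianProcessRegression(kernel=build_kernel(), random_seed=0)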