Example #1
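The examples in this section come from revrand's basis-function tests and demos, where make_gaus_data is a pytest fixture supplying the data. They assume roughly the following imports (a sketch; the module paths follow revrand's public API and may vary between versions):

import numpy as np
from functools import reduce
from operator import add

import revrand.basis_functions as bs
from revrand.btypes import Parameter, Positive, Bound
from revrand.utils import issequence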
def test_slicing(make_gaus_data):

    X, _, _ = make_gaus_data
    N, d = X.shape

    base = bs.LinearBasis(onescol=False, apply_ind=[0]) \
        + bs.RandomRBF(Xdim=1, nbases=1, apply_ind=[1]) \
        + bs.RandomRBF(Xdim=2, nbases=3,
                       lenscale_init=Parameter(np.ones(2), Positive()),
                       apply_ind=[1, 0])

    P = base.transform(X, 1., np.ones(d))
    assert P.shape == (N, 9)

    dP = base.grad(X, 1., np.ones(d))
    assert list(dP)[0].shape == (N, 9)
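The expected width of 9 columns follows from each component's contribution; as a quick sanity check (assuming RandomRBF emits a cosine and a sine feature per random base):

# LinearBasis applied to one column -> 1 feature;
# RandomRBF(nbases=k) -> 2 * k features (cos and sin).
assert 1 + 2 * 1 + 2 * 3 == 9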
Example #2
def test_apply_grad(make_gaus_data):

    X, _, _ = make_gaus_data
    N, d = X.shape

    y = np.random.randn(N)

    def fun(Phi, dPhi):
        return y.dot(Phi).dot(dPhi.T).dot(y)

    base = bs.LinearBasis(onescol=False)
    obj = lambda dPhi: fun(base(X), dPhi)

    assert len(bs.apply_grad(obj, base.grad(X))) == 0

    base = bs.RadialBasis(centres=X)
    obj = lambda dPhi: fun(base.transform(X, 1.), dPhi)

    assert np.isscalar(bs.apply_grad(obj, base.grad(X, 1.)))

    D = 200
    base = bs.RandomRBF(nbases=D,
                        Xdim=d,
                        lenscale_init=Parameter(np.ones(d), Positive()))
    obj = lambda dPhi: fun(base.transform(X, np.ones(d)), dPhi)

    assert bs.apply_grad(obj, base.grad(X, np.ones(d))).shape == (d, )

    base = bs.LinearBasis(onescol=False) + bs.RadialBasis(centres=X) \
        + bs.RandomRBF(nbases=D, Xdim=d,
                       lenscale_init=Parameter(np.ones(d), Positive()))
    obj = lambda dPhi: fun(base.transform(X, 1., np.ones(d)), dPhi)

    gs = bs.apply_grad(obj, base.grad(X, 1., np.ones(d)))
    assert np.isscalar(gs[0])
    assert gs[1].shape == (d, )
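In short, apply_grad mirrors the hyperparameter structure of the basis: a basis with no hyperparameters yields an empty result, a scalar lengthscale yields a scalar gradient, a d-dimensional lengthscale yields a gradient of shape (d,), and a concatenation yields one entry per component basis.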
Example #3
def test_regularizer(make_gaus_data):

    X, _, _, _ = make_gaus_data
    N, d = X.shape
    nbases1, nbases2 = 10, 5

    # Single basis
    base = bs.LinearBasis(regularizer=Parameter(2, Positive()))
    diag, slices = base.regularizer_diagonal(X)
    assert base.regularizer.value == 2
    assert all(diag == np.full(d, 2))
    assert slices == slice(None)

    # Basis cat
    base += bs.RandomRBF(Xdim=d, nbases=nbases1) \
        + bs.RandomMatern32(Xdim=d, nbases=nbases2)
    dims = np.cumsum([0, d, 2 * nbases1, 2 * nbases2])
    diag, slices = base.regularizer_diagonal(X)
    for db, de, s in zip(dims[:-1], dims[1:], slices):
        assert s == slice(db, de)
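Spelled out, the expected slices are slice(0, d) for the linear block, slice(d, d + 2 * nbases1) for the RandomRBF block, and slice(d + 2 * nbases1, d + 2 * nbases1 + 2 * nbases2) for the RandomMatern32 block, since each random basis contributes 2 * nbases columns.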
Example #4
def test_simple_concat(make_gaus_data):

    X, _, _ = make_gaus_data
    N, d = X.shape

    base = bs.LinearBasis(onescol=False) + bs.LinearBasis(onescol=False)
    P = base.transform(X)

    assert np.allclose(P, np.hstack((X, X)))

    base += bs.RadialBasis(centres=X)
    P = base.transform(X, 1.)

    assert P.shape == (N, d * 2 + N)

    D = 200
    base += bs.RandomRBF(nbases=D,
                         Xdim=d,
                         lenscale_init=Parameter(np.ones(d), Positive()))
    P = base.transform(X, 1., np.ones(d))

    assert P.shape == (N, (D + d) * 2 + N)
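The column bookkeeping behind the final assertion, written out as a standalone check (the concrete N, d, D values here are arbitrary placeholders):

# Each concatenated basis contributes its own block of columns:
#   LinearBasis(onescol=False) -> d, RadialBasis(centres=X) -> N,
#   RandomRBF(nbases=D)        -> 2 * D (cos and sin per random base).
N, d, D = 100, 5, 200
assert d + d + N + 2 * D == (D + d) * 2 + N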
Example #5
def test_grad_concat(make_gaus_data):

    X, _, _ = make_gaus_data
    N, d = X.shape

    base = bs.LinearBasis(onescol=False) + bs.LinearBasis(onescol=False)

    assert list(base.grad(X)) == []

    base += bs.RadialBasis(centres=X)

    G = base.grad(X, 1.)

    assert list(G)[0].shape == (N, N + 2 * d)

    D = 200
    base += bs.RandomRBF(nbases=D,
                         Xdim=d,
                         lenscale_init=Parameter(np.ones(d), Positive()))
    G = base.grad(X, 1., np.ones(d))
    dims = [(N, N + (D + d) * 2), (N, N + (D + d) * 2, d)]

    for g, dim in zip(G, dims):
        assert g.shape == dim
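The shapes reflect one gradient array per hyperparameter: the scalar lenscale of the radial basis gives an (N, P) array, while the d-dimensional lenscale of the random basis gives an (N, P, d) array, where P is the total number of basis columns.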
Example #6
def main():

    #
    # Settings
    #

    # Algorithmic properties
    nbases = 50
    lenscale = 1  # For all basis functions that take lengthscales
    lenscale2 = 0.5  # For the Combo basis
    noise = 1
    order = 7  # For polynomial basis
    rate = 0.9
    eta = 1e-5
    passes = 1000
    batchsize = 100
    reg = 1

    # np.random.seed(100)

    N = 500
    Ns = 250

    # Dataset selection
    # dataset = 'sinusoid'
    dataset = 'gp1D'

    # Dataset properties
    lenscale_true = 0.7  # For the gp1D dataset
    noise_true = 0.1

    basis = 'RKS'
    # basis = 'FF'
    # basis = 'RBF'
    # basis = 'Linear'
    # basis = 'Poly'
    # basis = 'Combo'

    #
    # Make Data
    #

    # Sinusoid
    if dataset == 'sinusoid':
        Xtrain = np.linspace(-2 * np.pi, 2 * np.pi, N)[:, np.newaxis]
        ytrain = np.sin(Xtrain).flatten() + np.random.randn(N) * noise
        Xtest = np.linspace(-2 * np.pi, 2 * np.pi, Ns)[:, np.newaxis]
        ftest = np.sin(Xtest).flatten()

    # Random RBF GP
    elif dataset == 'gp1D':

        Xtrain, ytrain, Xtest, ftest = \
            gen_gausprocess_se(N, Ns, lenscale=lenscale_true, noise=noise_true)

    else:
        raise ValueError('Invalid dataset!')

    #
    # Make Bases
    #

    if basis == 'FF':
        base = basis_functions.FastFood(nbases, Xtrain.shape[1])
    elif basis == 'RKS':
        base = basis_functions.RandomRBF(nbases, Xtrain.shape[1])
    elif basis == 'RBF':
        base = basis_functions.RadialBasis(Xtrain)
    elif basis == 'Linear':
        base = basis_functions.LinearBasis(onescol=True)
    elif basis == 'Poly':
        base = basis_functions.PolynomialBasis(order)
    elif basis == 'Combo':
        base1 = basis_functions.RandomRBF(nbases, Xtrain.shape[1])
        base2 = basis_functions.LinearBasis(onescol=True)
        base3 = basis_functions.FastFood(nbases, Xtrain.shape[1])
        base = base1 + base2 + base3
    else:
        raise ValueError('Invalid basis!')

    #
    # Learn regression parameters and predict
    #

    if basis == 'Linear' or basis == 'Poly':
        hypers = []
    elif basis == 'FF' or basis == 'RKS' or basis == 'RBF':
        hypers = [lenscale]
    elif basis == 'Combo':
        hypers = [lenscale, lenscale2]
    else:
        raise ValueError('Invalid basis!')

    params_elbo = regression.learn(Xtrain, ytrain, base, hypers, var=noise**2,
                                   regulariser=reg)
    Ey_e, Vf_e, Vy_e = regression.predict(Xtest, base, *params_elbo)
    Sy_e = np.sqrt(Vy_e)

    #
    # Nonparametric variational inference GLM
    #

    llhood = likelihoods.Gaussian()
    lparams = [noise**2]
    params_glm = glm.learn(Xtrain, ytrain, llhood, lparams, base, hypers,
                           regulariser=reg, use_sgd=True, rate=rate,
                           postcomp=10, eta=eta, batchsize=batchsize,
                           maxit=passes)
    Ey_g, Vf_g, Eyn, Eyx = glm.predict_meanvar(Xtest, llhood, base,
                                               *params_glm)
    Vy_g = Vf_g + params_glm[2][0]
    Sy_g = np.sqrt(Vy_g)

    #
    # Learn GP and predict
    #

    def kdef(h, k):
        return (h(1e-5, 1., 0.5) * k(kern.gaussian, h(1e-5, 1e5, lenscale)) +
                k(kern.lognoise, h(-4, 1, -3)))
    hyper_params = gp.learn(Xtrain, ytrain, kdef, verbose=True, ftol=1e-15,
                            maxiter=passes)

    regressor = gp.condition(Xtrain, ytrain, kdef, hyper_params)
    query = gp.query(regressor, Xtest)
    Ey_gp = gp.mean(query)
    Vf_gp = gp.variance(query)
    Vy_gp = gp.variance(query, noise=True)
    Sy_gp = np.sqrt(Vy_gp)

    #
    # Evaluate LL and SMSE
    #

    LL_elbo = mll(ftest, Ey_e, Vf_e)
    LL_gp = mll(ftest, Ey_gp, Vf_gp)
    LL_g = mll(ftest, Ey_g, Vy_g)

    smse_elbo = smse(ftest, Ey_e)
    smse_gp = smse(ftest, Ey_gp)
    smse_glm = smse(ftest, Ey_g)

    log.info("A la Carte, LL: {}, smse = {}, noise: {}, hypers: {}"
             .format(LL_elbo, smse_elbo, np.sqrt(params_elbo[3]),
                     params_elbo[2]))
    log.info("GP, LL: {}, smse = {}, noise: {}, hypers: {}"
             .format(LL_gp, smse_gp, hyper_params[1], hyper_params[0]))
    log.info("GLM, LL: {}, smse = {}, noise: {}, hypers: {}"
             .format(LL_g, smse_glm, np.sqrt(params_glm[2][0]),
                     params_glm[3]))

    #
    # Plot
    #

    Xpl_t = Xtrain.flatten()
    Xpl_s = Xtest.flatten()

    # Training/Truth
    pl.plot(Xpl_t, ytrain, 'k.', label='Training')
    pl.plot(Xpl_s, ftest, 'k-', label='Truth')

    # ELBO Regressor
    pl.plot(Xpl_s, Ey_e, 'g-', label='Bayesian linear regression')
    pl.fill_between(Xpl_s, Ey_e - 2 * Sy_e, Ey_e + 2 * Sy_e, facecolor='none',
                    edgecolor='g', linestyle='--', label=None)

    # GP
    # pl.plot(Xpl_s, Ey_gp, 'b-', label='GP')
    # pl.fill_between(Xpl_s, Ey_gp - 2 * Sy_gp, Ey_gp + 2 * Sy_gp,
    #                 facecolor='none', edgecolor='b', linestyle='--',
    #                 label=None)

    # GLM Regressor
    pl.plot(Xpl_s, Ey_g, 'm-', label='GLM')
    pl.fill_between(Xpl_s, Ey_g - 2 * Sy_g, Ey_g + 2 * Sy_g, facecolor='none',
                    edgecolor='m', linestyle='--', label=None)

    pl.legend()

    pl.grid(True)
    pl.title('Regression demo')
    pl.ylabel('y')
    pl.xlabel('x')

    pl.show()
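The excerpt omits the entry point; a demo script like this would typically close with the standard guard:

if __name__ == '__main__':
    main()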
Example #7
def test_bases(make_gaus_data):

    X, _, _ = make_gaus_data
    N, d = X.shape
    nC = 10

    bases = [
        bs.BiasBasis(),
        bs.LinearBasis(onescol=True),
        bs.PolynomialBasis(order=2),
        bs.RadialBasis(centres=X[:nC, :]),
        bs.RadialBasis(centres=X[:nC, :],
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.SigmoidalBasis(centres=X[:nC, :]),
        bs.SigmoidalBasis(centres=X[:nC, :],
                          lenscale_init=Parameter(np.ones(d), Positive())),
        bs.RandomRBF(Xdim=d, nbases=10),
        bs.RandomRBF(Xdim=d,
                     nbases=10,
                     lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodRBF(Xdim=d, nbases=10),
        bs.FastFoodRBF(Xdim=d,
                       nbases=10,
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodGM(Xdim=d, nbases=10),
        bs.FastFoodGM(Xdim=d,
                      nbases=10,
                      mean_init=Parameter(np.zeros(d), Bound()),
                      lenscale_init=Parameter(np.ones(d), Positive())),
    ]

    hypers = [(), (), (), (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (np.ones(d), np.ones(d)), (np.ones(d), np.ones(d))]

    for b, h in zip(bases, hypers):
        P = b.transform(X, *h)
        dP = b.grad(X, *h)

        assert P.shape[0] == N
        if not issequence(dP):
            assert dP.shape[0] == N if not isinstance(dP, list) else dP == []
        else:
            for dp in dP:
                assert dp.shape[0] == N
        assert P.ndim == 2

    bcat = reduce(add, bases)
    hyps = []
    for h in hypers:
        hyps.extend(list(h))
    P = bcat.transform(X, *hyps)
    dP = bcat.grad(X, *hyps)

    assert bcat.get_dim(X) == P.shape[1]
    assert P.shape[0] == N
    assert P.ndim == 2
    for dp in dP:
        assert dp.shape[0] == N
Example #8
#
# Make Bases and Likelihood
#

if like == 'Gaussian':
    llhood = likelihoods.Gaussian()
    lparams = [noise**2]
elif like == 'Bernoulli':
    llhood = likelihoods.Bernoulli()
    lparams = []
elif like == 'Poisson':
    llhood = likelihoods.Poisson(tranfcn='softplus')
    lparams = []
else:
    raise ValueError("Invalid likelihood, {}!".format(like))

basis = basis_functions.RandomRBF(nbases, Xtrain.shape[1])
bparams = [lenscale]
# basis = basis_functions.PolynomialBasis(order=4)
# bparams = []
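As in the regression demo above, bparams holds the initial basis hyperparameters (here the RBF lengthscale), while lparams holds the likelihood's parameters.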

#
# Inference
#

params = glm.learn(Xtrain,
                   ytrain,
                   llhood,
                   lparams,
                   basis,
                   bparams,
                   postcomp=postcomp,