Beispiel #1
0
def test_structured_params(make_quadratic, make_random):
    """Structured optimizers should recover the quadratic's coefficients.

    Fix: the original ``assert_opt`` lambda merely *returned* the result of
    ``np.allclose`` -- calling ``assert_opt(*res.x)`` discarded the boolean,
    so the test could never fail. It now actually asserts.
    """
    random = make_random
    a, b, c, data, _ = make_quadratic
    w0 = [
        Parameter(random.randn(2), Bound()),
        Parameter(random.randn(1), Bound())
    ]

    def qobj_struc(w12, w3, data):
        # Objective + gradient of the quadratic in structured-parameter form.
        return q_struc(w12, w3, data, qobj)

    def assert_opt(Eab, Ec):
        # BUG FIX: assert, don't just compute, the closeness check.
        assert np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3, rtol=0)

    nmin = structured_minimizer(minimize)
    res = nmin(qobj_struc, w0, args=(data, ), jac=True, method='L-BFGS-B')
    assert_opt(*res.x)

    nsgd = structured_sgd(sgd)
    res = nsgd(qobj_struc, w0, data, eval_obj=True, random_state=make_random)
    assert_opt(*res.x)

    # Separate objective / gradient callables should work as well.
    def qf_struc(w12, w3, data):
        return q_struc(w12, w3, data, qfun)

    def qg_struc(w12, w3, data):
        return q_struc(w12, w3, data, qgrad)

    res = nmin(qf_struc, w0, args=(data, ), jac=qg_struc, method='L-BFGS-B')
    assert_opt(*res.x)
Beispiel #2
0
def test_rand_start(make_quadratic, make_random):
    """Random-restart optimizers should recover the quadratic's coefficients.

    Fix: the original ``assert_opt`` lambda returned the ``np.allclose``
    boolean without asserting it, so the test was vacuous. It now asserts.
    """
    random = make_random
    a, b, c, data, _ = make_quadratic

    # One gamma-sampled positive parameter pair and one unbounded scalar.
    w0 = [Parameter(gamma(1), Positive(), shape=(2, )), Parameter(1., Bound())]

    def qobj_struc(w12, w3, data):
        return q_struc(w12, w3, data, qobj)

    def assert_opt(Eab, Ec):
        # BUG FIX: assert, don't just compute, the closeness check.
        assert np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3, rtol=0)

    nmin = structured_minimizer(logtrick_minimizer(minimize))
    res = nmin(qobj_struc,
               w0,
               args=(data, ),
               jac=True,
               method='L-BFGS-B',
               random_state=random,
               nstarts=100)
    assert_opt(*res.x)

    nsgd = structured_sgd(logtrick_sgd(sgd))
    res = nsgd(qobj_struc,
               w0,
               data,
               eval_obj=True,
               nstarts=100,
               random_state=random)
    assert_opt(*res.x)
Beispiel #3
0
    def __init__(self,
                 onescol=True,
                 var=1.,
                 regulariser=1.,
                 tol=1e-8,
                 maxiter=1000,
                 nstarts=100):
        """Set up the model with a linear basis and positive priors.

        The regulariser and noise variance are wrapped in ``Parameter``
        objects constrained to be positive.
        """
        lin_basis = LinearBasis(
            onescol=onescol,
            regularizer=Parameter(regulariser, Positive()),
        )
        super().__init__(
            basis=lin_basis,
            var=Parameter(var, Positive()),
            tol=tol,
            maxiter=maxiter,
            nstarts=nstarts,
        )
Beispiel #4
0
    def __init__(self,
                 kernel='rbf',
                 nbases=50,
                 lenscale=1.,
                 var=1.,
                 falloff=1.,
                 regulariser=1.,
                 ard=True,
                 indicator_field='censored',
                 maxiter=3000,
                 batch_size=10,
                 alpha=0.01,
                 beta1=0.9,
                 beta2=0.99,
                 epsilon=1e-8,
                 random_state=None):
        """Construct the model with a switching likelihood and Adam updater."""
        # Switching likelihood: `falloff` is its length scale, the variance
        # gets a positivity-constrained initial Parameter.
        likelihood = Switching(lenscale=falloff,
                               var_init=Parameter(var, Positive()))

        super().__init__(
            likelihood=likelihood,
            basis=None,  # basis is constructed later from the data
            maxiter=maxiter,
            batch_size=batch_size,
            updater=Adam(alpha, beta1, beta2, epsilon),
            random_state=random_state,
        )

        self.indicator_field = indicator_field
        self._store_params(kernel, regulariser, nbases, lenscale, ard)
Beispiel #5
0
    def _store_params(self, kernel, regulariser, nbases, lenscale, ard):
        """Record kernel/basis hyperparameters on the estimator."""
        self.kernel = kernel
        self.nbases = nbases
        self.ard = ard
        # Keep scalar length scales as plain scalars; coerce anything
        # array-like to an ndarray.
        if np.isscalar(lenscale):
            self.lenscale = lenscale
        else:
            self.lenscale = np.asarray(lenscale)
        self.regulariser = Parameter(regulariser, Positive())
Beispiel #6
0
def test_concat_params():
    """Concatenated bases aggregate the parameters of their sub-bases."""
    d = 10
    D = 20

    # Scalar length scale -> scalar parameter value on the concatenation.
    cat = bs.LinearBasis(onescol=True) + bs.RandomMatern52(
        nbases=D, Xdim=d, lenscale=Parameter(1., Positive()))
    assert np.isscalar(cat.params.value)

    # ARD (vector) length scale -> vector parameter value of length d.
    cat = bs.LinearBasis(onescol=True) + bs.RandomMatern52(
        nbases=D, Xdim=d, lenscale=Parameter(np.ones(d), Positive()))
    assert len(cat.params.value) == d

    # Adding another parameterized basis yields two parameter entries.
    cat += bs.RandomMatern52(
        nbases=D, Xdim=d, lenscale=Parameter(1., Positive()))
    assert len(cat.params) == 2
Beispiel #7
0
 def __init__(self,
              onescol=True,
              var=1.,
              regulariser=1.,
              maxiter=3000,
              batch_size=10,
              alpha=0.01,
              beta1=0.9,
              beta2=0.99,
              epsilon=1e-8,
              random_state=None,
              nstarts=500):
     """Set up a GLM with a Gaussian likelihood over a linear basis."""
     lin_basis = LinearBasis(
         onescol=onescol,
         regularizer=Parameter(regulariser, Positive()),
     )
     super().__init__(
         likelihood=Gaussian(Parameter(var, Positive())),
         basis=lin_basis,
         maxiter=maxiter,
         batch_size=batch_size,
         updater=Adam(alpha, beta1, beta2, epsilon),
         random_state=random_state,
         nstarts=nstarts,
     )
Beispiel #8
0
def test_randomgridsearch_slm(make_gaus_data):
    """Smoke-test RandomizedSearchCV over a StandardLinearModel."""
    X, y, Xs, ys = make_gaus_data

    model = StandardLinearModel(LinearBasis(onescol=True))

    # Candidate noise variances: 1, 1/2, ..., 1/5 wrapped as Parameters.
    search_space = {
        'var': [Parameter(1.0 / v, Positive()) for v in range(1, 6)],
    }
    search = RandomizedSearchCV(model, search_space, n_jobs=-1, n_iter=2)

    search.fit(X, y)
    Ey = search.predict(Xs)
    assert len(ys) == len(Ey)  # we just want to make sure this all runs
Beispiel #9
0
def test_gridsearch_slm(make_gaus_data):
    """Smoke-test GridSearchCV over a StandardLinearModel."""
    X, y, Xs, ys = make_gaus_data

    model = StandardLinearModel(LinearBasis(onescol=True))

    # Two candidate noise variances wrapped as positive Parameters.
    search_space = {'var': [Parameter(v, Positive()) for v in [1.0, 2.0]]}
    search = GridSearchCV(model, search_space, n_jobs=-1)

    search.fit(X, y)
    Ey = search.predict(Xs)
    assert len(ys) == len(Ey)  # we just want to make sure this all runs
Beispiel #10
0
    def _make_basis(self, X):
        """Build the feature basis (kernel approximation + bias) for X."""
        D = X.shape[1]
        lenscale = self.lenscale
        # With ARD on multivariate input, broadcast a scalar length scale to
        # one value per input dimension.
        if self.ard and D > 1:
            lenscale = np.ones(D) * lenscale
        kernel_basis = basismap[self.kernel](
            Xdim=D,
            nbases=self.nbases,
            lenscale=Parameter(lenscale, Positive()),
            regularizer=self.regulariser,
        )
        self.basis = kernel_basis + BiasBasis()
Beispiel #11
0
def test_apply_grad(make_gaus_data):
    """apply_grad should mirror the arity and shape of each basis' grads."""
    X, _, _ = make_gaus_data
    N, d = X.shape

    y = np.random.randn(N)

    def fun(Phi, dPhi):
        return y.dot(Phi).dot(dPhi.T).dot(y)

    # No hyperparameters -> empty gradient result.
    base = bs.LinearBasis(onescol=False)
    assert len(bs.apply_grad(lambda dPhi: fun(base(X), dPhi),
                             base.grad(X))) == 0

    # One scalar hyperparameter -> scalar gradient.
    base = bs.RadialBasis(centres=X)
    assert np.isscalar(
        bs.apply_grad(lambda dPhi: fun(base.transform(X, 1.), dPhi),
                      base.grad(X, 1.)))

    # ARD length scale -> gradient vector of length d.
    D = 200
    base = bs.RandomRBF(nbases=D, Xdim=d,
                        lenscale_init=Parameter(np.ones(d), Positive()))
    grad_vec = bs.apply_grad(
        lambda dPhi: fun(base.transform(X, np.ones(d)), dPhi),
        base.grad(X, np.ones(d)))
    assert grad_vec.shape == (d, )

    # Concatenation: one gradient per parameterized sub-basis.
    base = (bs.LinearBasis(onescol=False)
            + bs.RadialBasis(centres=X)
            + bs.RandomRBF(nbases=D, Xdim=d,
                           lenscale_init=Parameter(np.ones(d), Positive())))
    gs = bs.apply_grad(
        lambda dPhi: fun(base.transform(X, 1., np.ones(d)), dPhi),
        base.grad(X, 1., np.ones(d)))
    assert np.isscalar(gs[0])
    assert gs[1].shape == (d, )
Beispiel #12
0
def test_slicing(make_gaus_data):
    """apply_ind restricts which input columns each sub-basis sees."""
    X, _, _ = make_gaus_data
    N, d = X.shape

    combo = (bs.LinearBasis(onescol=False, apply_ind=[0])
             + bs.RandomRBF(Xdim=1, nbases=1, apply_ind=[1])
             + bs.RandomRBF(Xdim=2, nbases=3,
                            lenscale_init=Parameter(np.ones(2), Positive()),
                            apply_ind=[1, 0]))

    # Combined feature width of the three sliced bases is 9.
    Phi = combo.transform(X, 1., np.ones(d))
    assert Phi.shape == (N, 9)

    dPhi = combo.grad(X, 1., np.ones(d))
    assert list(dPhi)[0].shape == (N, 9)
Beispiel #13
0
    def __init__(self,
                 kernel='rbf',
                 nbases=50,
                 lenscale=1.,
                 var=1.,
                 regulariser=1.,
                 ard=True,
                 tol=1e-8,
                 maxiter=1000,
                 nstarts=100):
        """Initialize the model; the basis itself is built later from data."""
        super().__init__(
            basis=None,  # deferred: constructed once the data is seen
            var=Parameter(var, Positive()),
            tol=tol,
            maxiter=maxiter,
            nstarts=nstarts,
        )

        self._store_params(kernel, regulariser, nbases, lenscale, ard)
Beispiel #14
0
def learn(X,
          y,
          kerneldef,
          opt_criterion=None,
          verbose=False,
          ftol=1e-8,
          maxiter=10000):
    """Learn kernel hyperparameters by minimizing an optimization criterion.

    Parameters
    ----------
    X, y : training inputs and targets.
    kerneldef : kernel definition understood by ``compose``/``get_meta``.
    opt_criterion : callable or None
        Criterion of ``(y, svd_factors)``; defaults to the negative log
        marginal likelihood.
    verbose : bool
        Log each criterion evaluation.
    ftol, maxiter : tolerance and iteration cap for L-BFGS-B.

    Returns
    -------
    The optimized hyperparameters (``result.x``).
    """
    if opt_criterion is None:
        opt_criterion = criterions.negative_log_marginal_likelihood

    cov_fn = compose(kerneldef)

    # Automatically determine the initial values and bounds from the kernel
    # definition's metadata.
    meta = get_meta(kerneldef)
    params = [
        Parameter(i, Bound(l, h)) for i, l, h in zip(
            meta.initial_val, meta.lower_bound, meta.upper_bound)
    ]

    def criterion(*theta):
        K = cov_fn(X, X, theta, True)  # learn with noise!
        factors = np.linalg.svd(K)
        value = opt_criterion(y, factors)
        if verbose:
            log.info("[{0}] {1}".format(value, theta))
        return value

    nmin = structured_minimizer(minimize)
    result = nmin(criterion,
                  params,
                  tol=ftol,
                  options={'maxiter': maxiter},
                  jac=False,
                  method='L-BFGS-B')
    # Fix: report via the module logger instead of a stray debug print().
    log.info(result)
    return result.x
Beispiel #15
0
def test_regularizer(make_gaus_data):
    """Regularizer diagonals/slices for single and concatenated bases."""
    X, _, _, _ = make_gaus_data
    N, d = X.shape
    nbases1, nbases2 = 10, 5

    # A lone basis: one diagonal covering every feature column.
    base = bs.LinearBasis(regularizer=Parameter(2, Positive()))
    diag, slices = base.regularizer_diagonal(X)
    assert base.regularizer.value == 2
    assert all(diag == np.full(d, 2))
    assert slices == slice(None)

    # Concatenation: one slice per sub-basis spanning its feature columns.
    base += (bs.RandomRBF(Xdim=d, nbases=nbases1)
             + bs.RandomMatern32(Xdim=d, nbases=nbases2))
    bounds = np.cumsum([0, d, 2 * nbases1, 2 * nbases2])
    diag, slices = base.regularizer_diagonal(X)
    for start, stop, s in zip(bounds[:-1], bounds[1:], slices):
        assert s == slice(start, stop)
Beispiel #16
0
def test_simple_concat(make_gaus_data):
    """Concatenating bases stacks their feature matrices column-wise."""
    X, _, _ = make_gaus_data
    N, d = X.shape

    # Two identical linear bases -> features are X side by side with itself.
    cat = bs.LinearBasis(onescol=False) + bs.LinearBasis(onescol=False)
    Phi = cat.transform(X)
    assert np.allclose(Phi, np.hstack((X, X)))

    # A radial basis centred on every point adds N columns.
    cat += bs.RadialBasis(centres=X)
    Phi = cat.transform(X, 1.)
    assert Phi.shape == (N, d * 2 + N)

    # A random RBF basis with D bases adds 2 * D columns.
    D = 200
    cat += bs.RandomRBF(nbases=D, Xdim=d,
                        lenscale_init=Parameter(np.ones(d), Positive()))
    Phi = cat.transform(X, 1., np.ones(d))
    assert Phi.shape == (N, (D + d) * 2 + N)
Beispiel #17
0
    def __init__(self,
                 kernel='rbf',
                 nbases=50,
                 lenscale=1.,
                 var=1.,
                 regulariser=1.,
                 ard=True,
                 maxiter=3000,
                 batch_size=10,
                 alpha=0.01,
                 beta1=0.9,
                 beta2=0.99,
                 epsilon=1e-8,
                 random_state=None,
                 nstarts=500):
        """Set up a GLM with a Gaussian likelihood; basis is built later."""
        super().__init__(
            likelihood=Gaussian(Parameter(var, Positive())),
            basis=None,  # deferred: constructed once the data is seen
            maxiter=maxiter,
            batch_size=batch_size,
            updater=Adam(alpha, beta1, beta2, epsilon),
            random_state=random_state,
            nstarts=nstarts,
        )
        self._store_params(kernel, regulariser, nbases, lenscale, ard)
Beispiel #18
0
def test_grad_concat(make_gaus_data):
    """Gradients of concatenated bases span the full concatenated width.

    Fix: the final loop reused ``d`` as its loop variable, shadowing the
    input dimension ``d`` defined at the top of the test.
    """
    X, _, _ = make_gaus_data
    N, d = X.shape

    base = bs.LinearBasis(onescol=False) + bs.LinearBasis(onescol=False)

    # No hyperparameters anywhere -> empty gradient sequence.
    assert list(base.grad(X)) == []

    base += bs.RadialBasis(centres=X)

    G = base.grad(X, 1.)

    assert list(G)[0].shape == (N, N + 2 * d)

    D = 200
    base += bs.RandomRBF(nbases=D,
                         Xdim=d,
                         lenscale_init=Parameter(np.ones(d), Positive()))
    G = base.grad(X, 1., np.ones(d))
    # Expected gradient shapes: scalar lenscale grad, then ARD grad cube.
    expected_shapes = [(N, N + (D + d) * 2), (N, N + (D + d) * 2, d)]

    for g, shape in zip(G, expected_shapes):
        assert g.shape == shape
Beispiel #19
0
# Log output to the terminal attached to this notebook
logging.basicConfig(level=logging.INFO)

# Load the data
# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed in
# 1.2 -- confirm the pinned scikit-learn version still provides it.
boston = load_boston()
X = boston.data
y = boston.target - boston.target.mean()  # centre the targets

# Hold out one fold of a 5-fold shuffled split for testing
# NOTE(review): KFold(n, n_folds=..., shuffle=...) is the pre-0.18
# sklearn.cross_validation API -- verify against the installed version.
folds = 5
(tr_ind, ts_ind) = list(KFold(len(y), n_folds=folds, shuffle=True))[0]

# Make Basis and Likelihood
N, D = X.shape
lenscale = 10.
nbases = 50
lenARD = lenscale * np.ones(D)  # one length scale per input dimension (ARD)
lenscale_init = Parameter(lenARD, Positive())
base = LinearBasis(onescol=True) + RandomMatern32(
    Xdim=D, nbases=nbases, lenscale_init=lenscale_init)
like = Gaussian()

# Fit and predict the model (predict_moments returns mean and variance)
glm = GeneralisedLinearModel(like, base, maxiter=6000)
glm.fit(X[tr_ind], y[tr_ind])
Ey, Vy = glm.predict_moments(X[ts_ind])

# Score on the held-out fold
# (smse / msll presumably standardised MSE and mean standardised log
# loss -- confirm against the metrics module)
y_true = y[ts_ind]
print("SMSE = {}, MSLL = {}".format(smse(y_true, Ey),
                                    msll(y_true, Ey, Vy, y[tr_ind])))
Beispiel #20
0
    def __init__(self, lenscale=1., var_init=None):
        """Set up the switching likelihood components.

        Fix: ``var_init`` previously defaulted to ``Parameter(1., Positive())``
        evaluated once at definition time -- a mutable default shared by every
        instance. A ``None`` sentinel now creates a fresh Parameter per
        instance while keeping the same effective default.

        Parameters
        ----------
        lenscale : length scale handed to the uniform-Gaussian component.
        var_init : Parameter or None
            Initial variance parameter; defaults to a fresh
            ``Parameter(1., Positive())``.
        """
        if var_init is None:
            var_init = Parameter(1., Positive())

        self.params = var_init
        self.gaus = Gaussian(var_init)
        self.unif = UnifGauss(lenscale)
Beispiel #21
0
 def get_var(var):
     """Wrap a bare float variance in a positive Parameter with a gamma prior."""
     if not isinstance(var, float):
         return var
     # Initial target noise
     prior = gamma(a=var, scale=1)
     return Parameter(prior, Positive())
Beispiel #22
0
 def get_regularizer(regularizer):
     """Wrap a bare float regularizer in a positive Parameter with a gamma prior."""
     if not isinstance(regularizer, float):
         return regularizer
     # Initial weight prior
     prior = gamma(a=regularizer, scale=1)
     return Parameter(prior, Positive())
Beispiel #23
0
def test_bases(make_gaus_data):
    """Each basis (and their concatenation) transforms and differentiates.

    Fix: the final check wrapped a copy-pasted sequence test in an outer
    ``for dp in dP`` loop that re-tested ``dP`` (not ``dp``) and iterated the
    generator inside itself -- redundant and broken. The concatenated grad is
    now checked with a single, direct loop.
    """
    X, _, _ = make_gaus_data
    N, d = X.shape
    nC = 10

    bases = [
        bs.BiasBasis(),
        bs.LinearBasis(onescol=True),
        bs.PolynomialBasis(order=2),
        bs.RadialBasis(centres=X[:nC, :]),
        bs.RadialBasis(centres=X[:nC, :],
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.SigmoidalBasis(centres=X[:nC, :]),
        bs.SigmoidalBasis(centres=X[:nC, :],
                          lenscale_init=Parameter(np.ones(d), Positive())),
        bs.RandomRBF(Xdim=d, nbases=10),
        bs.RandomRBF(Xdim=d,
                     nbases=10,
                     lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodRBF(Xdim=d, nbases=10),
        bs.FastFoodRBF(Xdim=d,
                       nbases=10,
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodGM(Xdim=d, nbases=10),
        bs.FastFoodGM(Xdim=d,
                      nbases=10,
                      mean_init=Parameter(np.zeros(d), Bound()),
                      lenscale_init=Parameter(np.ones(d), Positive())),
    ]

    # Hyperparameter tuples matching each basis above, in order.
    hypers = [(), (), (), (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (np.ones(d), np.ones(d)), (np.ones(d), np.ones(d))]

    for b, h in zip(bases, hypers):
        P = b.transform(X, *h)
        dP = b.grad(X, *h)

        assert P.shape[0] == N
        if not issequence(dP):
            # Non-sequence grad: either an empty list (no hypers) or an array.
            if isinstance(dP, list):
                assert dP == []
            else:
                assert dP.shape[0] == N
        else:
            for dp in dP:
                assert dp.shape[0] == N
        assert P.ndim == 2

    # Concatenate every basis and flatten the hyperparameters accordingly.
    bcat = reduce(add, bases)
    hyps = []
    for h in hypers:
        hyps.extend(list(h))
    P = bcat.transform(X, *hyps)
    dP = bcat.grad(X, *hyps)

    assert bcat.get_dim(X) == P.shape[1]
    assert P.shape[0] == N
    assert P.ndim == 2
    for dp in dP:
        assert dp.shape[0] == N