예제 #1
0
def test_structured_params(make_quadratic, make_random):
    """Structured optimizers should recover the quadratic's coefficients.

    The parameters are split into a 2-vector (a, b) and a scalar (c) and
    optimized through the structured wrappers around ``minimize`` and
    ``sgd``, both with autodiff-style ``jac=True`` and with an explicit
    gradient function.
    """
    random = make_random
    a, b, c, data, _ = make_quadratic
    w0 = [
        Parameter(random.randn(2), Bound()),
        Parameter(random.randn(1), Bound())
    ]

    # Named defs instead of lambda assignment (PEP 8 E731); assert_opt now
    # actually *asserts* — previously the np.allclose result was discarded,
    # so the test could never fail.
    def qobj_struc(w12, w3, data):
        return q_struc(w12, w3, data, qobj)

    def assert_opt(Eab, Ec):
        assert np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3,
                           rtol=0)

    nmin = structured_minimizer(minimize)
    res = nmin(qobj_struc, w0, args=(data, ), jac=True, method='L-BFGS-B')
    assert_opt(*res.x)

    nsgd = structured_sgd(sgd)
    res = nsgd(qobj_struc, w0, data, eval_obj=True, random_state=make_random)
    assert_opt(*res.x)

    # Same objective, but with the gradient supplied explicitly.
    def qf_struc(w12, w3, data):
        return q_struc(w12, w3, data, qfun)

    def qg_struc(w12, w3, data):
        return q_struc(w12, w3, data, qgrad)

    res = nmin(qf_struc, w0, args=(data, ), jac=qg_struc, method='L-BFGS-B')
    assert_opt(*res.x)
예제 #2
0
def test_rand_start(make_quadratic, make_random):
    """Multi-start (``nstarts``) optimization should still find the optimum.

    Uses structured + log-trick wrappers with random restarts drawn from
    the parameters' distributions (gamma for the Positive pair).
    """
    random = make_random
    a, b, c, data, _ = make_quadratic

    w0 = [Parameter(gamma(1), Positive(), shape=(2, )), Parameter(1., Bound())]

    # Named defs instead of lambda assignment (PEP 8 E731); assert_opt now
    # actually *asserts* — previously the np.allclose result was discarded,
    # so the test could never fail.
    def qobj_struc(w12, w3, data):
        return q_struc(w12, w3, data, qobj)

    def assert_opt(Eab, Ec):
        assert np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3,
                           rtol=0)

    nmin = structured_minimizer(logtrick_minimizer(minimize))
    res = nmin(qobj_struc,
               w0,
               args=(data, ),
               jac=True,
               method='L-BFGS-B',
               random_state=random,
               nstarts=100)
    assert_opt(*res.x)

    nsgd = structured_sgd(logtrick_sgd(sgd))
    res = nsgd(qobj_struc,
               w0,
               data,
               eval_obj=True,
               nstarts=100,
               random_state=random)
    assert_opt(*res.x)
예제 #3
0
def test_log_params(make_quadratic):
    """Log-trick wrappers should handle Positive-bounded parameters.

    Optimizes a flat 3-parameter quadratic with mixed Positive/Bound
    constraints through ``logtrick_minimizer`` and ``logtrick_sgd``.
    """
    a, b, c, data, _ = make_quadratic
    w0 = np.abs(np.random.randn(3))
    bounds = [Positive(), Bound(), Positive()]

    # Named def instead of lambda assignment (PEP 8 E731); assert_opt now
    # actually *asserts* — previously the np.allclose result was discarded,
    # so the test could never fail.
    def assert_opt(Ea, Eb, Ec):
        assert np.allclose((a, b, c), (Ea, Eb, Ec), atol=1e-3, rtol=0)

    nmin = logtrick_minimizer(minimize)
    res = nmin(qobj,
               w0,
               args=(data, ),
               jac=True,
               method='L-BFGS-B',
               bounds=bounds)
    assert_opt(*res.x)

    nsgd = logtrick_sgd(sgd)
    res = nsgd(qobj,
               w0,
               data,
               eval_obj=True,
               bounds=bounds,
               random_state=randstate)
    assert_opt(*res.x)

    # Same objective with the gradient supplied explicitly.
    nmin = logtrick_minimizer(minimize)
    res = nmin(qfun,
               w0,
               args=(data, ),
               jac=qgrad,
               method='L-BFGS-B',
               bounds=bounds)
    assert_opt(*res.x)
예제 #4
0
def learn(X,
          y,
          kerneldef,
          opt_criterion=None,
          verbose=False,
          ftol=1e-8,
          maxiter=10000):
    """Learn kernel hyperparameters by minimizing an optimization criterion.

    Parameters
    ----------
    X : ndarray
        Training inputs (one row per sample).
    y : ndarray
        Training targets, passed to ``opt_criterion``.
    kerneldef : callable
        Kernel definition understood by ``compose`` and ``get_meta``.
    opt_criterion : callable, optional
        Criterion ``f(y, svd_factors) -> float`` to minimize; defaults to
        the negative log marginal likelihood.
    verbose : bool, optional
        Log each criterion evaluation and the final optimizer result.
    ftol : float, optional
        Convergence tolerance passed to the minimizer.
    maxiter : int, optional
        Iteration cap passed to the minimizer.

    Returns
    -------
    The optimized hyperparameters (``result.x`` from the minimizer).
    """
    if opt_criterion is None:
        opt_criterion = criterions.negative_log_marginal_likelihood

    cov_fn = compose(kerneldef)

    # Derive initial values and box bounds from the kernel definition.
    meta = get_meta(kerneldef)
    params = [
        Parameter(i, Bound(l, h)) for i, l, h in zip(
            meta.initial_val, meta.lower_bound, meta.upper_bound)
    ]

    def criterion(*theta):
        # Evaluate the criterion on the SVD factors of the (noisy)
        # covariance matrix.
        K = cov_fn(X, X, theta, True)  # learn with noise!
        factors = np.linalg.svd(K)
        value = opt_criterion(y, factors)
        if verbose:
            log.info("[{0}] {1}".format(value, theta))
        return value

    nmin = structured_minimizer(minimize)
    result = nmin(criterion,
                  params,
                  tol=ftol,
                  options={'maxiter': maxiter},
                  jac=False,
                  method='L-BFGS-B')
    # Debug-leftover print(result) replaced with verbose-gated logging,
    # consistent with how criterion evaluations are reported above.
    if verbose:
        log.info("Optimization result: {0}".format(result))
    return result.x
예제 #5
0
def test_bases(make_gaus_data):
    """Exercise every basis class: transform/grad shapes, and concatenation.

    Each basis is applied to the same data with matching hyperparameters;
    then all bases are concatenated with ``+`` and checked as one.
    """
    X, _, _ = make_gaus_data
    N, d = X.shape
    nC = 10

    bases = [
        bs.BiasBasis(),
        bs.LinearBasis(onescol=True),
        bs.PolynomialBasis(order=2),
        bs.RadialBasis(centres=X[:nC, :]),
        bs.RadialBasis(centres=X[:nC, :],
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.SigmoidalBasis(centres=X[:nC, :]),
        bs.SigmoidalBasis(centres=X[:nC, :],
                          lenscale_init=Parameter(np.ones(d), Positive())),
        bs.RandomRBF(Xdim=d, nbases=10),
        bs.RandomRBF(Xdim=d,
                     nbases=10,
                     lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodRBF(Xdim=d, nbases=10),
        bs.FastFoodRBF(Xdim=d,
                       nbases=10,
                       lenscale_init=Parameter(np.ones(d), Positive())),
        bs.FastFoodGM(Xdim=d, nbases=10),
        bs.FastFoodGM(Xdim=d,
                      nbases=10,
                      mean_init=Parameter(np.zeros(d), Bound()),
                      lenscale_init=Parameter(np.ones(d), Positive())),
    ]

    # Hyperparameter tuples, positionally matched to `bases` above.
    hypers = [(), (), (), (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (1., ), (np.ones(d), ), (1., ), (np.ones(d), ),
              (np.ones(d), np.ones(d)), (np.ones(d), np.ones(d))]

    def check_grad(dP):
        # Gradient may be a single array, an empty list (no hypers), or a
        # sequence of arrays — one per hyperparameter.
        if not issequence(dP):
            assert dP.shape[0] == N if not isinstance(dP, list) else dP == []
        else:
            for dp in dP:
                assert dp.shape[0] == N

    for b, h in zip(bases, hypers):
        P = b.transform(X, *h)
        dP = b.grad(X, *h)

        assert P.shape[0] == N
        assert P.ndim == 2
        check_grad(dP)

    # Concatenate all bases and re-check shapes with the flattened hypers.
    bcat = reduce(add, bases)
    hyps = []
    for h in hypers:
        hyps.extend(list(h))
    P = bcat.transform(X, *hyps)
    dP = bcat.grad(X, *hyps)

    assert bcat.get_dim(X) == P.shape[1]
    assert P.shape[0] == N
    assert P.ndim == 2
    # Bug fix: the original wrapped this check in `for dp in dP:` while the
    # body still tested `dP`, re-running the whole-sequence check once per
    # element. The gradient is checked once, same as in the loop above.
    check_grad(dP)