Example #1
import numpy as np
# import path assumed from the kernel_exp_family package layout
from kernel_exp_family.estimators.lite.gaussian import KernelExpLiteGaussian


def get_static_surrogate(D):
    # fit the lite Gaussian-kernel estimator to samples from a standard Gaussian
    N = 200
    X = np.random.randn(N, D)
    est = KernelExpLiteGaussian(sigma=1, lmbda=.1, D=D, N=N)
    est.fit(X)

    return est
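A minimal usage sketch (not part of the original example): the fitted surrogate exposes grad, as Example #5 below shows; the log_pdf accessor for the density value itself is assumed here.

est = get_static_surrogate(D=2)
x = np.zeros(2)
print(est.log_pdf(x))  # unnormalised log-density estimate at x (assumed accessor)
print(est.grad(x))     # gradient of the estimated log-density at x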
Example #3
def test_third_order_derivative_tensor_execute():
    if not theano_available:
        raise SkipTest("Theano not available.")
    sigma = 1.
    lmbda = 1.
    N = 100
    D = 2
    X = np.random.randn(N, D)
    
    est = KernelExpLiteGaussian(sigma, lmbda, D, N)
    est.fit(X)
    est.third_order_derivative_tensor(X[0])
Example #4
def test_hessian_execute():
    if not theano_available:
        raise SkipTest("Theano not available.")
    sigma = 1.
    lmbda = 1.
    N = 100
    D = 2
    X = np.random.randn(N, D)
    
    est = KernelExpLiteGaussian(sigma, lmbda, D, N)
    est.fit(X)
    est.hessian(X[0])
Example #5
# adapter exposing KernelExpLiteGaussian through the StaticSurrogate interface
class KernelExpLiteGaussianSurrogate(StaticSurrogate):
    def __init__(self, ndim, sigma, lmbda, N):
        self.surrogate = KernelExpLiteGaussian(sigma=sigma,
                                               lmbda=lmbda,
                                               N=N,
                                               D=ndim)

    def train(self, samples):
        self.surrogate.fit(samples)

    def log_pdf_gradient(self, x):
        return self.surrogate.grad(x)
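A short usage sketch of the adapter above, using only the methods it defines (the training data and query point are arbitrary):

surrogate = KernelExpLiteGaussianSurrogate(ndim=2, sigma=1., lmbda=.1, N=200)
surrogate.train(np.random.randn(200, 2))
g = surrogate.log_pdf_gradient(np.zeros(2))  # gradient of the fitted log-density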
Example #6
def get_kmc_static_kernel():
    num_steps_min, num_steps_max, step_size_min, step_size_max = get_hmc_parameters()
    target, momentum = get_target_momentum()

    # gradient surrogate fitted to standard-Gaussian samples (see Example #1)
    surrogate = get_static_surrogate(momentum.D)
    kmc = KMCStatic(surrogate, target, momentum, num_steps_min, num_steps_max,
                    step_size_min, step_size_max)

    return kmc
Example #7
    # __init__ of a subclass of KernelExpLiteGaussian that adds
    # conjugate-gradient solver options (eta, cg_tol, cg_maxiter)
    def __init__(self,
                 sigma,
                 lmbda,
                 D,
                 N,
                 eta=0.1,
                 cg_tol=1e-3,
                 cg_maxiter=None):
        KernelExpLiteGaussian.__init__(self, sigma, lmbda, D, N)

        self.eta = eta
        self.cg_tol = cg_tol
        self.cg_maxiter = cg_maxiter
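This constructor matches KernelExpLiteGaussianLowRank, which shows up in Example #14 below. A hedged instantiation sketch; the import path is an assumption about the package layout, not taken from the examples:

from kernel_exp_family.estimators.lite.gaussian_low_rank import KernelExpLiteGaussianLowRank  # assumed path

est = KernelExpLiteGaussianLowRank(sigma=2., lmbda=.1, D=2, N=200, cg_tol=1e-2)
est.fit(np.random.randn(200, 2))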
Example #11
    estimator, based on a Bayesian-optimisation black-box optimiser.
    Note that this optimiser can be "hot-started": it can be reset while using
    the previous model to initialise the new optimisation, which is useful
    when the objective function changes slightly, e.g. when new data is added
    to the kernel exponential family model.
    """
    N = 200
    D = 2

    # fit model to samples from a standard Gaussian
    X = np.random.randn(N, D)

    # use any of the models below; the parameter bounds might need adjusting
    estimators = [
        KernelExpFiniteGaussian(sigma=1., lmbda=1., m=N, D=D),
        KernelExpLiteGaussian(sigma=1., lmbda=.001, D=D, N=N),
    ]

    for est in estimators:
        print(est.__class__.__name__)

        est.fit(X)

        # specify bounds of parameters to search for
        param_bounds = {
            #             'lmbda': [-5,0], # fixed lmbda, uncomment to include in search
            'sigma': [-2, 3],
        }

        # OOP interface for optimising and using results
        # the objective is not log-transformed here; if it were, it might need
        # to be bounded away from zero
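The example is cut off before the optimiser is invoked. As a hedged stand-in (the black-box optimiser's API is not shown in this fragment), the same search can be sketched with the attested xvalidate_objective method over the bounds above, in the spirit of the grid search in Example #15; treating sigma as a plain attribute and the bounds as natural-log values are both assumptions:

        log_sigmas = np.linspace(param_bounds['sigma'][0], param_bounds['sigma'][1], 10)
        objectives = np.zeros(len(log_sigmas))
        for i, log_sigma in enumerate(log_sigmas):
            est.sigma = np.exp(log_sigma)  # assumed: bounds are in log domain
            objectives[i] = np.mean(est.xvalidate_objective(X, num_folds=5, num_repetitions=2))
        # refit at the selected bandwidth
        est.sigma = np.exp(log_sigmas[np.argmin(objectives)])
        est.fit(X)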
Example #12
    def _instantiate(self, D):
        return KernelExpLiteGaussian(self.sigma, self.lmbda, D, self.m,
                                     reg_f_norm=True, reg_alpha_norm=True)
Example #13
    def __init__(self, ndim, sigma, lmbda, N):
        self.surrogate = KernelExpLiteGaussian(sigma=sigma,
                                               lmbda=lmbda,
                                               N=N,
                                               D=ndim)
Example #14
    # for D=2, the fitted log-density is plotted; otherwise, only the trajectory
    D = 2
    N = 1000

    # target is banana density, fallback to Gaussian if theano is not present
    if banana_available:
        target = Banana(D=D)
        X = sample_banana(N, D)
    else:
        target = IsotropicZeroMeanGaussian(D=D)
        X = sample_gaussian(N=N)

    # plot trajectories for both KMC lite and finite, parameters are chosen for D=2
    for surrogate in [
            KernelExpFiniteGaussian(sigma=2, lmbda=0.001, m=N, D=D),
            KernelExpLiteGaussian(sigma=20., lmbda=0.001, D=D, N=N),
            KernelExpLiteGaussianLowRank(sigma=20,
                                         lmbda=0.1,
                                         D=D,
                                         N=N,
                                         cg_tol=0.01),
    ]:
        # try commenting out this line (or fitting to only a subset of X) to
        # illustrate KMC's ability to mix even when no (or incomplete) samples
        # from the target are available
        surrogate.fit(X)

        # HMC parameters, fixed here; the oracle marginal variance sets the momentum scale
        momentum = IsotropicZeroMeanGaussian(D=D,
                                             sigma=np.sqrt(
                                                 np.mean(np.var(X, 0))))
        num_steps_min = 10
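The snippet is truncated after num_steps_min. A hedged continuation, following the KMCStatic signature used in Examples #6 and #18; the concrete values after the cut are unknown and chosen purely for illustration:

        num_steps_max = 100   # illustrative value, not from the source
        step_size_min = 0.05  # illustrative value, not from the source
        step_size_max = 0.3   # illustrative value, not from the source
        kmc = KMCStatic(surrogate, target, momentum, num_steps_min,
                        num_steps_max, step_size_min, step_size_max)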
Example #15
# N and D are defined above the visible snippet; typical demo values assumed here
N = 200
D = 2

# fit model to samples from a standard Gaussian
X = np.random.randn(N, D)

# create a grid over the sigma parameter, with a fixed regulariser
log_sigmas = np.linspace(-5, 10, 20)
lmbda = 0.001

# evaluate the objective function over all those parameters
O = np.zeros(len(log_sigmas))
O_lower = np.zeros(len(log_sigmas))
O_upper = np.zeros(len(log_sigmas))

# grid search
for i, log_sigma in enumerate(log_sigmas):
    est = KernelExpLiteGaussian(np.exp(log_sigma), lmbda, D, N)

    # an array of shape num_repetitions x num_folds, each entry an objective value
    xval_result = est.xvalidate_objective(X, num_folds=5, num_repetitions=2)
    O[i] = np.mean(xval_result)
    O_lower[i] = np.percentile(xval_result, 10)
    O_upper[i] = np.percentile(xval_result, 90)

# best parameter
best_log_sigma = log_sigmas[np.argmin(O)]

# visualisation
plt.figure()
plt.plot([best_log_sigma, best_log_sigma], [np.min(O), np.max(O)], 'r')
plt.plot(log_sigmas, O, 'b-')
plt.plot(log_sigmas, O_lower, 'b--')
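The snippet cuts off before the upper-percentile curve is drawn; a natural completion, mirroring the line for O_lower (the axis labels are an assumption):

plt.plot(log_sigmas, O_upper, 'b--')
plt.xlabel('log sigma')
plt.ylabel('cross-validation objective')
plt.show()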
Example #16
def get_KernelExpLiteGaussian_instance(D, N):
    # arbitrary choice of parameters here
    sigma = 1.
    lmbda = 0.01
    return KernelExpLiteGaussian(sigma, lmbda, D, N)
Example #18
    sigma = np.median(squareform(pdist(X))**2) / np.log(N + 1.0) * 2
    M = 200
    if N < M:
        start_samples = np.tile(
            X, [int(M / N) + 1, 1])[:M] + np.random.randn(M, 2) * 2
    else:
        start_samples = X[:M] + np.random.randn(M, 2) * 2
    #start_samples[:, 0] *= 4; start_samples[:, 1] *= 2

    # simulate trajectories for several surrogate estimators; parameters are chosen for D=2
    results = []
    num_steps = 2000
    step_size = 0.1

    for surrogate in [
            KernelExpLiteGaussian(sigma=25 * sigma, lmbda=0.01, D=D, N=N),
            KernelExpStein(sigma=16 * sigma, lmbda=0.01, D=D, N=N),
            KernelExpSteinNonparam(sigma=9 * sigma, lmbda=0.01, D=D, N=N)
    ]:
        surrogate.fit(X)

        # HMC parameters
        momentum = IsotropicZeroMeanGaussian(D=D, sigma=1.0)

        # kmc sampler instance
        kmc = KMCStatic(surrogate, target, momentum, num_steps, num_steps,
                        step_size, step_size)

        # simulate trajectories from the starting points; note that
        # _proposal_trajectory is a "hidden" (underscore-prefixed) method
        Qs_total = []
        acc_probs_total = []
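The loop body is truncated here. A hedged sketch of what plausibly follows, based on the comment above; the exact signature of _proposal_trajectory is an assumption, not taken from the source:

        for q0 in start_samples:
            # assumed signature: returns a proposal trajectory and its
            # acceptance probabilities
            Qs, acc_probs = kmc._proposal_trajectory(q0)
            Qs_total.append(Qs)
            acc_probs_total.append(acc_probs)
        results.append((Qs_total, acc_probs_total))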
Example #19
def get_instance_KernelExpLiteGaussian(N):
    sigma = 2.
    lmbda = 1.
    D = 2
    return KernelExpLiteGaussian(sigma, lmbda, D, N)
Example #20
Ns_fit = np.array([5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2000, 5000])

# D and N_test are defined above the visible snippet; plausible values assumed here
D = 2
N_test = 500

sigma = 1
lmbda = 0.01

# note: the lambda binds est lazily, so it picks up whichever estimator is
# assigned to est when the test statistic is evaluated below
grad = lambda x: est.grad(np.array([x]))[0]
s = GaussianQuadraticTest(grad)
num_bootstrap = 200

result_fname = os.path.splitext(os.path.basename(__file__))[0] + ".txt"

num_repetitions = 150
for _ in range(num_repetitions):
    for N in Ns_fit:
        est = KernelExpLiteGaussian(sigma, lmbda, D, N)
        X_test = np.random.randn(N_test, D)

        X = np.random.randn(N, D)
        est.fit(X)

        U_matrix, stat = s.get_statistic_multiple(X_test[:, 0])

        # sign (wild) bootstrap of the statistic's null distribution
        bootstrapped_stats = np.empty(num_bootstrap)
        for i in range(num_bootstrap):
            W = np.sign(np.random.randn(N_test))
            WW = np.outer(W, W)
            st = np.mean(U_matrix * WW)
            bootstrapped_stats[i] = N_test * st

        p_value = np.mean(bootstrapped_stats > stat)
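The snippet ends at the p-value. Since result_fname is defined above, the original plausibly records each result to that file; a minimal hedged continuation (the file format is an assumption):

        # record (N, p_value) for later aggregation; format is assumed
        with open(result_fname, "a") as f:
            f.write("%d %f\n" % (N, p_value))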