Example #1
# numpy is required; IsotropicZeroMeanGaussian, log_gaussian_pdf and
# assert_close come from the surrounding project.
import numpy as np


def test_isotropic_zero_mean_equals_log_gaussian_pdf():
    # an isotropic Gaussian with sigma=sqrt(2) must match the generic
    # log-pdf with mu=0 and Sigma=2*I
    D = 2
    x = np.random.randn(D)
    g = IsotropicZeroMeanGaussian(sigma=np.sqrt(2), D=D)  # D passed explicitly, as in the examples below
    log_pdf = log_gaussian_pdf(x,
                               mu=np.zeros(D),
                               Sigma=np.eye(D) * 2,
                               is_cholesky=False,
                               compute_grad=False)
    assert_close(log_pdf, g.log_pdf(x))
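For context, the identity this test relies on can be checked without the project's own classes: an isotropic Gaussian with standard deviation sigma is just a multivariate normal with covariance sigma^2 * I. A minimal sketch using scipy.stats.multivariate_normal (an assumption here; the original test uses the project's log_gaussian_pdf):

import numpy as np
from scipy.stats import multivariate_normal

D = 2
sigma = np.sqrt(2)
x = np.random.randn(D)

# closed-form log-pdf of a zero-mean isotropic Gaussian
log_pdf_manual = -0.5 * D * np.log(2 * np.pi * sigma ** 2) \
                 - 0.5 * np.sum(x ** 2) / sigma ** 2

# reference: generic multivariate normal with Sigma = sigma^2 * I
log_pdf_scipy = multivariate_normal(np.zeros(D), np.eye(D) * sigma ** 2).logpdf(x)

assert np.allclose(log_pdf_manual, log_pdf_scipy)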
def habc_generator(num_warmup, thin_step):
    # assumes module-level: import os, time, numpy as np, plus the globals
    # num_iterations and statistics defined in the enclosing script
    D = 10

    step_size_min = 0.01
    step_size_max = 0.1
    num_steps_min = 50
    num_steps_max = 50
    sigma_p = 1.

    momentum_seed = np.random.randint(int(time.time()))  # seed from wall-clock time

    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    abc_target = ABCSkewNormalPosterior(theta_true=np.ones(D) * 10)
    start = abc_target.theta_true

    job = HABCJob(abc_target, momentum, num_iterations, start, num_steps_min,
                  num_steps_max, step_size_min, step_size_max, momentum_seed,
                  statistics, num_warmup, thin_step)

    job.walltime = 60 * 60

    # store results in home dir straight away
    d = os.path.dirname(os.path.abspath(__file__)) + os.sep
    job.aggregator = MCMCJobResultAggregatorStoreHome(d)

    return job
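A generator like this is typically called from a driver script that builds one job per configuration. A hypothetical sketch (the (num_warmup, thin_step) grid and the engine.submit_job interface are assumptions, not part of the original code):

# hypothetical driver: one HABC job per (num_warmup, thin_step) setting
for num_warmup in [500, 1000]:
    for thin_step in [1, 10]:
        job = habc_generator(num_warmup, thin_step)
        engine.submit_job(job)  # assumed job-submission interface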
def kmc_generator(num_warmup, thin_step):
    D = 10

    step_size_min = 0.01
    step_size_max = 0.1
    num_steps_min = 50
    num_steps_max = 50
    sigma_p = 1.

    momentum_seed = np.random.randint(int(time.time()))  # seed from wall-clock time

    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    abc_target = ABCSkewNormalPosterior(theta_true=np.ones(D) * 10)
    start = abc_target.theta_true

    Z = np.load("../ground_truth/benchmark_samples.arr")[:1000]
    learn_parameters = False
    force_relearn_parameters = False

    lmbda = 1.
    sigma = 2**4
    if False:  # flip to re-run the kernel parameter search, which exits afterwards
        sigma = select_sigma_grid(Z)
        select_sigma_lambda_cma(Z,
                                num_folds=5,
                                num_repetitions=1,
                                sigma0=0.31,
                                lmbda0=1.)
        exit()

    logger.info("Using sigma=%.6f" % sigma)

    job = KMCLiteJob(Z, sigma, lmbda, abc_target, momentum, num_iterations,
                     start, num_steps_min, num_steps_max, step_size_min,
                     step_size_max, momentum_seed, learn_parameters,
                     force_relearn_parameters, statistics, num_warmup,
                     thin_step)

    # marginal sampler: re-estimate the current state's log-pdf at every
    # iteration rather than caching it
    job.recompute_log_pdf = True

    job.walltime = 60 * 60

    # store results in home dir straight away
    d = os.path.dirname(os.path.abspath(__file__)) + os.sep
    job.aggregator = MCMCJobResultAggregatorStoreHome(d)

    return job
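The disabled branch above would re-run the kernel parameter search (grid search plus CMA-ES) and then exit. When such a search is too expensive, a common fallback for choosing a Gaussian kernel bandwidth is the median heuristic over pairwise distances; a self-contained sketch, independent of the project's select_sigma_grid:

import numpy as np
from scipy.spatial.distance import pdist

def median_heuristic_sigma(Z):
    """Gaussian kernel bandwidth: median of the pairwise Euclidean distances."""
    return np.median(pdist(Z))

# e.g. sigma = median_heuristic_sigma(Z) instead of the hard-coded 2**4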
Example #4
def kmc_generator(num_warmup, thin_step):
    D = 9
    start = np.zeros(D)

    step_size_min = 0.02
    step_size_max = 0.2
    num_steps_min = 50
    num_steps_max = 100
    sigma_p = 1.

    momentum_seed = np.random.randint(int(time.time()))  # seed from wall-clock time

    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    # estimator parameters (commented values are cross-validation scores on the benchmark samples for lmbda = 0.000001 and m = 1000)
    #     sigma = 0.5 # -10.5821369355, -10.621941424
    sigma = 0.6  # -11.1362549964, -11.2676576859, -11.0283705786
    #     sigma = 0.65 # -11.2538670706, -11.247122781, -10.9686599646
    #     sigma = 0.7 # -11.052076379, -11.1454946033, -10.7527806709
    #     sigma = 0.8 # -10.2950464716, -10.6814600267
    lmbda = 0.000001

    target = GlassPosterior()
    m = 1000
    Z = np.load("benchmark_samples.arr")[:m]
    learn_parameters = False
    force_relearn_parameters = False
    job = KMCRandomFeatsJob(Z, m, sigma, lmbda, target, momentum,
                            num_iterations, start, num_steps_min,
                            num_steps_max, step_size_min, step_size_max,
                            momentum_seed, learn_parameters,
                            force_relearn_parameters, statistics, num_warmup,
                            thin_step)

    job.walltime = 60 * 60

    # store results in home dir straight away
    d = os.path.dirname(os.path.abspath(__file__)) + os.sep
    job.aggregator = MCMCJobResultAggregatorStoreHome(d)

    return job
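KMCRandomFeatsJob takes the number of features m alongside the kernel parameters sigma and lmbda, which points to a random-feature approximation of the Gaussian kernel. For reference, the standard random Fourier feature map (Rahimi & Recht) looks as follows; this is a sketch of the general technique, not the project's implementation:

import numpy as np

def rff_embedding(X, m, sigma, seed=0):
    """Random Fourier features approximating k(x, y) = exp(-||x-y||^2 / (2 sigma^2))."""
    rng = np.random.RandomState(seed)
    D = X.shape[1]
    omega = rng.randn(D, m) / sigma        # frequencies ~ N(0, sigma^{-2} I)
    b = rng.uniform(0, 2 * np.pi, size=m)  # random phases
    return np.sqrt(2.0 / m) * np.cos(X.dot(omega) + b)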
def kmc_generator(N, D, target, num_warmup, thin_step, momentum_seed):
    # step sizes determined by pilot runs; only D=2 and D=8 are supported
    if D == 2:
        step_size_min = 0.8
        step_size_max = 1.5
    elif D == 8:
        step_size_min = 0.6
        step_size_max = 1.3
    else:
        raise ValueError("No pilot step sizes available for D=%d" % D)

    start = get_start(D)
    # sigma_p, num_steps_min/max, num_iterations, bananicity and V are
    # module-level globals in the original script
    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    # estimator parameters
    sigma = 0.46
    lmbda = 0.000001

    learn_parameters = N < 500
    force_relearn_parameters = N < 500

    # oracle samples
    Z = sample_banana(N, D, bananicity, V)
    job = KMCRandomFeatsJob(Z,
                            N,
                            sigma,
                            lmbda,
                            target,
                            momentum,
                            num_iterations,
                            start,
                            num_steps_min,
                            num_steps_max,
                            step_size_min,
                            step_size_max,
                            momentum_seed,
                            learn_parameters=learn_parameters,
                            force_relearn_parameters=force_relearn_parameters,
                            statistics=statistics,
                            num_warmup=num_warmup,
                            thin_step=thin_step)
    job.plot = False
    return job
def kmc_generator(num_warmup, thin_step):
    D = 9
    start = np.zeros(D)

    step_size_min = 0.01
    step_size_max = 0.1
    num_steps_min = 1
    num_steps_max = 10
    sigma_p = 1.

    momentum_seed = np.random.randint(int(time.time()))  # seed from wall-clock time

    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    target = GlassPosterior()
    Z = np.load("../ground_truth/benchmark_samples.arr")[:1000]
    learn_parameters = False
    force_relearn_parameters = False

    lmbda = 1.
    sigma = 2**4
    if False:  # flip to re-select the kernel bandwidth via grid search
        sigma = select_sigma_grid(Z, lmbda=lmbda, plot_surface=True)

    job = KMCLiteJob(Z, sigma, lmbda, target, momentum, num_iterations, start,
                     num_steps_min, num_steps_max, step_size_min,
                     step_size_max, momentum_seed, learn_parameters,
                     force_relearn_parameters, statistics, num_warmup,
                     thin_step)

    job.walltime = 60 * 60

    # store results in home dir straight away
    d = os.path.dirname(os.path.abspath(__file__)) + os.sep
    job.aggregator = MCMCJobResultAggregatorStoreHome(d)

    return job
def hmc_generator(D, target, num_warmup, thin_step, momentum_seed):
    # determined by pilot runs
    if D == 2:
        step_size_min = 0.8
        step_size_max = 1.5
    elif D == 8:
        step_size_min = 0.6
        step_size_max = 1.3
    else:
        raise ValueError("No pilot step sizes available for D=%d" % D)

    start = get_start(D)
    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)
    return HMCJob(target,
                  momentum,
                  num_iterations,
                  start,
                  num_steps_min,
                  num_steps_max,
                  step_size_min,
                  step_size_max,
                  momentum_seed,
                  statistics=statistics,
                  num_warmup=num_warmup,
                  thin_step=thin_step)
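HMCJob is configured with a leapfrog step-size range and a range for the number of steps. For reference, a plain leapfrog integrator for a target with gradient grad_log_pdf and identity mass matrix (a generic sketch, not the project's HMCJob internals):

import numpy as np

def leapfrog(q, p, grad_log_pdf, step_size, num_steps):
    """Leapfrog integration of Hamiltonian dynamics, identity mass matrix."""
    q, p = q.copy(), p.copy()
    p = p + 0.5 * step_size * grad_log_pdf(q)  # initial momentum half-step
    for _ in range(num_steps - 1):
        q = q + step_size * p                  # full position step
        p = p + step_size * grad_log_pdf(q)    # full momentum step
    q = q + step_size * p
    p = p + 0.5 * step_size * grad_log_pdf(q)  # final momentum half-step
    return q, p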
Example #8
def kmc_generator(N, D, target, num_warmup, thin_step, momentum_seed):
    # step sizes determined by pilot runs; only D=2 and D=8 are supported
    if D == 2:
        step_size_min = 0.8
        step_size_max = 1.5
    elif D == 8:
        step_size_min = 0.6
        step_size_max = 1.3
    else:
        raise ValueError("No pilot step sizes available for D=%d" % D)

    start = get_start(D)
    momentum = IsotropicZeroMeanGaussian(sigma=sigma_p, D=D)

    learn_parameters = False
    force_relearn_parameters = False

    # oracle samples
    Z = sample_banana(N, D, bananicity, V=100)

    # estimator parameters
    lmbda = 1.
    #     sigma = select_sigma_grid(Z, lmbda=lmbda, plot_surface=True)
    sigma = 50.80

    job = KMCLiteJob(Z,
                     sigma,
                     lmbda,
                     target,
                     momentum,
                     num_iterations,
                     start,
                     num_steps_min,
                     num_steps_max,
                     step_size_min,
                     step_size_max,
                     momentum_seed,
                     learn_parameters=learn_parameters,
                     force_relearn_parameters=force_relearn_parameters,
                     statistics=statistics,
                     num_warmup=num_warmup,
                     thin_step=thin_step)
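    # disabled diagnostic below: visualise the target's log-pdf and gradient
    # field on a 2d slice through the start point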
    #     job.set_up()
    #
    #
    #     res = 50
    #     Xs = np.linspace(-15, 15,res)
    #     Ys = np.linspace(-10, 5,res)
    #
    #     # evaluate density and estimate
    #     D1=0
    #     D2=1
    #     def dummy_grad(X_2d):
    #         theta = start.copy()
    # #                 theta = np.mean(self.Z, 0)
    #         theta[D1]=X_2d[0]
    #         theta[D2]=X_2d[1]
    #         return job.target.grad(theta)
    #
    #     def dummy(X_2d):
    #         theta = start.copy()
    # #                 theta = np.mean(self.Z, 0)
    #         theta[D1]=X_2d[0]
    #         theta[D2]=X_2d[1]
    #         return job.target.log_pdf(theta)
    #
    #     import matplotlib.pyplot as plt
    #     plt.figure()
    #     G = evaluate_density_grid(Xs, Ys, dummy)
    #     plot_array(Xs, Ys, G)
    #     plt.plot(Z[:,D1], Z[:,D2], '.')
    #     plt.plot(start[D1], start[D2], 'b*', markersize=15)
    #
    #     plt.figure()
    #     G_norm, U, V, X, Y = evaluate_gradient_grid(Xs, Ys, dummy_grad)
    #     plot_array(Xs, Ys, G_norm)
    #     plt.plot(Z[:,D1], Z[:,D2], '.')
    #     plt.quiver(X, Y, U, V, color='m')
    #     plt.show()

    job.plot = False
    return job
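The commented-out block above sketches exactly such a diagnostic, using the project helpers evaluate_density_grid, evaluate_gradient_grid and plot_array. A self-contained version of the density part with plain numpy/matplotlib (reimplementing the idea, not those helpers' exact API):

import numpy as np
import matplotlib.pyplot as plt

def plot_log_pdf_grid(log_pdf, Xs, Ys):
    """Evaluate log_pdf on a 2d grid and show it as an image."""
    G = np.array([[log_pdf(np.array([x, y])) for x in Xs] for y in Ys])
    plt.imshow(G, origin="lower",
               extent=[Xs.min(), Xs.max(), Ys.min(), Ys.max()])
    plt.colorbar()
    plt.show()

# usage for a 2d target, mirroring the grid in the disabled block:
# plot_log_pdf_grid(job.target.log_pdf,
#                   np.linspace(-15, 15, 50), np.linspace(-10, 5, 50))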