Example #1
def train_bnn(data='expx', n_data=50, n_samples=20, arch=[1, 20, 1],
              prior_params=None, prior_type=None, act='rbf',
              iters=300, lr=0.01, plot=True, save=False):

    if isinstance(data, str):
        inputs, targets = build_toy_dataset(data=data, n_data=n_data)
    else:
        inputs, targets = data

    if plot: fig, ax = p.setup_plot()

    init_params = init_var_params(arch)

    def loss(params, t):
        return vlb_objective(params, inputs, targets, arch, n_samples, act=act,
                             prior_params=prior_params, prior_type=prior_type)

    def callback(params, t, g):
        plot_inputs = np.linspace(-10, 10, num=500)[:, None]

        f_bnn = sample_bnn(params, plot_inputs, 5, arch, act)
        # Plot data and functions.
        p.plot_iter(ax, inputs, plot_inputs, targets, f_bnn)
        print("ITER {} | VLB {}".format(t, -loss(params, t)))

    var_params = adam(grad(loss), init_params,
                      step_size=lr, num_iters=iters, callback=callback)
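
init_var_params and sample_bnn are helpers defined elsewhere in the repo. A minimal sketch of what they plausibly look like, assuming a mean-field Gaussian posterior and the reparameterization trick (all names, shapes, and the rbf activation below are reconstructions, not the repo's actual code):

import autograd.numpy as np
import autograd.numpy.random as npr

rs = npr.RandomState(0)

def layer_shapes(arch):
    # (in, out) sizes for each fully connected layer, e.g. arch=[1, 20, 1].
    return list(zip(arch[:-1], arch[1:]))

def num_weights(arch):
    return sum((m + 1) * n for m, n in layer_shapes(arch))

def init_var_params(arch, scale=-5.0):
    # Mean-field Gaussian q(w): one mean and one log-std per weight.
    D = num_weights(arch)
    return rs.randn(D) * 0.1, np.ones(D) * scale      # (means, log_stds)

def unpack_layers(weights, arch):
    # Split flat [n_samples, D] weights into per-layer (W, b) blocks.
    n = len(weights)
    for m, k in layer_shapes(arch):
        yield (weights[:, :m * k].reshape((n, m, k)),
               weights[:, m * k:m * k + k].reshape((n, 1, k)))
        weights = weights[:, (m + 1) * k:]

def sample_bnn(params, x, n_samples, arch, act='rbf'):
    # Reparameterization: w = mu + sigma * eps with eps ~ N(0, I), then a
    # forward pass through the network at x for each sampled weight vector.
    means, log_stds = params
    weights = means + np.exp(log_stds) * rs.randn(n_samples, len(means))
    nonlinearity = (lambda z: np.exp(-z ** 2)) if act == 'rbf' else np.tanh
    h = np.tile(x, (n_samples, 1, 1))                 # [n_samples, N, 1]
    for W, b in unpack_layers(weights, arch):
        out = np.einsum('nij,njk->nik', h, W) + b
        h = nonlinearity(out)
    return out[:, :, 0]                               # [n_samples, N]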
Example #2
def train_gp(D=1, data='xsinx', n_data=5):

    num_params = D + 3  # mean, 2 kernel params, noise scale
    params = 0.1 * rs.randn(num_params)

    X, y = build_toy_dataset(data, n_data)
    y = y.ravel()
    dataset = (X, y[:, None])  # keep a 2-D copy of y; don't shadow the input dimension D

    fig, ax = plotting.setup_plot()

    x_plot = np.reshape(np.linspace(-8, 8, 400), (400, 1))
    pred_mean, pred_cov = predict(params, X, y,
                                  x_plot)  # shapes [400], [400, 400]
    std = np.sqrt(np.diag(pred_cov))  # shape [400]
    ax.plot(x_plot, pred_mean, 'b')
    fs = sample_functions(params, X, y, x_plot, 3)  # [400, 3]: three posterior draws
    ax.plot(x_plot, fs)
    ax.plot(X.ravel(), y.ravel(), '.')
    plt.show()
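
predict and sample_functions come from the repo's GP utilities. A sketch of the standard posterior-predictive computation predict presumably performs, with the params layout (mean, log-noise, log kernel parameters) borrowed from autograd's Gaussian process example:

import autograd.numpy as np

def rbf_covariance(kernel_params, x, xp):
    # Exponentiated-quadratic kernel; parameters live on the log scale.
    output_scale = np.exp(kernel_params[0])
    lengthscales = np.exp(kernel_params[1:])
    diffs = np.expand_dims(x / lengthscales, 1) - np.expand_dims(xp / lengthscales, 0)
    return output_scale * np.exp(-0.5 * np.sum(diffs ** 2, axis=2))

def predict(params, X, y, x_star):
    # Standard GP posterior predictive:
    #   mean = m + K_*x (K_xx + s I)^{-1} (y - m)
    #   cov  = K_**  - K_*x (K_xx + s I)^{-1} K_x*
    mean, noise, kernel_params = params[0], np.exp(params[1]) + 1e-4, params[2:]
    K_xx = rbf_covariance(kernel_params, X, X) + noise * np.eye(len(y))
    K_sx = rbf_covariance(kernel_params, x_star, X)
    K_ss = rbf_covariance(kernel_params, x_star, x_star)
    alpha = np.linalg.solve(K_xx, y - mean)
    pred_mean = mean + np.dot(K_sx, alpha)
    pred_cov = K_ss - np.dot(K_sx, np.linalg.solve(K_xx, K_sx.T))
    return pred_mean, pred_cov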
Example #3
def train_bnn(data='expx', n_data=20, n_samples=5, arch=[1, 20, 20, 1],
              prior_params=None, prior_type=None, act='rbf',
              iters=65, lr=0.07, plot=True, save=False):

    if isinstance(data, str):
        inputs, targets = build_toy_dataset(data=data, n_data=n_data)
    else:
        inputs, targets = data

    if plot: fig, ax = p.setup_plot()

    def loss(params, t):
        return vlb_objective(params, inputs, targets, arch, n_samples, act=act,
                             prior_params=prior_params, prior_type=prior_type)

    def callback(params, t, g):
        plot_inputs = np.linspace(-8, 8, num=400)[:, None]
        f_bnn = sample_bnn(params, plot_inputs, 5, arch, act)

        # Plot data and functions.
        p.plot_iter(ax, inputs, plot_inputs, targets, f_bnn)
        print("ITER {} | VLB {}".format(t, -loss(params, t)))
        if t > 50:
            D = inputs, targets
            x_plot = np.reshape(np.linspace(-8, 8, 400), (400, 1))
            pred = sample_bnn(params, x_plot, 5, arch, act)
            p.plot_deciles(x_plot.ravel(), pred.T, D, str(t) + "bnnpostfullprior", plot="gpp")

    var_params = adam(grad(loss), init_var_params(arch),
                      step_size=lr, num_iters=iters, callback=callback)

    D = inputs, targets
    x_plot = np.reshape(np.linspace(-8, 8, 400), (400, 1))
    pred = sample_bnn(var_params, x_plot, 5, arch, act)
    p.plot_deciles(x_plot.ravel(), pred.T, D, "bnnpostfullprior", plot="gpp")
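
vlb_objective is the negative evidence lower bound that train_bnn minimizes. A minimal Monte Carlo sketch assuming a mean-field Gaussian q and an isotropic Gaussian prior (prior_params and prior_type are accepted but ignored here; sample_bnn as sketched under Example #1):

import autograd.numpy as np
from autograd.scipy.stats import norm

def vlb_objective(params, inputs, targets, arch, n_samples, act='rbf',
                  noise_std=0.1, prior_std=1.0, **unused_prior_args):
    # Negative ELBO: KL(q(w) || p(w)) - E_q[log p(targets | w)], with the
    # expected log-likelihood estimated from n_samples weight draws.
    means, log_stds = params
    f = sample_bnn(params, inputs, n_samples, arch, act)    # [n_samples, N]
    log_lik = np.mean(np.sum(norm.logpdf(f, targets.ravel(), noise_std), axis=1))
    # Closed-form KL between the diagonal Gaussian q(w) and N(0, prior_std^2 I).
    kl = 0.5 * np.sum((np.exp(2 * log_stds) + means ** 2) / prior_std ** 2
                      - 1.0 - 2 * log_stds + 2 * np.log(prior_std))
    return kl - log_lik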
Example #4
    mean = a_samples * (fs - m) / unbiased(fs)

    log_qy = diag_gaussian_log_density(y, mean, noise)
    log_qa = mvn.logpdf(a_samples, qa_mean, qa_Sigma)
    log_pa = diag_gaussian_log_density(a_samples, 0, 1)

    return np.mean(log_qy - log_qa + log_pa)
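
diag_gaussian_log_density is a small repo helper, and mvn is presumably autograd.scipy.stats.multivariate_normal. A sketch consistent with the calls above (treating the third argument as a plain standard deviation is an assumption):

import autograd.numpy as np
from autograd.scipy.stats import multivariate_normal as mvn  # the mvn.logpdf used above

def diag_gaussian_log_density(x, mu, std):
    # log N(x | mu, std^2 I), summed over the last axis.
    return np.sum(-0.5 * ((x - mu) / std) ** 2
                  - np.log(std) - 0.5 * np.log(2 * np.pi), axis=-1)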


if __name__ == '__main__':

    arch = [1, 20, 20, 1]
    activation = 'rbf'
    num_fncs = 20
    noise = 0.1
    inputs, targets = build_toy_dataset(data='cubic', n_data=70)

    fig = plt.figure(facecolor='white')
    ax = fig.add_subplot(111)
    plt.ion()
    plt.show(block=False)

    def objective(prior_params, qa_params, t):
        return -elbo(prior_params, qa_params, inputs, targets, num_fncs, arch,
                     activation, noise)

    def callback(params, t, g):

        plot_inputs = np.linspace(-8, 8, num=400).reshape(400, 1)
        f_bnn = predictions_qa(params, inputs, targets, noise, num_fncs, arch,
                               activation)
Example #5
    exp_num = 2
    n_data = 70
    iters = 5
    data = "expx"
    samples = 5
    save_plots = True
    plot_during = False
    rs = npr.RandomState(0)
    mvnorm = rs.multivariate_normal
    save_title = "exp-" + str(exp_num) + data + "-posterior samples {}".format(samples)
    save_dir = os.path.join(os.getcwd(), 'plots', 'gp', save_title)

    num_params, predict, log_marginal_likelihood, sample_f = \
        make_gp_funs(rbf_covariance, num_cov_params=D + 1)

    X, y = build_toy_dataset(data, n_data)
    y = y.ravel()

    objective = lambda params, t: log_marginal_likelihood(params, X, y)

    if plot_during:
        fig = plt.figure(figsize=(12, 8), facecolor='white')
        ax = fig.add_subplot(111, frameon=False)
        plt.show(block=False)

    def callback(params, t, g):
        print("Iteration {} log marginal likelihood {}".format(t, objective(params, t)))

        if plot_during:
            plt.cla()
            x_plot = np.reshape(np.linspace(-8, 8, 400), (400, 1))
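
make_gp_funs and rbf_covariance match autograd's Gaussian process example; the log marginal likelihood it returns is essentially the Gaussian evidence of the training data (the params layout of mean, log-noise, log kernel parameters is assumed, and rbf_covariance is the kernel already imported above):

import autograd.numpy as np
from autograd.scipy.stats import multivariate_normal as mvn

def log_marginal_likelihood(params, x, y):
    # log N(y | m 1, K_xx + noise * I): the GP evidence of the training set.
    mean, noise = params[0], np.exp(params[1]) + 1e-4
    cov_y_y = rbf_covariance(params[2:], x, x) + noise * np.eye(len(y))
    return mvn.logpdf(y, mean * np.ones(len(y)), cov_y_y)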
Example #6
if __name__ == '__main__':

    D = 1
    exp_num = 2
    n_data = 70
    iters = 20
    data = "expx"
    samples = 5
    save_plots = True
    plot_during = True
    rs = npr.RandomState(0)
    mvnorm = rs.multivariate_normal

    num_params = D + 3  # mean , 2 kernel params, noise

    X, y = build_toy_dataset(data, n_data)
    y = y.ravel()

    objective = lambda params, t: -log_marginal_likelihood(params, X, y)

    if plot_during:
        fig = plt.figure(figsize=(12, 8), facecolor='white')
        ax = fig.add_subplot(111, frameon=False)
        plt.show(block=False)

    def callback(params, t, g):
        print("Iteration {} log marginal likelihood {}".format(t, -objective(params, t)))

        if plot_during:
            plt.cla()
            x_plot = np.reshape(np.linspace(-8, 8, 400), (400, 1))
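
This script and Example #5 are both cut off inside the plotting callback; the training step they lead into is presumably the usual autograd adam loop (the step size below is a placeholder):

from autograd import grad
from autograd.misc.optimizers import adam

# Fit the GP hyperparameters by following the gradient of the objective.
init_params = 0.1 * rs.randn(num_params)
fitted_params = adam(grad(objective), init_params,
                     step_size=0.1, num_iters=iters, callback=callback)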
Example #7
        if save_during:
            title = " iter {} kl {:5}".format(iter, kl_val)
            plotting.plot_priors(plot_inputs, fs,
                                 os.path.join(save_dir, title))

        print("Iteration {} KL {}".format(iter, kl_val))

    # ----------------------------------------------------------------
    # Initialize the variational prior params (phi) HERE for q(w|phi)

    init_var_params = init_bnn_params(num_weights, scale=scale)

    # ---------------------- MINIMIZE THE KL --------------------------

    prior_params = adam(grad_kl,
                        init_var_params,
                        step_size=step,
                        num_iters=iters_1,
                        callback=callback_kl)

    # --------------------- MINIMIZE THE VLB -----------------------------------

    # Set up
    data = 'xsinx'  # or expx or cosx
    iters_2 = 100
    N_data = 70
    inputs, targets = build_toy_dataset(data, n_data=N_data)

    min_vlb = True
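
The snippet ends here. Given the min_vlb flag and the section headers above, the second phase presumably mirrors train_bnn from Example #1, fitting q(w) to the data under the prior learned in phase one (this continuation is a guess; arch, n_samples, and the earlier step are assumed to be in scope):

    if min_vlb:
        def vlb(params, t):
            # Negative ELBO under the learned prior.
            return vlb_objective(params, inputs, targets, arch, n_samples,
                                 prior_params=prior_params)

        var_params = adam(grad(vlb), init_bnn_params(num_weights, scale=scale),
                          step_size=step, num_iters=iters_2)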