Example #1
    # This excerpt assumes the full script's imports (pickle, autograd's
    # value_and_grad, np as autograd.numpy, and the autopaint helpers used
    # below) and that enc_layers and dec_layers are defined earlier, as they
    # are in Example #2.
    N_weights_enc, encoder, encoder_log_like = make_gaussian_nn(enc_layers)
    N_weights_dec, decoder, decoder_log_like = make_binary_nn(dec_layers)

    # run_aevb(train_images)
    # with open('parameters.pkl') as f:
    #     parameters = pickle.load(f)

    with open('mnist_models.pkl', 'rb') as f:
        trained_weights, all_mean, all_cov = pickle.load(f)

    banded_cov = create_banded_cov(all_cov.shape[0], 100)
    # Build likelihood model.
    L2_reg = 1
    layer_sizes = [784, 200, 100, 10]
    num_weights, make_predictions, likelihood = make_classification_nn(
        layer_sizes)
    # Log-likelihood of class c for a single image under the trained classifier.
    classifier_loglik = lambda image, c: make_predictions(
        trained_weights, np.atleast_2d(image))[:, c]

    image_prior = build_logprob_mvn(all_mean, banded_cov)
    # Combine prior and likelihood.
    model_ll = lambda image, c: image_prior(image) + classifier_loglik(
        image, c)

    def model_nll(image, c):
        return -1 * model_ll(image, c)

    # value_and_grad differentiates with respect to the first argument, so this
    # returns the NLL and its gradient with respect to the image.
    model_nll_with_grad = value_and_grad(model_nll)

    # Optimize a random image to maximize this likelihood.
    cur_class = 1
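    # Sketch, not part of the original excerpt: one way to carry out the
    # optimization described above. It assumes model_nll_with_grad(image, c)
    # returns (nll, gradient with respect to image), which is what autograd's
    # value_and_grad gives for the first positional argument. The starting
    # point, step size, and iteration count are illustrative choices.
    import numpy.random as npr   # plain NumPy randomness for the starting image
    rs = npr.RandomState(0)
    image = all_mean + 0.01 * rs.randn(*all_mean.shape)
    step_size = 0.01
    for t in range(200):
        nll, grad_image = model_nll_with_grad(image, cur_class)
        image = image - step_size * grad_image   # gradient descent on the NLL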
Example #2

if __name__ == '__main__':
    # load_and_pickle_binary_mnist()
    with open('../../../autopaint/mnist_binary_data.pkl', 'rb') as f:
        N_data, train_images, train_labels, test_images, test_labels = pickle.load(
            f)
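    # Sketch (an assumption, not the autopaint code): load_and_pickle_binary_mnist,
    # commented out above, presumably binarizes the MNIST pixels and writes the
    # tuple that is loaded here, along the lines of:
    #     binarized = (raw_train_images > 0.5).astype('float32')
    #     with open('mnist_binary_data.pkl', 'wb') as f:
    #         pickle.dump((N_data, binarized, train_labels,
    #                      test_images, test_labels), f)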

    # Create aevb function
    # Training parameters: hidden_units and latent_dimensions are used below
    # and assumed to be defined here in the full script (they are elided from
    # this excerpt).

    D = train_images.shape[1]

    # The trailing 2 * latent_dimensions is explained in the sketch after this example.
    enc_layers = [D, hidden_units, 2 * latent_dimensions]
    dec_layers = [latent_dimensions, hidden_units, D]

    N_weights_enc, encoder, encoder_log_like = make_gaussian_nn(enc_layers)

    with open('mnist_models.pkl', 'rb') as f:
        trained_weights, all_mean, all_cov = pickle.load(f)

    # Build likelihood model.
    L2_reg = 1
    layer_sizes = [784, 200, 100, 10]
    N_weights_dec, decoder, decoder_log_like = make_classification_nn(
        layer_sizes)

    # Train the variational network, using the decoder log-likelihood together
    # with the previously loaded weights and means.
    decoder = run_variational_network(train_labels, N_weights_dec, decoder,
                                      decoder_log_like, trained_weights,
                                      all_mean)
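
The encoder layer sizes above end in 2 * latent_dimensions because a Gaussian encoder outputs two numbers per latent dimension. The sketch below shows the usual convention for splitting that output and drawing a latent sample with the reparameterization trick; the helper name, the mean/log-variance split, and the plain-NumPy sampling are illustrative assumptions, not the autopaint API.

    import numpy as np

    def split_and_sample(enc_output, latent_dimensions, rs=np.random):
        # First half of the encoder output: per-dimension means;
        # second half: per-dimension log-variances (assumed convention).
        mean = enc_output[:, :latent_dimensions]
        log_var = enc_output[:, latent_dimensions:]
        # Reparameterization trick: the sample stays differentiable
        # with respect to mean and log_var.
        eps = rs.randn(*mean.shape)
        return mean + np.exp(0.5 * log_var) * eps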