Example #1
    def callback(params, i, grad):
        ml = big_batch_value_and_grad(params, i)
        print("log marginal likelihood:", ml)
        print("----- iter", i)

        # Checkpoint the parameters every 1000 iterations.
        if i % 1000 == 0 and not np.isnan(ml):
            save_string = 'parameters10l300hfor' + str(i) + '.pkl'
            parameters = params, N_weights_enc, samples_per_image, latent_dimensions, rs
            print('saving as:', save_string)
            print('latent dims:', latent_dimensions)
            with open(save_string, 'wb') as f:  # binary mode for pickle
                pickle.dump(parameters, f, 1)

        # Generate samples from the prior and plot a 10x10 grid.
        num_samples = 100
        images_per_row = 10
        zs = rs.randn(num_samples, latent_dimensions)
        samples = decoder(parser.get(params, 'decoding weights'), zs)[:, 0:784]
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
        if i % 100 == 0:
            enc_w = params[0:N_weights_enc]
            dec_w = params[N_weights_enc:]
            plot_latent_centers(encoder, decoder, enc_w, dec_w, train)
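
For completeness, here is a minimal sketch of reloading one of these checkpoints, assuming the same five-element tuple layout written above (the file name is just one the callback would have produced):

    import pickle

    # Restore a checkpoint written by the callback above; the tuple layout
    # mirrors what was dumped.
    with open('parameters10l300hfor1000.pkl', 'rb') as f:
        params, N_weights_enc, samples_per_image, latent_dimensions, rs = pickle.load(f)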
Example #2
    def callback(params, i, grad):
        # Average the log marginal likelihood over 100 minibatches.
        n_iter = 0
        sum_ml = 0.0
        for j in range(100):
            ml = batch_value_and_grad(params, j)
            print("---- log marginal likelihood:", ml)
            n_iter += 1
            sum_ml += ml
            print('-------- avg_ml', sum_ml / n_iter)

        raise SystemExit  # stop the optimizer after this one evaluation

        # Print parameter summaries (unreachable while the early exit above is in place).
        print('norm of stdev', np.linalg.norm(np.exp(parser.get(params, 'mean'))))
        print('stepsize', np.exp(parser.get(params, 'log_stepsize')))

        # Generate samples from the prior and plot them.
        num_samples = 100
        images_per_row = 10
        zs = rs.randn(num_samples, latent_dimensions)
        samples = decoder(parser.get(params, 'decoding weights'), zs)
        # To binarize instead: samples = np.random.binomial(1, samples)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
Example #3
def plot_centers(train_images):
    # Cluster the raw training images and plot the 10 centroids.
    im_clus = kmeans(10)
    im_clus.fit(train_images)
    centers = im_clus.cluster_centers_[:, 0:784]
    fig = plt.figure(1)
    fig.clf()
    ax = fig.add_subplot(111)
    plot_images(centers, ax, ims_per_row=10)
    plt.savefig('centroid.png')
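
The `kmeans(10)` constructor itself never appears in these snippets. Its `.fit`/`.cluster_centers_` interface matches scikit-learn's `KMeans`, so a plausible definition (an assumption, not confirmed by the source) would be:

    from sklearn.cluster import KMeans

    def kmeans(n_clusters):
        # Assumed wrapper exposing the .fit / .cluster_centers_ API used above.
        return KMeans(n_clusters=n_clusters)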
Example #4
    def callback(weights, i, grad):
        # Generate samples from the decoder and save a grid plot.
        num_samples = 100
        zs = rs.randn(num_samples, latent_dimension)
        samples = decoder(combined_parser.get(weights, 'decoder weights'), zs)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=10)
        plt.savefig('samples.png')
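
All of these callbacks share the (params, iteration, gradient) signature that autograd's stochastic optimizers expect, so they were presumably handed to something like `adam`. A minimal sketch of that wiring, where `objective` and `init_weights` are placeholder names rather than identifiers from the source:

    from autograd import grad
    from autograd.optimizers import adam

    # `objective(weights, i)` is assumed to return the minibatch loss.
    objective_grad = grad(objective)
    trained_weights = adam(objective_grad, init_weights,
                           num_iters=10000, callback=callback)

In current autograd releases the optimizers live at `autograd.misc.optimizers`; the flat `autograd.optimizers` path matches the older versions these snippets date from.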
Example #5
def plot_projected_centers(encoder, decoder, enc_w, dec_w):
    # Encode the training set, run k-means in latent space, then decode
    # the 10 cluster centers back to image space for plotting.
    latent_images = encoder(enc_w, train_images)[0]  # [0] selects the means
    im_clus = kmeans(10)
    im_clus.fit(latent_images)
    centers = im_clus.cluster_centers_
    im_cents = decoder(dec_w, centers)
    fig = plt.figure(1)
    fig.clf()
    ax = fig.add_subplot(111)
    plot_images(im_cents, ax, ims_per_row=10)
    plt.savefig('centroid.png')
Example #6
    def batch_marginal_likelihood_estimate(sampler_params):
        samples, likelihood_estimates, entropy_estimates = \
            sample_and_run_langevin(sampler_params, rs, num_samples)
        # .value unwraps the autograd node for printing and plotting.
        print("Mean loglik:", np.mean(likelihood_estimates.value),
              "Mean entropy:", np.mean(entropy_estimates.value))
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples.value, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')

        return np.mean(likelihood_estimates + entropy_estimates)
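
The returned quantity, mean log-likelihood plus entropy, is the standard variational lower bound; stated here for context (a textbook identity, not quoted from the source), for samples $z \sim q$ and unnormalized target $\tilde{p}$ with normalizer $Z$:

    \mathbb{E}_{z \sim q}\left[\log \tilde{p}(z)\right] + H[q]
        = \log Z - \mathrm{KL}\left(q \,\|\, p\right) \le \log Z,

so maximizing it over the sampler parameters tightens the gap to $\log Z$.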
Example #7
    def get_batch_marginal_likelihood_estimate(sampler_params, i):
        samples, entropy_estimates = flow_sample(sampler_params, num_samples, rs)
        likelihood_estimates = class_ll(samples)
        print("Mean loglik:", np.mean(likelihood_estimates.value),
              "Mean entropy:", np.mean(entropy_estimates.value))

        images_per_row = 10
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples.value, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
        return np.mean(likelihood_estimates + entropy_estimates)
Example #8
    def callback(params, i, grad):
        ml = batch_value_and_grad(params, i)
        print("log marginal likelihood:", ml)

        # Generate samples from the prior and plot a 10x10 grid.
        num_samples = 100
        images_per_row = 10
        zs = rs.randn(num_samples, latent_dimensions)
        samples = decoder(parser.get(params, 'decoding weights'), zs)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
Example #9
    def likelihood(weights, inputs, targets):
        pred_probs = make_predictions(weights, inputs)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        try:
            samples = pred_probs.value  # unwrap the autograd node for plotting
        except AttributeError:
            samples = pred_probs
        plot_images(samples, ax, ims_per_row=10)
        plt.savefig('decoded_samples.png')
        # Bernoulli log-likelihood of binary targets under the predicted pixel probabilities.
        label_probabilities = np.log(pred_probs) * targets \
                            + np.log(1 - pred_probs) * (1 - targets)
        return np.sum(label_probabilities, axis=1)  # Sum across pixels.
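
Since `pred_probs` can saturate at exactly 0 or 1, `np.log` can return `-inf` above. A common guard, added here as a suggestion rather than something from the original, clips the probabilities first:

    eps = 1e-7
    probs = np.clip(pred_probs, eps, 1 - eps)  # keep both logs finite
    label_probabilities = np.log(probs) * targets + np.log(1 - probs) * (1 - targets)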
Example #10
    def batch_marginal_likelihood_estimate(sampler_params):
        samples, likelihood_estimates, entropy_estimates = \
            sample_and_run_langevin(sampler_params, rs, num_samples)
        print("mean loglik:", np.mean(likelihood_estimates),
              "mean entropy:", np.mean(entropy_estimates))
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples.value, ax)
        plt.savefig('samples.png')

        return np.mean(likelihood_estimates + entropy_estimates)
Example #11
    def callback(params, i, grad):
        ml = batch_value_and_grad(params, i)
        print("log marginal likelihood:", ml)

        # Generate class-conditional samples: append a one-hot label
        # (cycling through the 10 classes) to each latent draw.
        num_samples = 100
        images_per_row = 10
        zs = rs.randn(num_samples, latent_dimensions)
        base_test = np.zeros((num_samples, D_b))
        for k in range(num_samples):  # `k`, not `i`: don't shadow the iteration count
            base_test[k, k % 10] = 1

        dec_in = np.concatenate((zs, base_test), axis=1)
        samples = decoder(parser.get(params, 'decoding weights'), dec_in)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
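
The one-hot loop can be collapsed into a single vectorized line; this assumes, as the loop already does, that `D_b >= 10`:

    # Row k is one-hot at position k % 10, cycling through the 10 classes.
    base_test = np.eye(D_b)[np.arange(num_samples) % 10]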
Example #12
    def callback(params, i, grad):
        ml = batch_value_and_grad(params, i)
        print("log marginal likelihood:", ml)

        # Generate samples conditioned on (blurred) base images.
        num_samples = 100
        images_per_row = 10
        zs = rs.randn(num_samples, latent_dimensions)
        base_test = base_data[0:num_samples, :]  # conditioning images; needed below
        dec_in = np.concatenate((zs, base_test), axis=1)
        samples = decoder(parser.get(params, 'decoding weights'), dec_in)

        # Build three 10x10 grids: the first image in each row of 10 is the
        # conditioning input; the rest show the decoder output as Bernoulli
        # samples, as means, and as rounded (MAP) pixels.
        plot_shape = (100, 784)
        im_samples = np.zeros(plot_shape)
        im_mean = np.zeros(plot_shape)
        im_map = np.zeros(plot_shape)
        for k in range(plot_shape[0]):
            if k % 10 == 0:
                im_samples[k, :] = base_test[k, :]
                im_mean[k, :] = base_test[k, :]
                im_map[k, :] = base_test[k, :]
            else:
                im_mean[k, :] = samples[k - 1, :]
                im_samples[k, :] = np.random.binomial(1, samples[k - 1, :])
                im_map[k, :] = np.round(samples[k - 1, :])

        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(im_samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')

        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(im_mean, ax, ims_per_row=images_per_row)
        plt.savefig('mean_samples.png')

        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(im_map, ax, ims_per_row=images_per_row)
        plt.savefig('map_samples.png')

        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(base_test, ax, ims_per_row=images_per_row)
        plt.savefig('blurred_samples.png')
Example #13
    def callback(params, i, grad):
        ml = batch_value_and_grad(params, i)
        print("log marginal likelihood:", ml)

        # Generate samples: draw sparse points on the 10-simplex from a
        # Dirichlet(0.1) prior and plot the network's predicted means.
        num_samples = 100
        images_per_row = 10
        zs = np.random.dirichlet(.1 * np.ones(10), num_samples)
        (mus, log_sigs) = encoder(params, zs)
        samples = mus
        # Alternatively, add pixel noise: samples = mus + np.exp(log_sigs) * rs.randn(*mus.shape)

        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=images_per_row)
        plt.savefig('samples.png')
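
For reference, rows drawn from `np.random.dirichlet` lie on the probability simplex, and a small concentration parameter such as 0.1 concentrates each draw on a few coordinates; a quick illustrative check:

    import numpy as np

    zs = np.random.dirichlet(0.1 * np.ones(10), 100)
    assert np.allclose(zs.sum(axis=1), 1.0)  # each row is a point on the simplex
    print((zs.max(axis=1) > 0.5).mean())     # small alpha piles mass on few coordinates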