Example #1
    rs = np.random.RandomState(0)

    num_samples = 500
    num_steps = 32
    num_sampler_optimization_steps = 400
    sampler_learn_rate = 0.01

    D = 2
    init_mean = np.zeros(D)
    init_log_stddevs = np.log(0.1*np.ones(D))
    init_output_weights = 0.1*rs.randn(num_steps, D)
    init_transform_weights = 0.1*rs.randn(num_steps, D)
    init_biases = 0.1*rs.randn(num_steps)

    logprob_mvn = build_logprob_mvn(mean=np.array([0.2,0.4]), cov=np.array([[1.0,0.9], [0.9,1.0]]))
    flow_sample, parser = build_flow_sampler(D, num_steps)

    sampler_params = np.zeros(len(parser))
    parser.put(sampler_params, 'mean', init_mean)
    parser.put(sampler_params, 'log_stddev', init_log_stddevs)
    parser.put(sampler_params, 'output weights', init_output_weights)
    parser.put(sampler_params, 'transform weights', init_transform_weights)
    parser.put(sampler_params, 'biases', init_biases)

    def get_batch_marginal_likelihood_estimate(sampler_params):
        samples, entropy_estimates = flow_sample(sampler_params, num_samples, rs)
        likelihood_estimates = logprob_mvn(samples)
        print "Mean loglik:", np.mean(likelihood_estimates.value),\
              "Mean entropy:", np.mean(entropy_estimates.value)
        plot_density(samples.value, "approximating_dist.png")
        return np.mean(likelihood_estimates + entropy_estimates)
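
The excerpt ends before the optimization loop, but the unused `num_sampler_optimization_steps` and `sampler_learn_rate` indicate the estimate is maximized by gradient ascent. A minimal sketch of such a loop, assuming autograd's `grad` (which Example #4 uses) and plain gradient ascent rather than whatever optimizer the original file used:

    from autograd import grad

    # Differentiate the stochastic lower-bound estimate w.r.t. the
    # sampler parameters, then take plain gradient-ascent steps.
    ml_grad = grad(get_batch_marginal_likelihood_estimate)
    for i in range(num_sampler_optimization_steps):
        sampler_params = sampler_params + sampler_learn_rate * ml_grad(sampler_params)
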
Example #2
File: main.py Project: afcarl/autopaint
    enc_layer_sizes = [D, hidden_units, 2 * latent_dimension]
    dec_layer_sizes = [latent_dimension, hidden_units, D]

    N_weights_enc, encoder, _ = make_gaussian_nn(enc_layer_sizes)
    N_weights_dec, decoder, decoder_log_like = make_binary_nn(dec_layer_sizes)

    # Optimization parameters.
    batch_size = 100
    num_training_iters = 100
    sampler_learn_rate = 0.01
    batch_idxs = make_batches(train_images.shape[0], batch_size)

    init_enc_w = rs.randn(N_weights_enc) * param_scale
    init_dec_w = rs.randn(N_weights_dec) * param_scale

    flow_sampler, flow_parser = build_flow_sampler(latent_dimension, num_flow_steps)

    combined_parser = WeightsParser()
    combined_parser.add_shape('encoder weights', N_weights_enc)
    combined_parser.add_shape('decoder weights', N_weights_dec)
    combined_parser.add_shape('flow params', len(flow_parser))

    combined_params = np.zeros(len(combined_parser))
    combined_parser.put(combined_params, 'encoder weights', init_enc_w)
    combined_parser.put(combined_params, 'flow params', init_flow_params(flow_parser, rs))
    combined_parser.put(combined_params, 'decoder weights', init_dec_w)

    def get_batch_lower_bound(cur_params, iter):
        encoder_weights = combined_parser.get(cur_params, 'encoder weights')
        flow_params     = combined_parser.get(cur_params, 'flow params')
        decoder_weights = combined_parser.get(cur_params, 'decoder weights')
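
The excerpt cuts `get_batch_lower_bound` off after the parameter unpacking; Example #4 below contains the complete version. Its body, reproduced here as a sketch for continuity, encodes a minibatch, pushes the approximate posterior through the flow, and returns the stochastic lower bound (entropy plus reconstruction log-likelihood):

        cur_data = train_images[batch_idxs[iter]]
        mus, log_sigs = encoder(encoder_weights, cur_data)
        samples, entropy_estimates = flow_sampler(flow_params, mus,
                                                  np.exp(log_sigs), rs)
        loglikes = decoder_log_like(decoder_weights, samples, cur_data)
        return np.mean(entropy_estimates + loglikes)
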
Example #3
File: main.py Project: afcarl/autopaint
    rs = np.random.RandomState(0)

    num_samples = 500
    num_steps = 32
    num_sampler_optimization_steps = 400
    sampler_learn_rate = 0.01

    D = 2
    init_mean = np.zeros(D)
    init_log_stddevs = np.log(0.1 * np.ones(D))
    init_output_weights = 0.1 * rs.randn(num_steps, D)
    init_transform_weights = 0.1 * rs.randn(num_steps, D)
    init_biases = 0.1 * rs.randn(num_steps)

    #logprob_mvn = build_logprob_mvn(mean=np.array([0.2,0.4]), cov=np.array([[1.0,0.9], [0.9,1.0]]))
    flow_sample, parser = build_flow_sampler(logprob_wiggle, D, num_steps)

    sampler_params = np.zeros(len(parser))
    parser.put(sampler_params, 'mean', init_mean)
    parser.put(sampler_params, 'log_stddev', init_log_stddevs)
    parser.put(sampler_params, 'output weights', init_output_weights)
    parser.put(sampler_params, 'transform weights', init_transform_weights)
    parser.put(sampler_params, 'biases', init_biases)

    def get_batch_marginal_likelihood_estimate(sampler_params):
        samples, likelihood_estimates, entropy_estimates = flow_sample(
            sampler_params, rs, num_samples)
        print "Mean loglik:", np.mean(likelihood_estimates.value),\
              "Mean entropy:", np.mean(entropy_estimates.value)
        plot_density(samples.value, "approximating_dist.png")
        return np.mean(likelihood_estimates + entropy_estimates)
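
`logprob_wiggle` is not defined in this excerpt; judging by how it is passed to `build_flow_sampler`, it is an unnormalized 2-D log-density that the flow is trained to match. A hypothetical stand-in with the same call signature, only so the snippet can be run:

    def logprob_wiggle(z):
        # Hypothetical target: a sinusoidally warped Gaussian ridge,
        # mapping an (N, 2) array of samples to (N,) log-densities.
        x, y = z[:, 0], z[:, 1]
        return -0.5 * (y - np.sin(2.0 * x))**2 / 0.2 - 0.05 * (x**2 + y**2)
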
Example #4
def time_and_acc(latent_dimension):

    start_time = time.time()
    rs = np.random.RandomState(0)
    #load_and_pickle_binary_mnist()
    with open('../../../autopaint/mnist_binary_data.pkl', 'rb') as f:
        N_data, train_images, train_labels, test_images, test_labels = pickle.load(
            f)

    D = train_images.shape[1]
    enc_layer_sizes = [D, hidden_units, 2 * latent_dimension]
    dec_layer_sizes = [latent_dimension, hidden_units, D]

    N_weights_enc, encoder, encoder_log_like = make_gaussian_nn(
        enc_layer_sizes)
    N_weights_dec, decoder, decoder_log_like = make_binary_nn(dec_layer_sizes)

    # Optimization parameters.
    batch_size = 100
    num_training_iters = 100
    sampler_learn_rate = 0.01
    batch_idxs = make_batches(train_images.shape[0], batch_size)

    init_enc_w = rs.randn(N_weights_enc) * param_scale
    init_dec_w = rs.randn(N_weights_dec) * param_scale

    flow_sampler, flow_parser = build_flow_sampler(latent_dimension,
                                                   num_flow_steps)

    combined_parser = WeightsParser()
    combined_parser.add_shape('encoder weights', N_weights_enc)
    combined_parser.add_shape('decoder weights', N_weights_dec)
    combined_parser.add_shape('flow params', len(flow_parser))

    combined_params = np.zeros(len(combined_parser))
    combined_parser.put(combined_params, 'encoder weights', init_enc_w)
    combined_parser.put(combined_params, 'flow params',
                        init_flow_params(flow_parser, rs, latent_dimension))
    combined_parser.put(combined_params, 'decoder weights', init_dec_w)

    def get_batch_lower_bound(cur_params, iter):
        encoder_weights = combined_parser.get(cur_params, 'encoder weights')
        flow_params = combined_parser.get(cur_params, 'flow params')
        decoder_weights = combined_parser.get(cur_params, 'decoder weights')

        cur_data = train_images[batch_idxs[iter]]
        mus, log_sigs = encoder(encoder_weights, cur_data)
        samples, entropy_estimates = flow_sampler(flow_params, mus,
                                                  np.exp(log_sigs), rs)
        loglikes = decoder_log_like(decoder_weights, samples, cur_data)

        print "Iter", iter, "loglik:", np.mean(loglikes).value, \
            "entropy:", np.mean(entropy_estimates).value, "marg. like:", np.mean(entropy_estimates + loglikes).value
        lastVal = np.mean(entropy_estimates + loglikes).value
        with open('lastVal.pkl', 'wb') as f:
            pickle.dump(lastVal, f, 1)
        return np.mean(entropy_estimates + loglikes)

    lb_grad = grad(get_batch_lower_bound)

    def callback(weights, iter, grad):
        #Generate samples
        num_samples = 100
        zs = rs.randn(num_samples, latent_dimension)
        samples = decoder(combined_parser.get(weights, 'decoder weights'), zs)
        fig = plt.figure(1)
        fig.clf()
        ax = fig.add_subplot(111)
        plot_images(samples, ax, ims_per_row=10)
        plt.savefig('samples.png')

    final_params = adam(lb_grad,
                        combined_params,
                        num_training_iters,
                        callback=callback)

    finish_time = time.time()
    # #Broken and very mysterious:
    # lb_val_grad = value_and_grad(get_batch_lower_bound)
    # lb_est = lb_val_grad(final_params,num_training_iters+2)
    # print lb_est
    # lb_est = lb_est[0]
    with open('lastVal.pkl', 'rb') as f:
        lb_est = pickle.load(f)
    print 'lb_est is', lb_est
    print "Total training time:", finish_time - start_time
    return finish_time, lb_est
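
`make_batches` is called in Examples #2 and #4 but defined elsewhere in the repository. A plausible minimal sketch, assuming it partitions row indices into contiguous minibatch slices so that `train_images[batch_idxs[iter]]` selects the iter-th batch:

    def make_batches(N_total, batch_size):
        # Split indices 0..N_total-1 into consecutive slices of length
        # batch_size (the last slice may be shorter).
        return [slice(i, min(i + batch_size, N_total))
                for i in range(0, N_total, batch_size)]
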
Example #5
    t0 = time.time()
    rs = np.random.RandomState(0)

    num_samples = 50
    num_steps = 10
    num_sampler_optimization_steps = 400

    D = 2
    init_mean = np.zeros(D)
    init_log_stddevs = np.log(10 * np.ones(D))
    init_output_weights = 0.1 * rs.randn(num_steps, D)
    init_transform_weights = 0.1 * rs.randn(num_steps, D)
    init_biases = 0.1 * rs.randn(num_steps)

    #logprob_mvn = build_logprob_mvn(mean=np.array([0.2,0.4]), cov=np.array([[1.0,0.9], [0.9,1.0]]))
    flow_sample, parser = build_flow_sampler(log_tapered_inv_rosenbrock, D,
                                             num_steps)

    sampler_params = np.zeros(len(parser))
    parser.put(sampler_params, 'mean', init_mean)
    parser.put(sampler_params, 'log_stddev', init_log_stddevs)
    parser.put(sampler_params, 'output weights', init_output_weights)
    parser.put(sampler_params, 'transform weights', init_transform_weights)
    parser.put(sampler_params, 'biases', init_biases)

    def get_batch_marginal_likelihood_estimate(sampler_params):
        samples, likelihood_estimates, entropy_estimates = flow_sample(
            sampler_params, rs, num_samples)
        print "Mean loglik:", np.mean(likelihood_estimates.value),\
              "Mean entropy:", np.mean(entropy_estimates.value)
        plot_density(samples.value, "approximating_dist.png")
        return np.mean(likelihood_estimates + entropy_estimates)
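
`log_tapered_inv_rosenbrock` is likewise external to the excerpt. The name suggests an inverted (negated) Rosenbrock surface, tapered so the resulting unnormalized density is proper. A hypothetical version for experimentation only:

    def log_tapered_inv_rosenbrock(z):
        # Hypothetical: negated Rosenbrock valley, tapered by a broad
        # quadratic so the density has finite mass; maps (N, 2) -> (N,).
        x, y = z[:, 0], z[:, 1]
        rosenbrock = (1.0 - x)**2 + 100.0 * (y - x**2)**2
        return -0.1 * rosenbrock - 0.01 * (x**2 + y**2)
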