Example #1
# Approximate the partition function by reverse AIS (tends to overestimate
# log Z, i.e. to underestimate the log-likelihood)
logZ = estimator.reverse_annealed_importance_sampling(rbm)[0]
LL_train = numx.mean(estimator.log_likelihood_v(rbm, logZ, train_data))
LL_test = numx.mean(estimator.log_likelihood_v(rbm, logZ, test_data))
print('reverse AIS \t%0.5f \t%0.5f' % (LL_train, LL_test))
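
# Hedged comparison sketch: forward AIS errs the opposite way (it tends to
# overestimate the log-likelihood), so running both brackets the true value.
# Assumes the estimator module also exposes annealed_importance_sampling,
# as in PyDeep.
logZ_fwd = estimator.annealed_importance_sampling(rbm)[0]
print('forward AIS \t%0.5f \t%0.5f' % (
    numx.mean(estimator.log_likelihood_v(rbm, logZ_fwd, train_data)),
    numx.mean(estimator.log_likelihood_v(rbm, logZ_fwd, test_data))))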

# Reorder the RBM features by decreasing average hidden activity
rbmReordered = vis.reorder_filter_by_hidden_activation(rbm, train_data)

# Display RBM parameters
vis.imshow_standard_rbm_parameters(rbmReordered, v1, v2, h1, h2)

# Sample some steps and show results
samples = vis.generate_samples(rbm, train_data[0:30], 30, 1, v1, v2, False,
                               None)
vis.imshow_matrix(samples, 'Samples')

# Get the optimal Gabor wavelet frequency and angle for each filter
opt_frq, opt_ang = vis.filter_frequency_and_angle(rbm.w, num_of_angles=40)

# Show some tuning curves
num_filters = 20
vis.imshow_filter_tuning_curve(rbm.w[:, 0:num_filters], num_of_ang=40)

# Show the optimal gratings for some filters
vis.imshow_filter_optimal_gratings(rbm.w[:, 0:num_filters],
                                   opt_frq[0:num_filters],
                                   opt_ang[0:num_filters])

# Show histograms of frequencies and angles.
vis.imshow_filter_frequency_angle_histogram(opt_frq=opt_frq,
                                            opt_ang=opt_ang)
Example #2
            reg_contractive=0.0,
            reg_slowness=0.0,
            data_next=None,
            # Restricting the gradient is important for fast learning; see also GRBMs
            restrict_gradient=0.1,
            restriction_norm='Cols')
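
# Hedged sketch: the mean squared reconstruction error is a quick sanity check
# on training progress (uses ae.encode / ae.decode exactly as in the
# reconstruction display below; assumes numx (numpy) is imported as in the
# other examples).
rec = ae.decode(ae.encode(train_data))
print('MSE reconstruction error: %0.6f' % numx.mean((train_data - rec) ** 2))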

# Show filters/features
filters = vis.tile_matrix_rows(ae.w,
                               v1,
                               v2,
                               h1,
                               h2,
                               border_size=1,
                               normalized=True)
vis.imshow_matrix(filters, 'Filter')

# Show samples
samples = vis.tile_matrix_rows(train_data[0:100].T,
                               v1,
                               v2,
                               10,
                               10,
                               border_size=1,
                               normalized=True)
vis.imshow_matrix(samples, 'Data samples')

# Show reconstruction
samples = vis.tile_matrix_rows(ae.decode(ae.encode(train_data[0:100])).T,
                               v1, v2, 10, 10,
                               border_size=1, normalized=True)
vis.imshow_matrix(samples, 'Reconstructed samples')
Example #3
        # Update the model using the sampled states and learning rates
        model.update(chain_d, chain_m, lr_W1, lr_b1, lr_o1)

    # Print the norms of the parameters and the mean offsets
    print(numx.mean(numxExt.get_norms(wl1.weights)), '\t',
          numx.mean(numxExt.get_norms(wl2.weights)), '\t',
          numx.mean(numxExt.get_norms(l1.bias)), '\t',
          numx.mean(numxExt.get_norms(l2.bias)), '\t',
          numx.mean(numxExt.get_norms(l3.bias)), '\t',
          numx.mean(l1.offset), '\t', numx.mean(l2.offset), '\t',
          numx.mean(l3.offset))

# Show weights
VIS.imshow_matrix(
    VIS.tile_matrix_rows(wl1.weights,
                         v11,
                         v12,
                         v21,
                         v22,
                         border_size=1,
                         normalized=False), 'Weights 1')
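
# Note: plotting numx.dot(wl1.weights, wl2.weights) below projects the
# second-layer features into the input space, giving an approximate picture
# of what the deeper units respond to.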
VIS.imshow_matrix(
    VIS.tile_matrix_rows(numx.dot(wl1.weights, wl2.weights),
                         v11,
                         v12,
                         v31,
                         v32,
                         border_size=1,
                         normalized=False), 'Weights 2')

# Sample some steps
chain_m = [
    numx.float64(numx.random.rand(10 * batch_size, v11 * v12) < 0.5),
    numx.float64(numx.random.rand(10 * batch_size, v21 * v22) < 0.5),
    numx.float64(numx.random.rand(10 * batch_size, v31 * v32) < 0.5)
]
Example #4
width = height = 64

# PCA
pca = PCA(input_dim=width * height)
pca.train(data=data)

# Show the first 100 eigenvectors of the covariance matrix
eigenvectors = vis.tile_matrix_rows(matrix=pca.projection_matrix,
                                    tile_width=width,
                                    tile_height=height,
                                    num_tiles_x=10,
                                    num_tiles_y=10,
                                    border_size=1,
                                    normalized=True)
vis.imshow_matrix(
    matrix=eigenvectors,
    windowtitle='First 100 Eigenvectors of the covariance matrix')

# Show the first 100 images
images = vis.tile_matrix_rows(matrix=data[0:100].T,
                              tile_width=width,
                              tile_height=height,
                              num_tiles_x=10,
                              num_tiles_y=10,
                              border_size=1,
                              normalized=True)
vis.imshow_matrix(matrix=images, windowtitle='First 100 Face images')

# Plot the cumulative sum of the eigenvalues.
eigenvalue_sum = numx.cumsum(pca.eigen_values / numx.sum(pca.eigen_values))
vis.imshow_plot(matrix=eigenvalue_sum,
                windowtitle='Cumulative sum of the eigenvalues')
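
# Hedged follow-up sketch (plain numpy): how many leading components retain
# 95% of the variance, read directly off the cumulative sum computed above.
k95 = int(numx.argmax(eigenvalue_sum >= 0.95)) + 1
print('Components retaining 95%% of the variance: %d' % k95)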
Example #5
        batch = train_data[b:b + batch_size, :]
        trainer.train(data=batch, epsilon=0.1, regL2Norm=0.001)

    # Calculate the reconstruction error and expected end time every 10th epoch
    if epoch % 10 == 0:
        RE = numx.mean(ESTIMATOR.reconstruction_error(rbm, train_data))
        print('%d\t\t%8.6f\t\t' % (epoch, RE), end='')
        print(measurer.get_expected_end_time(epoch, epochs))

measurer.end()

# Print end time
print()
print('End-time: \t', measurer.get_end_time())
print('Training time:\t', measurer.get_interval())
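
# Hedged final-evaluation sketch: for small hidden layers the exact partition
# function is tractable; this mirrors how Example #8 below calls
# ESTIMATOR.partition_function_factorize_h and ESTIMATOR.log_likelihood_v.
Z = ESTIMATOR.partition_function_factorize_h(rbm)
print('Final exact LL:\t',
      numx.mean(ESTIMATOR.log_likelihood_v(rbm, Z, train_data)))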

# Reorder the RBM features by decreasing average hidden activity
reordered_rbm = STATISTICS.reorder_filter_by_hidden_activation(rbm, train_data)
# Display RBM parameters
VISUALIZATION.imshow_standard_rbm_parameters(reordered_rbm, v1, v2, h1, h2)
# Sample some steps and show results
samples = STATISTICS.generate_samples(rbm, train_data[0:30], 30, 1, v1, v2, False, None)
VISUALIZATION.imshow_matrix(samples, 'Samples')

VISUALIZATION.show()

Example #6
        logZ = ESTIMATOR.annealed_importance_sampling(
            trainer.model, betas=betas)
        train_LL = numx.mean(
            ESTIMATOR.LL_lower_bound(trainer.model, train_set, logZ))
        print("AIS  LL: ", 2**(v11 + 1) * train_LL)
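        # Note: the AIS value plugs an approximate log Z into a lower bound
        # on the log-likelihood, so it should normally come out at or below
        # the exact value computed next.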

        logZ = ESTIMATOR.partition_function_exact(trainer.model)
        train_LL = numx.mean(ESTIMATOR.LL_exact(trainer.model, train_set,
                                                logZ))
        print("True LL: ", 2**(v11 + 1) * train_LL)
        print()

# Show weights
VIS.imshow_matrix(
    VIS.tile_matrix_rows(dbm.W1,
                         v11,
                         v12,
                         v21,
                         v22,
                         border_size=1,
                         normalized=False), 'Weights 1')
VIS.imshow_matrix(
    VIS.tile_matrix_rows(numx.dot(dbm.W1, dbm.W2),
                         v11,
                         v12,
                         v31,
                         v32,
                         border_size=1,
                         normalized=False), 'Weights 2')

VIS.show()
Example #7
# Create an ICA node and train it on the whitened data
# (the whitening could come from ZCA, or from PCA with whitening enabled)
ica = ICA(input_dim=width * height)
ica.train(data=whitened_data,
          iterations=100,
          convergence=1.0,
          status=True)
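
# Hedged sanity check (plain numpy, assuming numx is the numpy alias used in
# the other examples): properly whitened data should have close to identity
# covariance, so large off-diagonal mass hints at a whitening problem.
cov = numx.cov(whitened_data.T)
print('Mean absolute off-diagonal covariance: %0.6f'
      % numx.mean(numx.abs(cov - numx.diag(numx.diag(cov)))))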

# Show the first 100 original (unwhitened) image patches
images = vis.tile_matrix_rows(matrix=data[0:100].T,
                              tile_width=width,
                              tile_height=height,
                              num_tiles_x=10,
                              num_tiles_y=10,
                              border_size=1,
                              normalized=True)
vis.imshow_matrix(matrix=images,
                  windowtitle='First 100 image patches')

# Show the first 100 whitened image patches
images = vis.tile_matrix_rows(matrix=whitened_data[0:100].T,
                              tile_width=width,
                              tile_height=height,
                              num_tiles_x=10,
                              num_tiles_y=10,
                              border_size=1,
                              normalized=True)
vis.imshow_matrix(matrix=images,
                  windowtitle='First 100 image patches whitened')

# Show the ICA filters/bases
ica_filters = vis.tile_matrix_rows(matrix=ica.projection_matrix,
                                   tile_width=width, tile_height=height,
                                   num_tiles_x=10, num_tiles_y=10,
                                   border_size=1, normalized=True)
vis.imshow_matrix(matrix=ica_filters, windowtitle='ICA filters')
Example #8
    # Calculate Log-Likelihood, reconstruction error and expected end time every 10th epoch
    if epoch % 10 == 0:
        Z = ESTIMATOR.partition_function_factorize_h(rbm)
        LL = numx.mean(ESTIMATOR.log_likelihood_v(rbm, Z, train_data))
        RE = numx.mean(ESTIMATOR.reconstruction_error(rbm, train_data))
        print("%d\t\t%8.6f\t\t%8.4f\t\t" % (epoch, RE, LL), end='')
        print(measurer.get_expected_end_time(epoch, epochs))

measurer.end()

# Print end time
print()
print("End-time: \t", measurer.get_end_time())
print("Training time:\t", measurer.get_interval())

# Calculate the exact partition function (tractable for small hidden layers)
Z = ESTIMATOR.partition_function_factorize_h(rbm, batchsize_exponent=h1, status=False)

print("True Partition: ", Z,
      " (LL: ", numx.mean(ESTIMATOR.log_likelihood_v(rbm, Z, train_data)), ")")
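
# Hedged cross-check sketch (hypothetical helper, plain numpy + itertools):
# brute-force the log partition function by enumerating every hidden state
# and marginalizing the visibles analytically. Assumes rbm exposes w, bv and
# bh as in PyDeep; only feasible for very small hidden layers.
import itertools

def brute_force_log_partition(w, bv, bh):
    log_terms = []
    for h in itertools.product([0.0, 1.0], repeat=w.shape[1]):
        h = numx.array(h)
        # log sum_v exp(v'(Wh + bv) + h'bh) = h'bh + sum_i softplus((Wh + bv)_i)
        log_terms.append(float(h.dot(bh.ravel())) +
                         numx.sum(numx.logaddexp(0.0, bv.ravel() + w.dot(h))))
    return numx.logaddexp.reduce(numx.array(log_terms))

# e.g. print(brute_force_log_partition(rbm.w, rbm.bv, rbm.bh))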

# Reorder the RBM features by decreasing average hidden activity
reordered_rbm = STATISTICS.reorder_filter_by_hidden_activation(rbm, train_data)
# Display RBM parameters
VISUALIZATION.imshow_standard_rbm_parameters(reordered_rbm, v1, v2, h1, h2)
# Sample some steps and show results
samples = STATISTICS.generate_samples(rbm, train_data[0:30], 30, 1, v1, v2, False, None)
VISUALIZATION.imshow_matrix(samples, "Samples")

VISUALIZATION.show()