# Example 1
    VIS.tile_matrix_rows(wl1.weights,
                         v11,
                         v12,
                         v21,
                         v22,
                         border_size=1,
                         normalized=False), 'Weights 1')
# Visualize the product of the layer-1 and layer-2 weight matrices,
# tiled into an image grid with a 1-pixel border, unnormalized.
combined_weights = numx.dot(wl1.weights, wl2.weights)
tiled_weights = VIS.tile_matrix_rows(combined_weights,
                                     v11,
                                     v12,
                                     v31,
                                     v32,
                                     border_size=1,
                                     normalized=False)
VIS.imshow_matrix(tiled_weights, 'Weights 2')

# Sample some steps: start three Markov chains from random binary states,
# one per layer, each with 10 * batch_size rows.
layer_dims = [v11 * v12, v21 * v22, v31 * v32]
chain_m = [numx.float64(numx.random.rand(10 * batch_size, dim) < 0.5)
           for dim in layer_dims]
model.sample(chain_m, 100, [False, False, False], True)
# Get probabilities for the visible layer from the middle chain state.
samples = l1.activation(None, chain_m[1])[0]
VIS.imshow_matrix(
    VIS.tile_matrix_columns(samples, v11, v12, 10, batch_size, 1, False),
    'Samples')

VIS.show()
# Example 2
# Figure 0 - Data with the estimated principal components overlaid
vis.figure(0, figsize=[7, 7])
vis.title("Data with estimated principal components")
vis.plot_2d_data(data)
# Scale the component directions so the arrows are visible on the data plot.
weight_arrows = scale_factor * pca.projection_matrix
vis.plot_2d_weights(weight_arrows)
vis.axis('equal')
vis.axis([-4, 4, -4, 4])

# Figure 2 - Data with estimated principal components in projected space
vis.figure(2, figsize=[7, 7])
vis.title("Data with estimated principal components in projected space")
vis.plot_2d_data(data_pca)
vis.plot_2d_weights(scale_factor*pca.project(pca.projection_matrix.T))
vis.axis('equal')
vis.axis([-4, 4, -4, 4])

# PCA with whitening
# Re-train on the same data with whiten=True; presumably this rescales the
# projected components to unit variance — confirm against the PCA class.
pca = PCA(data.shape[1], whiten=True)
pca.train(data)
# data_pca now holds the whitened projection, reused by the next figure.
data_pca = pca.project(data)

# Figure 3 - Data with estimated principal components in whitened space
vis.figure(3, figsize=[7, 7])
vis.title("Data with estimated principal components in whitened space")
vis.plot_2d_data(data_pca)
# Component axes mapped through the whitened projection (note the extra
# transpose compared to the unwhitened figures).
whitened_axes = pca.project(pca.projection_matrix.T).T
vis.plot_2d_weights(whitened_axes)
vis.axis('equal')
vis.axis([-4, 4, -4, 4])

# Show all windows
vis.show()
# Example 3
        batch = train_data[b:b + batch_size, :]
        trainer.train(data=batch, epsilon=0.1, regL2Norm= 0.001)

    # Every 10th epoch: report the mean reconstruction error and the expected
    # end time of training. (Despite the original comment, no log-likelihood
    # is computed here — only the reconstruction error.)
    if (epoch % 10 == 0):
        # Mean reconstruction error over the full training set.
        RE = numx.mean(ESTIMATOR.reconstruction_error(rbm, train_data))
        # Python 2 trailing-comma prints keep all pieces on a single line;
        # the final bare print emits the newline.
        print '%d\t\t%8.6f\t\t' % (epoch, RE),
        print measurer.get_expected_end_time(epoch , epochs),
        print

# Stop the stopwatch now that training is finished.
measurer.end()

# Print end time and total training duration (Python 2 print statements;
# the bare print emits a blank separator line).
print
print 'End-time: \t', measurer.get_end_time()
print 'Training time:\t', measurer.get_interval()

# Reorder the RBM features by average hidden activity, highest first.
sorted_rbm = STATISTICS.reorder_filter_by_hidden_activation(rbm, train_data)
# Show the reordered RBM parameters (weights, biases) as images.
VISUALIZATION.imshow_standard_rbm_parameters(sorted_rbm, v1, v2, h1, h2)
# Run the sampler from the first 30 training points and display the result.
generated = STATISTICS.generate_samples(rbm, train_data[:30], 30, 1, v1, v2, False, None)
VISUALIZATION.imshow_matrix(generated, 'Samples')

VISUALIZATION.show()