# --- PCA demo: fit PCA on 2-D data, visualize components, then refit with whitening ---
# NOTE(review): this section arrived collapsed onto a single line (everything
# after the first inline '#' was commented out); reconstructed statement by
# statement from the embedded text — confirm against the original example.

# PCA
pca = PCA(data.shape[1])
pca.train(data)
data_pca = pca.project(data)

# Display results
# For better visualization the principal components are rescaled
scale_factor = 3

# Figure 1 - Data with estimated principal components
vis.figure(0, figsize=[7, 7])
vis.title("Data with estimated principal components")
vis.plot_2d_data(data)
vis.plot_2d_weights(scale_factor * pca.projection_matrix)
vis.axis('equal')
vis.axis([-4, 4, -4, 4])

# Figure 2 - Data with estimated principal components in projected space
vis.figure(2, figsize=[7, 7])
vis.title("Data with estimated principal components in projected space")
vis.plot_2d_data(data_pca)
vis.plot_2d_weights(scale_factor * pca.project(pca.projection_matrix.T))
vis.axis('equal')
vis.axis([-4, 4, -4, 4])

# PCA with whitening — retrain and reproject the data
pca = PCA(data.shape[1], whiten=True)
pca.train(data)
data_pca = pca.project(data)
# --- RBM density visualization: Amari distance check, then P(x) and ln P(x) contour plots ---
# NOTE(review): this section arrived collapsed onto a single line; everything
# after the first inline '#' was dead code. Reconstructed below — confirm
# against the original example.

# NOTE(review): the statement enclosing this call (likely a print of the
# Amari distance between the true mixing matrix and the RBM filters) was cut
# off in the mangled source — two unmatched ')' remained. Reconstructed as a
# bare call so the value's computation is preserved; restore the print/use
# site from the original.
vis.calculate_amari_distance(
    zca.project(mixing_matrix.T),
    numx.vstack((rbm.w.T[2:3], rbm.w.T[3:4])),
)

# Display results
# create a new figure of size 7x7
vis.figure(0, figsize=[7, 7])
vis.title("P(x)")
# plot the data
vis.plot_2d_data(whitened_data)
# plot weights
vis.plot_2d_weights(rbm.w, rbm.bv)
# pass our P(x) as function to plotting function
vis.plot_2d_contour(lambda v: numx.exp(rbm.log_probability_v(logZ, v)))
# No inconsistent scaling
vis.axis('equal')
# Set size of the plot
vis.axis([-5, 5, -5, 5])

# Do the same for the LOG-plot
# create a new figure of size 7x7
vis.figure(1, figsize=[7, 7])
vis.title("Ln( P(x) )")
# plot the data
vis.plot_2d_data(whitened_data)
# plot weights
vis.plot_2d_weights(rbm.w, rbm.bv)
# pass our ln P(x) as function to plotting function
vis.plot_2d_contour(lambda v: rbm.log_probability_v(logZ, v))
# No inconsistent scaling
vis.axis('equal')