def verify_cubic_features1D():
    X = np.array([[np.sqrt(3)], [0]])
    X_cube = np.sort(cubic_features(X))
    X_correct = np.array([[1., np.sqrt(9), np.sqrt(27), np.sqrt(27)],
                          [0., 0., 0., 1.]])
    if np.all(np.absolute(X_cube - X_correct) < 1.0e-6):
        print("Verifying cubic features of 1 dimension: Passed")
    else:
        print("Verifying cubic features of 1 dimension: Failed")
def verify_cubic_features2D2():
    X = np.array([[np.sqrt(3), 0], [0, np.sqrt(3)]])
    X_cube = np.sort(cubic_features(X))
    X_correct = np.array([[0., 0., 0., 0., 0., 0., 1., 3., 5.19615242, 5.19615242],
                          [0., 0., 0., 0., 0., 0., 1., 3., 5.19615242, 5.19615242]])
    if np.all(np.absolute(X_cube - X_correct) < 1.0e-6):
        print("Verifying cubic features of 2 dimensions asymmetric vectors: Passed")
    else:
        print("Verifying cubic features of 2 dimensions asymmetric vectors: Failed")
def verify_cubic_features2D():
    X = np.array([[np.sqrt(3), np.sqrt(3)], [0, 0]])
    X_cube = np.sort(cubic_features(X))
    X_correct = np.array([[1., 3., 3., 5.19615242, 5.19615242, 5.19615242,
                           5.19615242, 7.34846923, 9., 9.],
                          [0., 0., 0., 0., 0., 0., 0., 0., 0., 1.]])
    if np.all(np.absolute(X_cube - X_correct) < 1.0e-6):
        print("Verifying cubic features of 2 dimensions: Passed")
    else:
        print("Verifying cubic features of 2 dimensions: Failed")
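
# A minimal sketch of one possible cubic_features() implementation (an assumption, not
# necessarily the version in features.py): it expands the cubic kernel (x . y + 1)**3
# into explicit monomial features of total degree <= 3, scaled so that
# phi(x) . phi(y) == (x . y + 1)**3. The checks above sort the columns before comparing,
# so any column ordering of these features would pass them.
import itertools
import math

import numpy as np


def cubic_features_sketch(X):
    """Explicit feature map of the cubic kernel; X has shape (n, d)."""
    n, d = X.shape
    # Each exponent vector (a_1, ..., a_d) with a_1 + ... + a_d <= 3 defines one monomial;
    # index d acts as an "unused slot" so monomials of degree below 3 are included too.
    exponent_vectors = set()
    for combo in itertools.combinations_with_replacement(range(d + 1), 3):
        exps = [0] * d
        for idx in combo:
            if idx < d:
                exps[idx] += 1
        exponent_vectors.add(tuple(exps))
    columns = []
    for exps in sorted(exponent_vectors):
        t = sum(exps)
        # Coefficient from the multinomial expansion: C(3, t) * t! / (a_1! ... a_d!)
        coeff = math.comb(3, t) * math.factorial(t)
        for a in exps:
            coeff //= math.factorial(a)
        monomial = np.ones(n)
        for j, a in enumerate(exps):
            monomial *= X[:, j] ** a
        columns.append(np.sqrt(coeff) * monomial)
    return np.column_stack(columns)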
secondimage_reconstructed = reconstruct_PC(train_pca[1, :], pcs, n_components, train_x)
plot_images(secondimage_reconstructed)
plot_images(train_x[1, :])


## Cubic Kernel ##

# TODO: Find the 10-dimensional PCA representation of the training and test sets
n_components = 10
pcs10 = principal_components(train_x)
train_pca10 = project_onto_PC(train_x, pcs10, n_components)
test_pca10 = project_onto_PC(test_x, pcs10, n_components)

# TODO: First fill out the cubic_features() function in features.py, as the code below requires it.
train_cube = cubic_features(train_pca10)
test_cube = cubic_features(test_pca10)
# train_cube (and test_cube) is a representation of our training (and test) data
# after applying the cubic-kernel feature mapping to the 10-dimensional PCA representations.

# TODO: Train your softmax regression model using (train_cube, train_y)
#       and evaluate its accuracy on (test_cube, test_y).
temp_parameter = 1
theta, cost_function_history = softmax_regression(train_cube, train_y, temp_parameter,
                                                  alpha=0.3, lambda_factor=1.0e-4,
                                                  k=10, num_iterations=150)
plot_cost_function_over_time(cost_function_history)
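
# A minimal sketch for the evaluation half of the TODO above, assuming a
# compute_test_error(X, Y, theta, temp_parameter) helper like the one used for the
# raw-pixel softmax model earlier in this script; swap in whatever error/accuracy
# routine your softmax module actually provides.
test_error_cube = compute_test_error(test_cube, test_y, theta, temp_parameter)
print("Test error on 10-dim PCA + cubic features:", test_error_cube)
print("Test accuracy on 10-dim PCA + cubic features:", 1 - test_error_cube)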