import numpy as np

# Module paths are assumed; adjust them to match the project layout.
from auto_encoder import AutoEncoder
import plotter


def test_random():
    # Sanity test: increasing the number of features should reduce the
    # least squares reconstruction error.
    num_points = 100
    num_data_per_point = 55
    learning_rate = 0.5
    x_in = np.random.normal(size=(num_data_per_point, num_points))

    for num_features in [1, 5, 10, 15, 20, 40, 70]:
        ae = AutoEncoder(x_in, num_features, random_seed=1234)
        w_in = np.random.normal(size=(num_data_per_point, num_features))
        z_out, least_squares_test = ae.psi(w_in)
        print(f"# features : least squares error = {num_features} : {least_squares_test}")

        print("Starting gradient descent...")
        loss_values = []  # Track loss values over epochs
        for epoch in range(1000):
            # Compute Z, the least squares error, and the gradient with respect to W.
            z_grd, ls_grd, grd = ae.calc_g(w_in)
            # Gradient descent step: update W against the gradient.
            w_in = w_in - learning_rate * grd
            loss_values.append(ls_grd)  # Log loss
            print(f"Epoch: {epoch}\t----------\tLoss: {ls_grd}")

        plotter.plot_loss(
            loss_values,
            f"Gradient Loss Over Epochs (test) (num_features: {num_features})",
        )
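
# The test above assumes an AutoEncoder whose psi(W) returns the latent code Z
# and the least squares error, and whose calc_g(W) additionally returns the
# gradient of that error with respect to W. Below is a minimal sketch of that
# interface, assuming a linear decoder X ~ W @ Z with Z chosen optimally; it is
# an illustrative stand-in, not the project's actual implementation.
class SketchAutoEncoder:
    def __init__(self, x_in, num_features, random_seed=None):
        self.x = x_in
        self.num_features = num_features
        self.rng = np.random.default_rng(random_seed)

    def psi(self, w):
        # Least squares optimal Z for the linear model X ~ W @ Z.
        z = np.linalg.pinv(w) @ self.x
        err = np.linalg.norm(self.x - w @ z) ** 2
        return z, err

    def calc_g(self, w):
        # Envelope-theorem gradient of min_Z ||X - W Z||^2 with respect to W.
        z, err = self.psi(w)
        grad = -2.0 * (self.x - w @ z) @ z.T
        return z, err, grad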