Example #1
def red_wine_run(train_red_x, train_red_y, test_red_x, test_red_y):
    # Red wine data
    print('---------------\nRed Wine Data\n---------------')

    # TODO: Change hyperparameter values here as needed
    # values for 2D-grid search
    lam = []  # regularization weight [min, max]
    alpha = []  # learning rate [min, max]
    nepochs = []  # sample # of epochs
    epsilon = 0.0  # epsilon value
    param = []
    # end TODO

    # Training Phase
    # run the training with these alpha and lambda ranges
    print(f"alpha: {alpha}, lambda: {lam}")
    print("Running Training phase")
    # return param and optimal values for alpha and lambda from SGDSolver
    param, alpha, lam = SGDSolver('Training', train_red_x, train_red_y, alpha,
                                  lam, nepochs, epsilon, param)

    # optimal values from 2-D search
    print(f"optimal values\nalpha: {alpha}, lambda: {lam}")

    # Note: validation and testing phases only take a single value for (alpha, lam) and not a list.
    # Validation Phase
    x_mse_val = SGDSolver('Validation', test_red_x, test_red_y, alpha, lam,
                          nepochs, epsilon, param)
    print(f"Current Red Wine Data MSE is: {x_mse_val}.")

    # Testing Phase
    red_wine_predicted = SGDSolver('Testing', test_red_x, test_red_y, alpha,
                                   lam, nepochs, epsilon, param)

    for i in range(0, 50):
        print(f"Predicted: {red_wine_predicted[i]}, Real: {test_red_y[i]}")
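The run functions in these examples call SGDSolver with the same eight arguments but expect a different return value per phase, and they assume module-level imports (numpy as np, time, math) plus the project's SGDSolver and helper modules bc and cf. The stub below is only a sketch of that inferred interface; everything inside the branches (the bias column, the placeholder weights, the "best" hyperparameter choice) is an assumption for illustration, not the project's actual solver.

import numpy as np

def SGDSolver(phase, x, y, alpha, lam, nepochs, epsilon, param):
    # Sketch only: shows the call signature and per-phase return values
    # inferred from the examples; the internals are placeholders.
    xb = np.c_[np.asarray(x), np.ones(len(x))]  # assumed layout: features plus a bias column
    if phase == 'Training':
        # real solver: 2D-grid search over the [min, max] ranges of alpha and
        # lam, SGD for nepochs (epsilon presumably a convergence tolerance),
        # returning the fitted param and the single best alpha and lam
        best_alpha = alpha[0] if isinstance(alpha, (list, tuple)) else alpha
        best_lam = lam[0] if isinstance(lam, (list, tuple)) else lam
        param = np.zeros((xb.shape[1], 1))  # placeholder weights
        return param, best_alpha, best_lam
    if phase == 'Validation':
        # return the mean squared error of param on (x, y)
        preds = xb @ np.asarray(param).reshape(-1, 1)
        return float(np.mean((preds.ravel() - np.ravel(y)) ** 2))
    if phase == 'Testing':
        # return predictions for x using the trained param
        return (xb @ np.asarray(param).reshape(-1, 1)).ravel()
    raise ValueError(f"unknown phase: {phase}")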
Example #2
def white_wine_run(train_white_x, train_white_y, test_white_x, test_white_y):
    # White wine data
    print('---------------\nWhite Wine Data\n---------------')

    # TODO: Change hyperparameter values here as needed
    # similar to red_wine_run
    # values for 2D-grid search
    lam = [1e-3, 2]  # regularization weight [min, max]
    alpha = [1e-6, 1e-5]  # learning rate [min, max]
    nepochs = 500  # sample # of epochs
    epsilon = 0.05  # epsilon value
    # random-normal initial parameters (one weight per feature plus a bias term), as a column vector
    param = np.random.standard_normal(size=(np.shape(train_white_x)[1] + 1, 1))
    # end TODO

    # Training Phase
    print(f"alpha: {alpha}, lambda: {lam}")
    print("Running Training phase")
    # return param and optimal values for alpha and lambda from SGDSolver
    training_start = time.time()
    param, alpha, lam = SGDSolver('Training', train_white_x, train_white_y,
                                  alpha, lam, nepochs, epsilon, param)
    training_time = time.time() - training_start
    # optimal values from 2-D search
    print(f"optimal values\nalpha: {alpha}, lambda: {lam}")

    # Note: validation and testing phases only take a single value for (alpha, lam) and not a list.
    # Validation Phase

    x_mse_val = SGDSolver('Validation', test_white_x, test_white_y, alpha, lam,
                          nepochs, epsilon, param)
    print(f"Current White Wine Data MSE is: {x_mse_val}.")

    # Testing Phase
    test_start = time.time()
    white_wine_predicted = SGDSolver('Testing', test_white_x, test_white_y,
                                     alpha, lam, nepochs, epsilon, param)
    test_time = time.time() - test_start
    #for i in range(100, 150):
    #    print(f"Predicted: {white_wine_predicted[i]}, Real: {test_white_y[i]}")
    print(
        f'Training took: {training_time:.2f}s\nTesting took: {test_time:.3f}s')
    test_white_y = bc.classify_real_result(test_white_y)
    confusion_matrix = cf.calculate_confusion_matrix(white_wine_predicted,
                                                     test_white_y)
    accuracy = cf.calculate_accuracy(white_wine_predicted, test_white_y)
    cf.print_confusion_matrix(confusion_matrix)
    cf.print_accuracy(accuracy)
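Example #2 also depends on two project helper modules, bc and cf, that are not shown on this page. The function names below are taken from the example's calls; their bodies are only one plausible reconstruction, assuming the wine-quality target is binarized at an arbitrary threshold of 6. The project's real threshold, label encoding, and printing helpers may differ.

import numpy as np

def classify_real_result(y, threshold=6):
    # assumed behavior: map quality scores (or raw predictions) to labels 0/1
    return (np.ravel(y) >= threshold).astype(int)

def calculate_confusion_matrix(predicted, real):
    # 2x2 matrix with rows = actual class, columns = predicted class
    pred = classify_real_result(predicted)
    real = np.ravel(real).astype(int)
    cm = np.zeros((2, 2), dtype=int)
    for r, p in zip(real, pred):
        cm[r, p] += 1
    return cm

def calculate_accuracy(predicted, real):
    # fraction of predictions whose class matches the already-classified labels
    return float(np.mean(classify_real_result(predicted) == np.ravel(real)))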
Example #3
def white_wine_run(train_white_x, train_white_y, test_white_x, test_white_y):
    # White wine data
    print('---------------\nWhite Wine Data\n---------------')

    # TODO: Change hyperparameter values here as needed
    # similar to red_wine_run
    # values for 2D-grid search
    lam = [0.001, 0.01]  # regularization weight [min, max]
    alpha = [0.0, 0.1]  # learning rate [min, max]
    nepochs = 5  # sample # of epochs
    epsilon = 0.0  # epsilon value
    param = []
    # end TODO

    # Training Phase
    print(f"alpha: {alpha}, lambda: {lam}")
    print("Running Training phase")
    # return param and optimal values for alpha and lambda from SGDSolver
    param, alpha, lam = SGDSolver('Training', train_white_x, train_white_y,
                                  alpha, lam, nepochs, epsilon, param)

    # optimal values from 2-D search
    print(f"optimal values\nalpha: {alpha}, lambda: {lam}")

    # Note: validation and testing phases only take a single value for (alpha, lam) and not a list.
    # Validation Phase
    x_mse = SGDSolver('Validation', test_white_x, test_white_y, alpha, lam,
                      nepochs, epsilon, param)
    print(f"Current White Wine Data MSE is: {x_mse}.")
    acc = 1 - math.sqrt(x_mse)  # rough accuracy proxy: 1 - RMSE
    print(f"\nAccuracy is: {acc}")
    # Testing Phase
    white_wine_predicted = SGDSolver('Testing', test_white_x, test_white_y,
                                     alpha, lam, nepochs, epsilon, param)

    for i in range(0, 50):
        print(f"Predicted: {white_wine_predicted[i]}, Real: {test_white_y[i]}")
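None of the examples shows where the train/test splits come from. The driver below is one way these run functions might be invoked, assuming the UCI wine-quality CSV layout (semicolon-delimited, header row, quality in the last column); the project's actual loading and splitting code may look quite different, and the file name here is only a guess.

import numpy as np

def load_and_split(path, train_frac=0.8, seed=0):
    # load features/target from a semicolon-delimited CSV and split into train/test
    data = np.genfromtxt(path, delimiter=';', skip_header=1)
    np.random.default_rng(seed).shuffle(data)
    cut = int(train_frac * len(data))
    x, y = data[:, :-1], data[:, -1:]
    return x[:cut], y[:cut], x[cut:], y[cut:]

if __name__ == '__main__':
    train_x, train_y, test_x, test_y = load_and_split('winequality-white.csv')
    white_wine_run(train_x, train_y, test_x, test_y)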