Example #1
import statistics

# ols_main_vector and cross_validation are helper modules/functions provided
# elsewhere in the surrounding project.
def best_poly_cross_validation(t, x, D=None, K=2, seed=1, istraining=False):
    # Default the maximum polynomial order to the number of targets.
    # (A default of len(t) cannot be evaluated in the signature itself.)
    if D is None:
        D = len(t)

    poly_means = list()
    poly_stds = list()
    tr_error_means = list()
    tr_error_stds = list()

    lamb = 0  # ridge regression is not considered here, so no regularisation
    lowest_order = 0  # polynomial order with the lowest mean validation MSE
    lowest_mean = float("inf")  # lowest mean validation MSE seen so far

    if not istraining:
        for power in range(0, D + 1):
            # Build the design matrix for this order and cross-validate on it
            # (not on the raw inputs).
            predictor_matrix = ols_main_vector.creates_predictor_matrix(x, power)
            temp_mean, temp_std = cross_validation(t, predictor_matrix, K, seed,
                                                   lamb, istraining)
            poly_means.append(temp_mean)
            poly_stds.append(temp_std)
            if temp_mean < lowest_mean:
                lowest_mean = temp_mean
                lowest_order = power
        return poly_means, poly_stds, lowest_mean, lowest_order
    else:
        for power in range(0, D + 1):
            predictor_matrix = ols_main_vector.creates_predictor_matrix(x, power)
            temp_mean, temp_std, temp_error = cross_validation(
                t, predictor_matrix, K, seed, lamb, istraining)
            poly_means.append(temp_mean)
            poly_stds.append(temp_std)
            # Summarise the per-fold training errors for this order.
            tr_error_means.append(statistics.mean(temp_error))
            tr_error_stds.append(statistics.stdev(temp_error))
            if temp_mean < lowest_mean:
                lowest_mean = temp_mean
                lowest_order = power
        return (poly_means, poly_stds, lowest_mean, lowest_order,
                tr_error_means, tr_error_stds)
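A minimal usage sketch for the validation-only path, assuming the project's `ols_main_vector` and `cross_validation` helpers are importable; the toy data and order range below are hypothetical.

import numpy as np

# Hypothetical 1-D regression data.
x = np.linspace(0, 1, 20)
t = np.sin(2 * np.pi * x) + 0.1 * np.random.randn(20)

# Search polynomial orders 0..5 with 4-fold cross-validation.
means, stds, best_mse, best_order = best_poly_cross_validation(
    t, x, D=5, K=4, seed=1, istraining=False)
print("best order:", best_order, "mean validation MSE:", best_mse)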
Example #2
def best_poly_cross_validation(t, x, D=None, K=2, seed=1, istraining=False):
    # Default the maximum polynomial order to the number of targets.
    if D is None:
        D = len(t)

    validation_error_mean = list()
    validation_error_std = list()
    training_error_mean = list()
    training_error_std = list()

    lamb = 0  # no ridge penalty
    lowest_order = 0  # polynomial order with the lowest mean validation MSE
    lowest_mean = float("inf")

    for power in range(1, D + 1):
        # Design matrix for this polynomial order.
        X = ols_main_vector.creates_predictor_matrix(x, power)
        result = cross_validation(t, X, K, seed, lamb, istraining)

        validation_mean = result[0]
        validation_std = result[1]

        validation_error_mean.append(validation_mean)
        validation_error_std.append(validation_std)

        if validation_mean < lowest_mean:
            lowest_mean = validation_mean
            lowest_order = power

        if istraining:
            # cross_validation also reports training error when istraining is set.
            training_mean = result[2]
            training_std = result[3]
            training_error_mean.append(training_mean)
            training_error_std.append(training_std)

    if istraining:
        return (validation_error_mean, validation_error_std, lowest_mean,
                lowest_order, training_error_mean, training_error_std)
    else:
        return validation_error_mean, validation_error_std, lowest_mean, lowest_order
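A short sketch of the `istraining=True` path, under the same assumption that `ols_main_vector` and `cross_validation` come from the surrounding project; the data and order range are hypothetical.

# Track validation and training error curves over orders 1..8.
(val_means, val_stds, best_mse, best_order,
 train_means, train_stds) = best_poly_cross_validation(
    t, x, D=8, K=5, seed=1, istraining=True)

for order, (v, tr) in enumerate(zip(val_means, train_means), start=1):
    print(f"order {order}: validation MSE {v:.4f}, training MSE {tr:.4f}")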
Example #3
# ols_main_vector and cross are helper modules from the surrounding project.
def compare(master_training_x, master_training_t, master_testing_x,
            master_testing_t, D, lamb):
    # result[power] holds the test MSE of the order-`power` fit;
    # entry 0 is a placeholder so the list is indexed by polynomial order.
    result = list()
    result.append(0)
    for power in range(1, D + 1):
        # Fit a model of this order on the training data ...
        master_training_X = ols_main_vector.creates_predictor_matrix(
            master_training_x, power)
        w = ols_main_vector.ols_coefficent_prediction_lamda(
            master_training_X, master_training_t, lamb)
        # ... and evaluate it on the held-out test set.
        t_hat = ols_main_vector.generate_predition_vector(master_testing_x, w)
        error = cross.mean_squared_error(master_testing_t, t_hat)
        result.append(error)
    return result
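A hedged usage sketch for `compare`, again assuming `ols_main_vector` and `cross` are available from the project; the train/test split names below are hypothetical.

# Test MSE per polynomial order on a held-out split
# (list is indexed by order; entry 0 is a placeholder).
test_errors = compare(train_x, train_t, test_x, test_t, D=8, lamb=0)

# Smallest test error among orders 1..D.
best_order = min(range(1, len(test_errors)), key=lambda p: test_errors[p])
print("best order on the test set:", best_order, "MSE:", test_errors[best_order])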