Example #1
    z = (z - z_mean)/np.sqrt(z_var)                     # standardize the response to zero mean, unit variance

    k = 5                                                   # number of folds

    N_pol = [3, 5, 7]                                       # polynomial degrees
    lambdas = np.logspace(-4, 3, 8)                         # penalties

    MSE_b = np.zeros([len(N_pol), len(lambdas)])            # bootstrap MSE
    bias_b = np.zeros([len(N_pol), len(lambdas), 2])        # bootstrap bias (value and spread)
    var_b = np.zeros([len(N_pol), len(lambdas), 2])         # bootstrap variance (value and spread)

    MSE_k = np.zeros([len(N_pol), len(lambdas), 2])         # k-fold MSE (value and spread)

    for i, deg in enumerate(N_pol):
        # prepare data
        X = design_matrix(x, y, n=deg)

        # preprocess data
        scaler = StandardScaler()
        scaler.fit(X)
        X = scaler.transform(X)

        for j, l in enumerate(lambdas):
            # bootstrap
            MSE_b[i,j], bias_b[i,j], var_b[i,j] = bias_variance_tradeoff(X, z,
                                                  500, model=Ridge, lambda_=l)

            # kfold
            MSE_k[i,j] = kfold(k, X, z, model=Ridge, lambda_=l)

    for i, deg in enumerate(N_pol):
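
The snippet above calls two helpers whose definitions are not shown: bias_variance_tradeoff and kfold. The following is a minimal sketch of what they could look like, assuming that the model passed in is scikit-learn's Ridge, that z is a 1-D array, and that the trailing axis of length 2 in the result arrays holds an estimate together with its spread; the actual implementations behind the snippet may differ.

# Minimal sketch of the helpers assumed above; not the original implementations.
import numpy as np
from sklearn.linear_model import Ridge
from sklearn.model_selection import KFold, train_test_split
from sklearn.utils import resample


def bias_variance_tradeoff(X, z, n_boot, model=Ridge, lambda_=0.0):
    """Bootstrap estimate of test MSE, squared bias and variance.

    Returns (MSE, [bias^2, spread], [variance, spread]), matching the
    shapes filled in Example #1.
    """
    X_train, X_test, z_train, z_test = train_test_split(X, z, test_size=0.2)
    z_pred = np.empty((z_test.shape[0], n_boot))

    for b in range(n_boot):
        X_b, z_b = resample(X_train, z_train)            # bootstrap resample of the training set
        z_pred[:, b] = model(alpha=lambda_).fit(X_b, z_b).predict(X_test).ravel()

    mse = np.mean((z_test[:, None] - z_pred) ** 2)
    bias2 = (z_test - np.mean(z_pred, axis=1)) ** 2      # pointwise squared bias
    var = np.var(z_pred, axis=1)                         # pointwise prediction variance

    return (mse,
            np.array([np.mean(bias2), np.std(bias2)]),
            np.array([np.mean(var), np.std(var)]))


def kfold(k, X, z, model=Ridge, lambda_=0.0):
    """k-fold cross-validation; returns [mean, std] of the test MSE over the folds."""
    mse = []
    for train_idx, test_idx in KFold(n_splits=k, shuffle=True).split(X):
        fit = model(alpha=lambda_).fit(X[train_idx], z[train_idx])
        mse.append(np.mean((z[test_idx] - fit.predict(X[test_idx])) ** 2))
    return np.array([np.mean(mse), np.std(mse)])

In this sketch, bias and variance are computed pointwise over a fixed test set and then averaged, which keeps the decomposition MSE ~ bias^2 + variance + noise visible when the results are plotted against model complexity.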
Example #2
MSE_test = np.zeros(N)            # N: number of polynomial degrees to sweep over
MSE_train = np.zeros(N)
R2_test = np.zeros(N)
R2_train = np.zeros(N)

MSE_boots = np.zeros(N)           # bootstrap estimates
bias_boots = np.zeros([N, 2])
var_boots = np.zeros([N, 2])

MSE_kfold = np.zeros(N)           # k-fold estimates
std_kfold = np.zeros(N)

for i in range(1, N+1):
    # prepare data
    X = design_matrix(x, y, n=i)
    X_train, X_test, z_train, z_test = train_test_split(X, z, test_size=0.35)

    # preprocess data
    scaler_train = StandardScaler()
    scaler_train.fit(X_train)
    X_train = scaler_train.transform(X_train)
    X_test = scaler_train.transform(X_test)

    scaler_X = StandardScaler()
    scaler_X.fit(X)
    X = scaler_X.transform(X)

    # fit
    beta, beta_var = LinearRegression(X_train, z_train, r_var=1)
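
Example #2 additionally uses design_matrix (shared with Example #1) and a hand-rolled LinearRegression that, with r_var=1, returns the fitted coefficients together with their variances. Below is a minimal sketch under two assumptions: a 2-D polynomial basis of total degree n, and a closed-form ordinary least squares fit with Var(beta_j) = sigma^2 * [(X^T X)^-1]_jj, where sigma^2 is estimated from the residuals. The original helpers may of course be implemented differently.

# Minimal sketch of the remaining helpers; assumptions, not the original code.
import numpy as np


def design_matrix(x, y, n):
    """Design matrix of all monomials x^p * y^q with total degree p + q <= n
    (intercept column included)."""
    x, y = np.ravel(x), np.ravel(y)
    cols = [x**p * y**q for p in range(n + 1) for q in range(n + 1 - p)]
    return np.column_stack(cols)


def LinearRegression(X, z, r_var=0):
    """Ordinary least squares via the normal equations.

    With r_var=1 also returns Var(beta_j) = sigma^2 * [(X^T X)^-1]_jj,
    where sigma^2 is estimated from the residuals.
    """
    XtX_inv = np.linalg.pinv(X.T @ X)
    beta = XtX_inv @ X.T @ z
    if not r_var:
        return beta
    residuals = z - X @ beta
    sigma2 = residuals @ residuals / (X.shape[0] - X.shape[1])
    return beta, sigma2 * np.diag(XtX_inv)

Note that StandardScaler turns a constant intercept column of the design matrix into all zeros (zero-variance features are centred and left unscaled), so the sketch uses np.linalg.pinv instead of a plain matrix inverse.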