Example 1
def plot_error(normalized):
    targets = values[:, 1]
    if normalized == 'yes':
        x = a1.normalize_data(values[:, 7:])
    else:
        x = values[:, 7:]
    N_TRAIN = 100
    x_train = x[0:N_TRAIN, :]
    x_test = x[N_TRAIN:, :]
    t_train = targets[0:N_TRAIN]
    t_test = targets[N_TRAIN:]

    # Complete the linear_regression and evaluate_regression functions in assignment1.py,
    # then pass the required parameters to them.

    tr_dicts = {}
    te_dicts = {}
    for degree in range(1, 7):
        (w, train_err) = a1.linear_regression(x_train, t_train, 'polynomial',
                                              0, degree, 'yes', 0, 0)
        (t_est, test_err) = a1.evaluate_regression(x_test, t_test, w, degree,
                                                   'polynomial', 'yes', 0, 0)
        tr_dicts[degree] = float(train_err)
        te_dicts[degree] = float(test_err)

    # Produce a plot of results.
    plt.rcParams.update({'font.size': 15})
    plt.plot(list(tr_dicts.keys()), list(tr_dicts.values()))
    plt.plot(list(te_dicts.keys()), list(te_dicts.values()))
    plt.ylabel('RMS')
    plt.legend(['Training error', 'Testing error'])
    plt.title('Fit with polynomials, no regularization')
    plt.xlabel('Polynomial degree')
    plt.show()
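Every example on this page calls into an assignment module a1 whose exact signatures differ from author to author. As a reference point, here is a minimal sketch, assuming linear_regression solves regularized least squares on a polynomial design matrix and evaluate_regression reports RMS error; none of these bodies come from the original assignment and they are illustrative only.

import numpy as np

def design_matrix(x, degree, bias=True):
    # Stack the powers x, x^2, ..., x^degree column-wise; optionally
    # prepend a column of ones for the bias term.
    x = np.asarray(x, dtype=float).reshape(len(x), -1)
    phi = np.hstack([x ** d for d in range(1, degree + 1)])
    if bias:
        phi = np.hstack([np.ones((phi.shape[0], 1)), phi])
    return phi

def linear_regression(x, t, degree, reg_lambda=0.0, bias=True):
    # Regularized normal equations: w = (lambda*I + Phi^T Phi)^-1 Phi^T t
    phi = design_matrix(x, degree, bias)
    t = np.asarray(t, dtype=float).reshape(-1, 1)
    w = np.linalg.solve(reg_lambda * np.eye(phi.shape[1]) + phi.T @ phi,
                        phi.T @ t)
    train_rmse = float(np.sqrt(np.mean((phi @ w - t) ** 2)))
    return w, train_rmse

def evaluate_regression(x, t, w, degree, bias=True):
    phi = design_matrix(x, degree, bias)
    t = np.asarray(t, dtype=float).reshape(-1, 1)
    t_est = phi @ w
    rmse = float(np.sqrt(np.mean((t_est - t) ** 2)))
    return t_est, rmse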
Example 2
def CV(lamb):
    fold_size, test_err = 10, 0
    for i in range(0, 10):
        # Fold i holds out rows [boundA, boundB); everything else trains.
        boundA = i * fold_size
        boundB = boundA + fold_size

        x_test = x[boundA:boundB]
        t_test = targets[boundA:boundB]

        x_train = np.concatenate((x[0:boundA, :], x[boundB:, :]))
        t_train = np.concatenate((targets[0:boundA], targets[boundB:]))

        (pred_train, w_train,
         RMSE_train) = a1.linear_regression(x_train,
                                            t_train,
                                            basis='polynomial',
                                            reg_lambda=lamb,
                                            degree=2,
                                            include_bias=True)
        (pred_test, RMSE_test) = a1.evaluate_regression(
            x_test,
            t_test,
            w_train,
            basis='polynomial',
            degree=2,
            include_bias=True)  # !!pass in w_train

        test_err += RMSE_test  # accumulate per-fold RMSE
    return test_err / 10
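A plausible driver for CV (an assumption, not part of the original snippet) sweeps a regularization grid and plots the average validation error on a log axis, using the same grid later examples use:

lambdas = [0.01, 0.1, 1, 10, 100, 1000, 10000]
avg_errs = [CV(l) for l in lambdas]
plt.semilogx(lambdas, avg_errs)
plt.xlabel('lambda (log scale)')
plt.ylabel('Average validation RMSE')
plt.show()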
Example 3
def plot_multiple_error(title, bias):
    # Features 8-15 (columns 7-14 of the data matrix)
    for iterator in range(7, 15):
        x = values[:, iterator]
        x_train = x[0:N_TRAIN, :]
        x_test = x[N_TRAIN:, :]
        (w, train_err) = a1.linear_regression(x_train, t_train, 'polynomial',
                                              0, 3, bias, 0, 0)
        (t_est, test_err) = a1.evaluate_regression(x_test, t_test, w, 3,
                                                   'polynomial', bias, 0, 0)
        # Store the errors in two dicts; the 0.35 key offset places each
        # test bar beside its training bar in the chart below.
        tr_dicts[1 + iterator] = float(train_err)
        te_dicts[1.35 + iterator] = float(test_err)

    # Produce a plot of results.
    plt.rcParams.update({'font.size': 15})
    plt.bar(list(tr_dicts.keys()), list(tr_dicts.values()), width=0.35)
    plt.bar(list(te_dicts.keys()), list(te_dicts.values()), width=0.35)
    plt.ylabel('RMS')
    plt.legend(['Training error', 'Testing error'])
    plt.title('Fit with degree=3 polynomial, no regularization, ' + title)
    plt.xlabel('Feature index')
    plt.show()
Example 4
def regression_reg(lamda, deg):
    validation_err = 0
    for i in range(0, 10):
        # Fold i: rows [i*10, (i+1)*10) form the validation split.
        x_val = x_train[i * 10:(i + 1) * 10, :]
        t_val = t_train[i * 10:(i + 1) * 10, :]
        x_train_use = np.concatenate(
            (x_train[0:i * 10, :], x_train[(i + 1) * 10:, :]), 0)
        t_train_use = np.concatenate(
            (t_train[0:i * 10, :], t_train[(i + 1) * 10:, :]), 0)

        bigphi = a1.design_matrix('polynomial', x_train_use, deg, 1)

        (w, rms_train) = a1.linear_regression(x_train_use, t_train_use, bigphi,
                                              lamda, deg, 0, 0)

        bigphi_val = a1.design_matrix('polynomial', x_val, deg, 1)
        rms_val_error = a1.evaluate_regression(bigphi_val, w, t_val)

        validation_err += rms_val_error
    validation_err_avg[lamda] = validation_err / 10
    print(validation_err_avg)
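A hypothetical driver for regression_reg (assumed; the original snippet does not show where validation_err_avg or the lambda grid come from):

# Assumed driver: initialize the shared dict, sweep the usual grid,
# and keep the lambda with the lowest average validation error.
validation_err_avg = {}
for lam in [0, 0.01, 0.1, 1, 10, 100, 1000, 10000]:
    regression_reg(lam, deg=2)
best_lambda = min(validation_err_avg, key=validation_err_avg.get)
print('best lambda:', best_lambda)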
Example 5
def plot_bar_chart():
    '''
    Perform regression using a single input feature, trying features 8-15.
    For each (un-normalized) feature, fit an unregularized degree-3 polynomial.
    '''
    for column in range(0, 8):
        (w, train_error) = a1.linear_regression(x_training[:, column],
                                                t_training, 'polynomial', 0, 3)
        (_, test_error) = a1.evaluate_regression(x_testing[:, column],
                                                 t_testing, w, 'polynomial', 3)
        training_error[column + 7] = train_error
        testing_error[column + 7] = test_error

    index = np.arange(7, 15) + 1
    bar_size = 0.35
    opacity = 0.8

    mat_plot.bar(index,
                 np.array(list(testing_error.values())),
                 bar_size,
                 alpha=opacity,
                 color=(0.3, 0.5, 0.7, 0.7))
    mat_plot.bar(index + bar_size,
                 np.array(list(training_error.values())),
                 bar_size,
                 alpha=opacity,
                 color=(0.9, 0.6, 0.1, 0.7))

    mat_plot.ylabel('RMSE')
    mat_plot.legend(['Test error', 'Training error'])
    mat_plot.title('RMSE for single input feature, no regularization')
    mat_plot.xlabel('Feature index')
    mat_plot.show()
Example 6
def plot_feature(feature):
    x_train = x[0:N_TRAIN, feature - 8]
    x_test = x[N_TRAIN:, feature - 8]
    # Plot a curve showing the learned function.
    # Use linspace to get a set of samples on which to evaluate.
    x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)

    bigphi = a1.design_matrix('polynomial', x_train, 3, 1)
    bigphi_ev = a1.design_matrix('polynomial', np.transpose(np.asmatrix(x_ev)), 3, 1)

    (w, rms_train) = a1.linear_regression(x_train, t_train, bigphi, -1, 3, 0, 0)
    y_ev = np.transpose(w) * np.transpose(bigphi_ev)
    plt.plot(x_train, t_train, 'bo')
    plt.plot(x_test, t_test, 'go')
    plt.plot(x_ev, np.transpose(y_ev), 'r.-')

    plt.show()
Example 7
def PolynomialRegression(bias):
    (countries, features, values) = a1.load_unicef_data()
    targets = values[:, 1]
    x = values[:, 7:]
    x = a1.normalize_data(x)
    N_TRAIN = 100
    ALL = 195
    x_train = x[0:N_TRAIN, :]
    x_test = x[N_TRAIN:, :]
    t_train = targets[0:N_TRAIN]
    t_test = targets[N_TRAIN:]
    train_error = {}
    test_error = {}
    for degrees in range(1, 7):
        (w, t_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0,
                                          degrees, 0, 1, N_TRAIN, bias)
        (t_est, te_err) = a1.evaluate_regression('polynomial', x_test, w,
                                                 t_test, degrees,
                                                 ALL - N_TRAIN, bias)
        print('degree =', degrees)
        train_error[degrees] = np.sqrt(np.sum(t_err) / N_TRAIN)
        test_error[degrees] = np.sqrt(np.sum(te_err) / (ALL - N_TRAIN))
        print('train_error =', train_error[degrees])
        print('test_error =', test_error[degrees])

    plt.rcParams.update({'font.size': 15})

    degree_axis = list(range(1, 7))
    plt.plot(degree_axis, [train_error[d] for d in degree_axis])
    plt.plot(degree_axis, [test_error[d] for d in degree_axis])
    plt.ylabel('RMS')
    plt.legend(['Training error', 'Testing error'])
    plt.title('Fit with polynomials, no regularization, bias:' + bias)
    plt.xlabel('Polynomial degree')
    plt.show()
Example 8
def PolynomialRegression(bias):
    (countries, features, values) = a1.load_unicef_data()
    targets = values[:,1]
    x = values[:,7:]
    # x = a1.normalize_data(x)
    N_TRAIN = 100
    ALL = 195
    x_train = x[0:N_TRAIN,:]
    x_test = x[N_TRAIN:,:]
    t_train = targets[0:N_TRAIN]
    t_test = targets[N_TRAIN:]
    train_error = {}
    test_error = {}

    for featureNum in range(0, 8):
        x_trainFeature = x_train[:,featureNum]
        x_testFeature = x_test[:,featureNum]
        (w, t_err) = a1.linear_regression(x_trainFeature, t_train, 'polynomial',
                                          0, 3, 0, 1, N_TRAIN, bias)
        (t_est, te_err) = a1.evaluate_regression('polynomial', x_testFeature, w,
                                                 t_test, 3, ALL - N_TRAIN, bias)
        train_error[featureNum] = np.sqrt(np.sum(t_err) / N_TRAIN)
        test_error[featureNum] = np.sqrt(np.sum(te_err) / (ALL - N_TRAIN))
        print('featureNum =', featureNum)
        print('train_error =', train_error[featureNum])
        print('test_error =', test_error[featureNum])
    # Bar positions: features 8-15, with test bars offset by the bar width.
    x = [8, 9, 10, 11, 12, 13, 14, 15]
    x1 = [xi + 0.35 for xi in x]
    y_train = [train_error[i] for i in range(8)]
    y_test = [test_error[i] for i in range(8)]
    width = 0.35
    fig, ax = plt.subplots()
    ax.bar(x,y_train,width)
    ax.bar(x1,y_test,0.35)
    plt.ylabel('RMS')
    plt.legend(['Training error','Testing error'])
    plt.title('Fit with degree-3 polynomials, no regularization, bias: ' + bias)
    plt.xlabel('Feature index')
    plt.show()
Example 9
def PolynomialRegression(bias, featureNum):
    (countries, features, values) = a1.load_unicef_data()
    targets = values[:,1]
    x = values[:,7:]
    # x = a1.normalize_data(x)
    N_TRAIN = 100
    ALL = 195
    x_train = x[0:N_TRAIN,:]
    x_test = x[N_TRAIN:,:]
    t_train = targets[0:N_TRAIN]
    t_test = targets[N_TRAIN:]
    train_error = {}
    test_error = {}
    x_trainFeature = x_train[:,featureNum]
    x_testFeature = x_test[:,featureNum]

    (w, t_err) = a1.linear_regression(x_trainFeature, t_train, 'polynomial', 0, 3,0 ,1 ,N_TRAIN,bias)
    (t_est, te_err) = a1.evaluate_regression('polynomial',x_testFeature, w, t_test, 3, ALL-N_TRAIN, bias)
    train_error =  np.sqrt(np.sum(t_err)/100)
    test_error = np.sqrt(np.sum(te_err)/95)

    fig, (ax1, ax2) = plt.subplots(2)
    fig.suptitle('Visualization of feature ' + str(featureNum + 8))
    NumOfPoints = 500
    x_ev1 = np.linspace(min(x_trainFeature).item(), max(x_trainFeature).item(),
                        num=NumOfPoints)
    x_ev1 = np.array(x_ev1).reshape(NumOfPoints, 1)
    phi1 = a1.design_matrix('polynomial', bias, x_ev1, NumOfPoints, 3, 0, 0 )
    y1 = phi1.dot(w)

    x_ev2 = np.linspace(min(min(x_trainFeature), min(x_testFeature)).item(),
                        max(max(x_trainFeature), max(x_testFeature)).item(),
                        num=NumOfPoints)
    x_ev2 = np.array(x_ev2).reshape(NumOfPoints, 1)
    phi2 = a1.design_matrix('polynomial', bias, x_ev2, NumOfPoints, 3, 0, 0 )
    y2 = phi2.dot(w)

    ax1.plot(x_ev1, y1, 'r.-')
    ax1.plot(x_trainFeature, t_train, 'o', color='b')
    ax1.plot(x_testFeature, t_test, 'o', color='g')
    ax2.plot(x_ev2, y2, 'r.-')
    ax2.plot(x_trainFeature, t_train, 'o', color='b')
    ax2.plot(x_testFeature, t_test, 'o', color='g')

    plt.show()
Example 10
def plot_fit(feature, bias, title, linspace_scale):
    x_train = values[0:N_TRAIN, feature - 1]
    x_test = values[N_TRAIN:, feature - 1]
    (w, train_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0, 3,
                                          bias, 0, 0)
    if linspace_scale == 'small':
        x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)
    elif linspace_scale == 'large':
        x_ev = np.linspace(min(min(x_train), min(x_test)).item(),
                           max(max(x_train), max(x_test)).item(),
                           num=500)
    x_ev = np.asmatrix(x_ev)
    phi_x_ev = a1.design_matrix('polynomial', np.transpose(x_ev), 3, bias, 0,
                                0)
    y_ev = np.dot(phi_x_ev, w)
    plt.plot(x_train, t_train, 'yo')
    plt.plot(x_test, t_test, 'bo')
    plt.plot(x_ev, np.transpose(y_ev), 'r.-')
    plt.legend(['Training data', 'Test data', 'Learned Polynomial'])
    plt.title('Visualization of a function and some data points' + title)
    plt.show()
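Hypothetical calls exercising plot_fit at both linspace scales (assumed; the original snippet does not show its driver, and the feature numbers and bias flag here just follow the conventions used elsewhere on this page):

for f in (11, 12, 13):
    plot_fit(f, bias='yes', title=' (feature %d)' % f, linspace_scale='small')
    plot_fit(f, bias='yes', title=' (feature %d)' % f, linspace_scale='large')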
Example 11
        elif i == k - 1:
            x_train = x_data[:i * 10, :]
            x_validation = x_data[i * 10:, :]
            t_train = t_data[:i * 10]
            t_validation = t_data[i * 10:]
        else:
            pos = 10 * i
            x_train = np.concatenate((x_data[:pos, :], x_data[pos + 10:, :]),
                                     axis=0)
            x_validation = x_data[pos:pos + 10, :]
            t_train = np.concatenate((t_data[:pos], t_data[pos + 10:]), axis=0)
            t_validation = t_data[pos:pos + 10]
        (w, tr_err) = a1.linear_regression(x_train,
                                           t_train,
                                           basis='polynomial',
                                           reg_lambda=lambda_value,
                                           degree=2,
                                           mu=0,
                                           s=1,
                                           bias_term=True)

        (test_preds, test_err) = a1.evaluate_regression(x_validation,
                                                        t_validation,
                                                        w,
                                                        degree=2,
                                                        bias_term=True,
                                                        basis='polynomial')
        train_error_arr.append(tr_err)
        valiation_error_arr.append(test_err)
    train_error_dict[lambda_value] = np.mean(train_error_arr)
    validation_error_dict[lambda_value] = np.mean(valiation_error_arr)
print('\n\n', validation_error_dict)
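The hand-rolled fold slicing above can be written more compactly; a sketch (an alternative, not the original author's code) using only numpy:

import numpy as np

def kfold_indices(n, k):
    # Yield (train_idx, val_idx) pairs for k contiguous folds over n rows.
    for fold in np.array_split(np.arange(n), k):
        yield np.setdiff1d(np.arange(n), fold), fold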
Example 12
N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]


# Complete the linear_regression and evaluate_regression functions in assignment1.py,
# then pass the required parameters to these functions.
max_degree = 6
train_err = dict()
test_err = dict()
# fit a polynomial basis function for degree 1 to degree 6
for degree in range(1, max_degree+1):
    (w, tr_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0, degree=degree, bias=1)
    # evaluate the RMS error for test data
    (t_est, te_err) = a1.evaluate_regression(x_test, t_test, w, 'polynomial', degree)
    train_err[degree] = tr_err
    test_err[degree] = te_err
print(train_err)
print(test_err)


# Produce a plot of results.
plt.rcParams.update({'font.size': 15})
plt.plot(list(train_err.keys()), list(train_err.values()))
plt.plot(list(test_err.keys()), list(test_err.values()))
plt.ylabel('RMS')
plt.legend(['Training error', 'Testing error'])
plt.title('Fit with normalized polynomials, no regularization')
Example 13
            x_train = x_trainData[0:10 * (i - 1), :]
            t_validate = t_trainData[10 * (i - 1):i * 10, :]
            t_train = t_trainData[0:10 * (i - 1), :]

        else:
            x_validate = x_trainData[10 * (i - 1):i * 10, :]
            t_validate = t_trainData[10 * (i - 1):i * 10, :]
            x_train1 = x_trainData[0:10 * (i - 1), :]
            x_train2 = x_trainData[i * 10:N_TRAIN, :]
            x_train = np.append(x_train1, x_train2, axis=0)

            t_train1 = t_trainData[0:10 * (i - 1), :]
            t_train2 = t_trainData[i * 10:N_TRAIN, :]
            t_train = np.append(t_train1, t_train2, axis=0)

        w, t_err = a1.linear_regression(x_train, t_train, 'polynomial', True,
                                        lambda_Val, 2, None, None)
        pred, val_err = a1.evaluate_regression(x_validate, t_validate,
                                               'polynomial', True, w, 2,
                                               None, None)
        val_err_List.append(val_err)
 
        
    sum_of_val_err = sum(val_err_List)
    avg_of_val_err = sum_of_val_err / 10
    if lambda_Val != 0:
        avg.append(avg_of_val_err)
    else:
        avglamzero = avg_of_val_err

del lam[0]    
print("Average",avg)
Example 14
N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

# TODO: Complete the linear_regression and evaluate_regression functions in assignment1.py
tr_err = np.zeros(6)
te_err = np.zeros(6)

# Repeat for degree 1 to degree 6 polynomial
for degree in range(1, 7):

    # linear_regression(x, t, basis, reg_lambda=0, degree=0):
    (w, tr_err[degree - 1]) = a1.linear_regression(x_train, t_train,
                                                   'polynomial', 0, degree)
    #evaluate_regression(x, t, w, basis, degree)
    (t_est,
     te_err[degree - 1]) = a1.evaluate_regression(x_test, t_test, w,
                                                  'polynomial', degree)

# Produce a plot of results.
degree = [1, 2, 3, 4, 5, 6]
plt.plot(degree, te_err)
plt.plot(degree, tr_err)
plt.ylabel('RMS')
plt.legend(['Test error', 'Training error'])
plt.title(
    'Fit with polynomials, no regularization, un-normalized training set')
plt.xlabel('Polynomial degree')
plt.show()
Example 15
t_test = targets[N_TRAIN:]

x_train = x[0:N_TRAIN, 3]
x_test = x[N_TRAIN:, 3]

print(x_train)

i_basis = 'ReLU'
i_degree = 0

# TODO: Complete the linear_regression and evaluate_regression functions in assignment1.py

train_err = []
test_err = []

(w, tr_err) = a1.linear_regression(x_train, t_train, i_basis, degree=i_degree)
train_err.append((1, tr_err))

(t_est, te_err) = a1.evaluate_regression(x_test,
                                         t_test,
                                         w,
                                         i_basis,
                                         degree=i_degree)
test_err.append((1, te_err))

train_err = np.array(train_err)
test_err = np.array(test_err)
print(train_err)
print(test_err)
# Produce a plot of results.
plt.plot(train_err[:, 0], train_err[:, 1], 'bo')
Example 16
import assignment1 as a1  # the a1 module referenced throughout these examples
import matplotlib.pyplot as plt

(countries, features, values) = a1.load_unicef_data()
N_TRAIN = 100
x = values[:, 7:]
targets = values[:, 1]
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]
train_err = {}
test_err = {}

for val in range(1, 7):
    w, tr_err = a1.linear_regression(x_train,
                                     t_train,
                                     basis='polynomial',
                                     degree=val)
    train_err[val] = tr_err
    # Keep the predictions in their own variable; reusing the target
    # array here would corrupt the test targets for later degrees.
    t_est, test_error = a1.evaluate_regression(x_test,
                                               t_test,
                                               w=w,
                                               basis='polynomial',
                                               degree=val)
    test_err[val] = test_error

plt.plot(list(train_err.keys()), list(train_err.values()))
plt.plot(list(test_err.keys()), list(test_err.values()))
plt.ylabel('Root Mean Square')
plt.legend(['Train Error', 'Test Error'])
plt.title('Fit with polynomials, no Regularization')
plt.xlabel('Polynomial Degree')
Example 17
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]
N_TEST = x_test.shape[0]

degree = 0
# TODO: Complete the linear_regression and evaluate_regression functions in assignment1.py
(w, training_err) = a1.linear_regression(x_train, t_train, 'ReLU', degree)
train_err = training_err
(t_est, testing_err) = a1.evaluate_regression(x_test, t_test, w, 'ReLU',
                                              degree)
test_err = testing_err
print("training error is: " + str(training_err))
print("testing error is: " + str(testing_err))
x_ev = x_train[:, 0]
x_sample = np.linspace(min(x_ev).item(), max(x_ev).item(), num=500)
phi_sample = np.matrix([[1] + [max(0, (5000 - x_sample[i]))]
                        for i in range(len(x_sample))])
y_sample = phi_sample * w
#print w.shape
#print phi_sample.shape
#print y_sample.shape
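A plausible continuation (assumed; the original snippet stops at the shape checks) plots the learned ReLU fit against the training points, mirroring the other examples:

plt.plot(x_sample, y_sample, 'r.-')
plt.plot(x_ev, t_train, 'bo')
plt.legend(['Learned ReLU fit', 'Training data'])
plt.show()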
Example 18
col = 11
mu = [100, 10000]
s = 2000.0

x_train = values[0:N_TRAIN, col]
x_test = values[N_TRAIN:, col]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

(w, tr_err) = a1.linear_regression(x_train,
                                   t_train,
                                   'sigmoid',
                                   bias=bias,
                                   mu=mu,
                                   s=s)
(t_est, te_err) = a1.evaluate_regression(x_test,
                                         t_test,
                                         w,
                                         'sigmoid',
                                         bias=bias,
                                         mu=mu,
                                         s=s)

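For reference, a minimal sketch of the sigmoid basis implied by the mu and s arguments above (an assumption, not the assignment's a1 code): one logistic bump per mu_j, plus a bias column.

import numpy as np

def sigmoid_design_matrix(x, mu, s):
    # phi_j(x) = 1 / (1 + exp((mu_j - x) / s)), i.e. sigma((x - mu_j) / s)
    x = np.asarray(x, dtype=float).reshape(-1, 1)
    phi = 1.0 / (1.0 + np.exp((np.asarray(mu) - x) / s))
    return np.hstack([np.ones((x.shape[0], 1)), phi])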
Example 19
(countries, features, values) = a1.load_unicef_data()

targets = values[:, 1]
x = values[:, :]
#x = a1.normalize_data(x)

# Select the feature
feature = 12
print(features[feature])
N_TRAIN = 100
x_train = x[0:N_TRAIN, feature]
x_test = x[N_TRAIN:, feature]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

(w, tr_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0, 3)
# Plot a curve showing learned function.
# Use linspace to get a set of samples on which to evaluate
x_ev = np.linspace(min(x_train).item(),
                   max(x_train).item(),
                   num=500).reshape(500, 1)

# x1_ev = np.linspace(0, 10, num=500)
# x2_ev = np.linspace(0, 10, num=50)

# Perform regression on the linspace samples and put the estimate in y_ev.

x_designM = a1.design_matrix(x_ev, 3, 'polynomial')
y_ev = x_designM * w
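A plausible continuation (assumed; the snippet ends here) plots the estimate the way the other examples do:

plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train, t_train, 'bo')
plt.plot(x_test, t_test, 'go')
plt.legend(['Learned polynomial', 'Training data', 'Test data'])
plt.show()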
Example 20
import assignment1 as a1  # the a1 module referenced throughout these examples
import numpy as np
import matplotlib.pyplot as plt

(countries, features, values) = a1.load_unicef_data()

targets = values[:, 1]
x = values[:, 7:]
x = a1.normalize_data(x)
N_TRAIN = 100
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

for i in range(10, 13):
    x_1 = values[:, i]
    x_train = x_1[0:N_TRAIN, :]
    x_test = x_1[N_TRAIN:, :]
    x_ev = np.linspace(np.asscalar(min(x_train)),
                       np.asscalar(max(x_train)),
                       num=500)
    x_ev = np.reshape(x_ev, (x_ev.shape[0], 1))
    phi_ev = a1.design_matrix(x_ev, basis='polynomial', degree=3)
    w, _ = a1.linear_regression(x_train,
                                t_train,
                                basis='one_polynomial',
                                degree=3)
    y_ev = phi_ev * w
    plt.plot(x_ev, y_ev, '.-', color='pink')
    plt.plot(x_train, t_train, 'o', color='yellow')
    plt.plot(x_test, t_test, 'o', color='blue')
    plt.show()
Example 21
#lamda_val = [0]
avg_lambda_err = []

# Produce a plot of results.
for i in lamda_val:
    er = []
    for j in range(10, 101, 10):
        x_val_set = x_train[j - 10:j, :]
        val_1 = x_train[0:j - 10, :]
        val_2 = x_train[j:100, :]
        x_train_set = np.vstack((val_1, val_2))
        t_val_set = t_train[j - 10:j, :]
        val_1 = t_train[0:j - 10, :]
        val_2 = t_train[j:100, :]
        t_train_set = np.vstack((val_1, val_2))
        (w, tr_err, pred) = a1.linear_regression(x_train_set, t_train_set,
                                                 'polynomial', i, 2)
        (y_ev, te_err) = a1.evaluate_regression(x_val_set, t_val_set, w,
                                                'polynomial', 2)
        er.append(te_err)
    avg_lambda_err.append(np.mean(er))

plt.semilogx(lamda_val, avg_lambda_err)
plt.ylabel('Average Validation Set Error')
plt.title('Regularized Polynomial Regression 10 Fold')
plt.xlabel('Lambda on log scale')
plt.show()
Example 22
N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

i_basis = 'polynomial'

# TODO: Complete the linear_regression and evaluate_regression functions in assignment1.py
train_err = []
test_err = []
for p in range(1, 7):
    print(p)
    (w, tr_err) = a1.linear_regression(x_train,
                                       t_train,
                                       i_basis,
                                       reg_lambda=0,
                                       degree=p)
    train_err.append((p, tr_err))
    print(tr_err)
    (t_est, te_err) = a1.evaluate_regression(x_test,
                                             t_test,
                                             w,
                                             i_basis,
                                             degree=p)
    print(te_err)
    test_err.append((p, te_err))

train_err = np.array(train_err)
test_err = np.array(test_err)
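A plausible continuation (assumed; the original stops after collecting the arrays) plots RMS against degree from the (degree, error) pairs:

plt.plot(train_err[:, 0], train_err[:, 1])
plt.plot(test_err[:, 0], test_err[:, 1])
plt.ylabel('RMS')
plt.legend(['Training error', 'Testing error'])
plt.xlabel('Polynomial degree')
plt.show()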
Example 23
N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

degree = 6

train_err = dict()
test_err = dict()

for i in range(1, degree + 1):
    bigphi = a1.design_matrix('polynomial', x_train, i, 1)

    (w, rms_train) = a1.linear_regression(x_train, t_train, bigphi, -1, i, 0,
                                          0)

    test_phi = a1.design_matrix('polynomial', x_test, i, 1)
    rms_test = a1.evaluate_regression(test_phi, w, t_test)
    train_err[i] = rms_train
    test_err[i] = rms_test

# Produce a plot of results.
plt.rcParams.update({'font.size': 15})
plt.plot([float(k) for k in train_err.keys()], list(train_err.values()))
Example 24
x = a1.normalize_data(x)

N_TRAIN = 100
targets = values[:N_TRAIN, 1]
x = x[0:N_TRAIN, :]
lambda_list = [0, 0.01, 0.1, 1, 10, 100, 1000, 10000]
average_list = []

for lam in lambda_list:
    err_sum = 0
    for fold in range(1, 11):
        x_vali = x[(fold - 1) * 10:fold * 10, :]
        t_vali = targets[(fold - 1) * 10:fold * 10]
        x_train = np.vstack((x[0:(fold - 1) * 10, :], x[10 * fold:, :]))
        t_train = np.vstack((targets[0:(fold - 1) * 10], targets[10 * fold:]))
        (w, train_err) = a1.linear_regression(x_train, t_train, 'polynomial',
                                              lam, 2, 'yes', 0, 0)
        (t_est, test_err) = a1.evaluate_regression(x_vali, t_vali, w, 2,
                                                   'polynomial', 'yes', 0, 0)
        err_sum = err_sum + float(test_err)
    average = err_sum / 10
    print(average)
    average_list.append(average)

plt.rcParams.update({'font.size': 15})
plt.semilogx(lambda_list, average_list)
plt.ylabel('Average RMS')
plt.legend(['Average Validation error'])
plt.title('Fit with degree 2 polynomials')
plt.xlabel('log scale lambda')
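One gotcha not handled above: lambda = 0 has no position on a log-scaled axis, so semilogx silently masks that point. A common workaround (an assumption, not in the original) is to substitute a small positive value when plotting:

# Map lambda=0 to a tiny epsilon so the point appears on the log axis.
plot_lambdas = [lam if lam > 0 else 1e-3 for lam in lambda_list]
plt.semilogx(plot_lambdas, average_list)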
Example 25
x_ev = np.linspace(min(x_train[:, f]).item(),
                   max(x_train[:, f]).item(),
                   num=500)
x_ev2 = np.linspace(min(min(x_train[:, f]), min(x_test[:, f])).item(),
                    max(max(x_train[:, f]), max(x_test[:, f])).item(),
                    num=500)
x_ev = x_ev.reshape((500, 1))
x_ev2 = x_ev2.reshape((500, 1))

# Perform regression on the linspace samples.
(w, tr_err) = a1.linear_regression(x_train[:, f],
                                   t_train,
                                   'polynomial',
                                   reg_lambda=0,
                                   degree=3,
                                   bias=1)
phi = a1.design_matrix(x_ev, degree=3, basis='polynomial', bias=1)
phi2 = a1.design_matrix(x_ev2, degree=3, basis='polynomial', bias=1)
# Evaluate the learned polynomial on the linspace samples.
y_ev = np.dot(phi, w)
y_ev2 = np.dot(phi2, w)

# Produce a plot of results.
plt.subplot(121)
plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train[:, f], t_train, 'b.')
plt.plot(x_test[:, f], t_test, 'g.')
plt.legend(['fit polynomial', 'Training data', 'Testing data'])
Example 26
(countries, features, values) = a1.load_unicef_data()

targets = values[:, 1]
x = values[:, 7:]

N_TRAIN = 100
#Example of selecting a single feature for training
x_train = x[0:N_TRAIN, 3]
t_train = targets[0:N_TRAIN]

#Selecting a feature for both test inputs and test targets [example]
x_test = x[N_TRAIN:, 3]
t_test = targets[N_TRAIN:]

#print("x_train",x_train)
(weights, training_error) = a1.linear_regression(x_train, t_train, "ReLU", 0,
                                                 0)
tup = (weights, training_error)

(estimate, test_err) = a1.evaluate_regression(x_test, t_test, tup[0], "ReLU",
                                              0)
tupl = (estimate, test_err)

print(tup[0])
print("training error: ", tup[1])

print("test error is: ", tupl[1])

x_min = np.amin(x_train)
x_max = np.amax(x_train)

x_ev = np.linspace(x_min, x_max, num=500)
Example 27
    feature_list_input_train.append(x[0:N_TRAIN, i])

## List to store our test inputs for test validation
feature_list_target_test = []
for i in range(number_of_input_features):
    feature_list_target_test.append(x[N_TRAIN:, i])

print("---Printing element list from feature list---")

print("Target dimensions: ", t_train.shape[0], t_train.shape[1])
lenArg = len(feature_list_input_train)

# List to store the following as elements (weights,trainingerror)
werr = []
for i in range(lenArg):
    (w, tr_err) = a1.linear_regression(feature_list_input_train[i], t_train,
                                       "polynomial", 0, 3)
    tup = (w, tr_err)
    werr.append(tup)
    #print(feature_list[i].shape[0])
    #print(feature_list[i].shape[1])

#for j in range(len(werr)):
#print(werr[j][0])

### List to store the following as elements (estimates, te_err)
lstZ = []
for i in range(lenArg):
    (t_est, te_err) = a1.evaluate_regression(feature_list_target_test[i],
                                             t_test, werr[i][0], "polynomial",
                                             3)
    tup2 = (t_est, te_err)
Example 28
targets = values[:,1]
x = values[:,10]
#x = a1.normalize_data(x)

N_TRAIN = 100
x_train = x[0:N_TRAIN,:]
x_test = x[N_TRAIN:,:]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]
train_err = dict()
test_err = dict()


train_bigphi = a1.design_matrix('sigmoid', x_train, 0, 0)
(w, train_error) = a1.linear_regression(x_train, t_train, train_bigphi, -1, 0,
                                        100, 2000)

test_bigphi = a1.design_matrix('sigmoid', x_test, 0, 0)
test_error = a1.evaluate_regression(test_bigphi, w, t_test)


print(train_error)
print(test_error)

# Create a plot.
x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)
x_ev = np.transpose(np.asmatrix(x_ev))
Example 29
for bias in bias_arr:
    weight = []
    train_err = []
    test_err = []

    for i in range(7, 15):
        targets = values[:, 1]
        x = values[:, i]

        N_TRAIN = 100
        x_train = x[0:N_TRAIN, :]
        x_test = x[N_TRAIN:, :]
        t_train = targets[0:N_TRAIN]
        t_test = targets[N_TRAIN:]

        w, t_err = a1.linear_regression(x_train, t_train, 'polynomial', bias,
                                        0, 3, 1)
        pred, te_err = a1.evaluate_regression(x_test, t_test, 'polynomial',
                                              bias, w, 3)
        weight.append(w)
        train_err.append(t_err)
        test_err.append(te_err)

    print("TrainError for bais =", bias, train_err)
    print("TestError for bais =", bias, test_err)

    # create plot
    fig, ax = plt.subplots()
    index = np.arange(8)
    bar_width = 0.35
    opacity = 0.8
Example 30

N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

train_err = np.zeros(6)
test_err = np.zeros(6)

# TODO: Complete the linear_regression and evaluate_regression functions in assignment1.py

for n in range(1, 7):
    (w_tr, tr_err) = a1.linear_regression(x=x_train, t=t_train, basis='polynomial',
                                          degree=n, dflag=1, w=0)
    te_err = a1.linear_regression(x=x_test, t=t_test, basis='polynomial',
                                  degree=n, dflag=0, w=w_tr)
    train_err[n - 1] = tr_err
    test_err[n - 1] = te_err

x = a1.normalize_data(x)
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
ntrain_err = np.zeros(6)
ntest_err = np.zeros(6)


for n in range(1, 7):
    (w_tr, tr_err) = a1.linear_regression(x=x_train, t=t_train, basis='polynomial',
                                          degree=n, dflag=1, w=0)
    te_err = a1.linear_regression(x=x_test, t=t_test, basis='polynomial',
                                  degree=n, dflag=0, w=w_tr)
    ntrain_err[n - 1] = tr_err
    ntest_err[n - 1] = te_err