Example #1
def regression_reg(lamda, deg):
    # 10-fold cross-validation over the training set: average the validation
    # RMSE across folds for a given regularization coefficient and degree.
    validation_err = 0
    for i in range(10):
        # Hold out rows i*10 .. (i+1)*10 as the validation fold.
        x_val = x_train[i * 10:(i + 1) * 10, :]
        t_val = t_train[i * 10:(i + 1) * 10, :]
        x_train_use = np.concatenate(
            (x_train[0:i * 10, :], x_train[(i + 1) * 10:, :]), 0)
        t_train_use = np.concatenate(
            (t_train[0:i * 10, :], t_train[(i + 1) * 10:, :]), 0)

        bigphi = a1.design_matrix('polynomial', x_train_use, deg, 1)
        (w, rms_train) = a1.linear_regression(x_train_use, t_train_use, bigphi,
                                              lamda, deg, 0, 0)

        bigphi_val = a1.design_matrix('polynomial', x_val, deg, 1)
        rms_val_error = a1.evaluate_regression(bigphi_val, w, t_val)
        validation_err += rms_val_error

    validation_err_avg[lamda] = validation_err / 10
    print(validation_err_avg)
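The a1 helpers themselves are not shown in these listings. As a rough, hedged sketch of what linear_regression and evaluate_regression plausibly compute, here is a self-contained regularized least-squares fit with the closed-form solution w = (lambda*I + Phi^T Phi)^(-1) Phi^T t plus an RMS error helper; the function names and signatures below are illustrative, not the actual a1 API.

import numpy as np

def fit_regularized(phi, t, reg_lambda=0.0):
    # Closed-form regularized least squares:
    # w = (lambda * I + Phi^T Phi)^(-1) Phi^T t
    d = phi.shape[1]
    return np.linalg.solve(reg_lambda * np.eye(d) + phi.T @ phi, phi.T @ t)

def rms_error(phi, w, t):
    # Root-mean-square error of the predictions phi @ w against targets t.
    return float(np.sqrt(np.mean(np.square(phi @ w - t))))

# Smoke test on synthetic data.
rng = np.random.default_rng(0)
phi = rng.normal(size=(100, 4))
t = phi @ np.array([1.0, -2.0, 0.5, 3.0]) + 0.01 * rng.normal(size=100)
w = fit_regularized(phi, t, reg_lambda=0.1)
print(rms_error(phi, w, t))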
Example #2
def plot_curve(title, w, feature_no, max_x_limit):
    # Plot a curve showing learned function.

    # Use linspace to get a set of samples on which to evaluate.
    # (np.asscalar was removed from NumPy; .item() is the equivalent.)
    x2_ev = np.linspace(min(x[:, feature_no]).item(),
                        max(x[:, feature_no]).item(),
                        num=500)
    x2_ev = np.reshape(x2_ev, (-1, 1))

    phi = a1.design_matrix(x2_ev,
                           'polynomial',
                           poly_degree=POLYNOMIAL_DEGREE,
                           include_bias=True)
    y_ev = phi * w

    plt.xlim(0, max_x_limit)
    plt.plot(x_train, t_train, 'bo', markersize=3)
    plt.plot(x_test, t_test, 'g^', markersize=3)
    plt.plot(x2_ev, y_ev, 'r.-')

    plt.xlabel('x')
    plt.ylabel('t')

    plt.title(title)
    plt.show()
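design_matrix itself is not part of the listing. A minimal sketch of a polynomial design matrix with an optional bias column, assuming one input feature expanded to powers 1..degree; the name and signature here are illustrative only.

import numpy as np

def polynomial_design_matrix(x, degree, include_bias=True):
    # Stack powers x^1 .. x^degree column-wise, optionally prepending ones.
    x = np.asarray(x, dtype=float).reshape(-1, 1)
    cols = [x ** d for d in range(1, degree + 1)]
    if include_bias:
        cols.insert(0, np.ones_like(x))
    return np.hstack(cols)

# Degree-3 expansion of three sample points:
print(polynomial_design_matrix([1.0, 2.0, 3.0], degree=3))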
Example #3
def plot_feature(feature):
    # Features are numbered from 8 in the dataset, so the column is feature-8.
    x_train = x[0:N_TRAIN, feature - 8]
    x_test = x[N_TRAIN:, feature - 8]
    # Plot a curve showing the learned function.
    # Use linspace to get a set of samples on which to evaluate.
    x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)

    bigphi = a1.design_matrix('polynomial', x_train, 3, 1)
    bigphi_ev = a1.design_matrix('polynomial', np.transpose(np.asmatrix(x_ev)), 3, 1)

    (w, rms_train) = a1.linear_regression(x_train, t_train, bigphi, -1, 3, 0, 0)
    y_ev = np.transpose(w) * np.transpose(bigphi_ev)
    plt.plot(x_train, t_train, 'bo')
    plt.plot(x_test, t_test, 'go')
    plt.plot(x_ev, np.transpose(y_ev), 'r.-')

    plt.show()
def PolynomialRegression(bias,featureNum):
    (countries, features, values) = a1.load_unicef_data()
    targets = values[:,1]
    x = values[:,7:]
    # x = a1.normalize_data(x)
    N_TRAIN = 100
    ALL = 195
    x_train = x[0:N_TRAIN,:]
    x_test = x[N_TRAIN:,:]
    t_train = targets[0:N_TRAIN]
    t_test = targets[N_TRAIN:]
    x_trainFeature = x_train[:,featureNum]
    x_testFeature = x_test[:,featureNum]

    (w, t_err) = a1.linear_regression(x_trainFeature, t_train, 'polynomial', 0, 3,0 ,1 ,N_TRAIN,bias)
    (t_est, te_err) = a1.evaluate_regression('polynomial',x_testFeature, w, t_test, 3, ALL-N_TRAIN, bias)
    train_error = np.sqrt(np.sum(t_err) / 100)   # RMSE over the 100 training rows
    test_error = np.sqrt(np.sum(te_err) / 95)    # RMSE over the 195 - 100 = 95 test rows

    fig, (ax1, ax2) = plt.subplots(2)
    fig.suptitle('Visualization of feature ' + str(featureNum + 8))
    NumOfPoints = 500
    x_ev1 = np.linspace(min(x_trainFeature).item(), max(x_trainFeature).item(), num=NumOfPoints)
    x_ev1 = np.array(x_ev1).reshape(NumOfPoints, 1)
    phi1 = a1.design_matrix('polynomial', bias, x_ev1, NumOfPoints, 3, 0, 0 )
    y1 = phi1.dot(w)

    x_ev2 = np.linspace(min(min(x_trainFeature), min(x_testFeature)).item(),
                        max(max(x_trainFeature), max(x_testFeature)).item(),
                        num=NumOfPoints)
    x_ev2 = np.array(x_ev2).reshape(NumOfPoints,1)
    phi2 = a1.design_matrix('polynomial', bias, x_ev2, NumOfPoints, 3, 0, 0 )
    y2 = phi2.dot(w)

    ax1.plot(x_ev1, y1, 'r.-')
    ax1.plot(x_trainFeature, t_train, 'bo')
    ax1.plot(x_testFeature, t_test, 'go')
    ax2.plot(x_ev2, y2, 'r.-')
    ax2.plot(x_trainFeature, t_train, 'bo')
    ax2.plot(x_testFeature, t_test, 'go')

    plt.show()
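For readers without the a1 module, here is a self-contained miniature of the same pipeline on synthetic data (train/test split, cubic least-squares fit, RMSE on both splits, curve over a linspace). It sketches the idea only; it is not the assignment's implementation.

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(1)
x = np.sort(rng.uniform(0, 4, size=195))
t = 2 + x ** 3 - 3 * x + rng.normal(scale=1.0, size=x.shape)

N_TRAIN = 100
x_train, x_test = x[:N_TRAIN], x[N_TRAIN:]
t_train, t_test = t[:N_TRAIN], t[N_TRAIN:]

def design(x, degree=3):
    # Vandermonde matrix: bias column plus powers 1..degree.
    return np.vander(x, degree + 1, increasing=True)

w, *_ = np.linalg.lstsq(design(x_train), t_train, rcond=None)

def rmse(x, t):
    return np.sqrt(np.mean((design(x) @ w - t) ** 2))

print('train RMSE:', rmse(x_train, t_train), 'test RMSE:', rmse(x_test, t_test))

x_ev = np.linspace(x.min(), x.max(), 500)
plt.plot(x_train, t_train, 'bo', markersize=3)
plt.plot(x_test, t_test, 'go', markersize=3)
plt.plot(x_ev, design(x_ev) @ w, 'r-')
plt.show()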
Example #5
def plot_learned_function(title):
    # Use linspace to get a set of samples on which to evaluate
    x_ev = np.linspace(min(x).item(), max(x).item(), num=500)

    # learn y_ev based on x_ev
    x_ev_reshaped=np.reshape(x_ev, (-1,1))
    phi = a1.design_matrix(x_ev_reshaped, 'sigmoid', miu=MU, s=S, include_bias=True)
    y_ev = phi * w

    plt.plot(x_train, t_train, 'bo', markersize=3)
    plt.plot(x_test, t_test,'g^', markersize=3)
    plt.plot(x_ev, y_ev, 'r.-')

    plt.xlim(0,200000)
    plt.ylim(0, 200)

    plt.xlabel('x')
    plt.ylabel('t')

    plt.title(title)
    plt.show()
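The sigmoid basis behind miu=MU and s=S is not shown. A common definition, assumed here, is one column per center mu_j with phi_j(x) = 1 / (1 + exp(-(x - mu_j) / s)), plus an optional bias column:

import numpy as np

def sigmoid_design_matrix(x, mu, s, include_bias=True):
    # phi_j(x) = logistic((x - mu_j) / s), one column per center mu_j.
    x = np.asarray(x, dtype=float).reshape(-1, 1)
    mu = np.asarray(mu, dtype=float).reshape(1, -1)
    phi = 1.0 / (1.0 + np.exp(-(x - mu) / s))
    if include_bias:
        phi = np.hstack([np.ones((phi.shape[0], 1)), phi])
    return phi

# Two centers, as in the mu = [100, 10000], s = 2000 examples below:
print(sigmoid_design_matrix([0.0, 100.0, 10000.0], mu=[100, 10000], s=2000.0))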
def plot_fit(feature, bias, title, linspace_scale):
    x_train = values[0:N_TRAIN, feature - 1]
    x_test = values[N_TRAIN:, feature - 1]
    (w, train_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0, 3,
                                          bias, 0, 0)
    if linspace_scale == 'small':
        x_ev = np.linspace(min(x_train).item(),
                           max(x_train).item(),
                           num=500)
    elif linspace_scale == 'large':
        x_ev = np.linspace(min(min(x_train), min(x_test)).item(),
                           max(max(x_train), max(x_test)).item(),
                           num=500)
    x_ev = np.asmatrix(x_ev)
    phi_x_ev = a1.design_matrix('polynomial', np.transpose(x_ev), 3, bias, 0,
                                0)
    y_ev = np.dot(phi_x_ev, w)
    plt.plot(x_train, t_train, 'yo')
    plt.plot(x_test, t_test, 'bo')
    plt.plot(x_ev, np.transpose(y_ev), 'r.-')
    plt.legend(['Training data', 'Test data', 'Learned Polynomial'])
    plt.title('Visualization of a function and some data points' + title)
    plt.show()
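A hypothetical usage of plot_fit (the feature number and title string are made up for illustration): 'small' zooms to the training range, while 'large' spans the combined train and test range.

plot_fit(11, 1, ' (feature 11, training range)', 'small')
plot_fit(11, 1, ' (feature 11, full range)', 'large')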
(countries, features, values) = a1.load_unicef_data()

targets = values[:,1]
x = values[:,10]
#x = a1.normalize_data(x)

N_TRAIN = 100
x_train = x[0:N_TRAIN,:]
x_test = x[N_TRAIN:,:]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]
train_err = dict()
test_err = dict()


train_bigphi = a1.design_matrix('sigmoid', x_train, 0, 0)
(w, train_error) = a1.linear_regression(x_train, t_train, train_bigphi, -1, 0, 100, 2000)

test_bigphi = a1.design_matrix('sigmoid', x_test, 0, 0)
test_error = a1.evaluate_regression(test_bigphi, w, t_test)


print(train_error)
print(test_error)

# Create a plot of the learned function over the training range.
x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)
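The snippet ends after building x_ev. A hedged completion of the plotting step, following the design-matrix-then-predict pattern this snippet already uses (an assumption, since the original is cut off here):

x_ev = np.reshape(x_ev, (-1, 1))
bigphi_ev = a1.design_matrix('sigmoid', x_ev, 0, 0)
y_ev = bigphi_ev * w
plt.plot(x_train, t_train, 'bo')
plt.plot(x_test, t_test, 'go')
plt.plot(x_ev, y_ev, 'r.-')
plt.show()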
Example #8
                                                         mu=MU,
                                                         s=S,
                                                         include_bias=True)
(pred_test, RMSE_test) = a1.evaluate_regression(x_test,
                                                t_test,
                                                w_train,
                                                basis='sigmoid',
                                                degree=-1,
                                                mu=MU,
                                                s=S,
                                                include_bias=True)
# print('Training error is: ',RMSE_train)
# print('Testing error is: ',RMSE_test)

xx = np.linspace(np.min(x_train), np.max(x_train), num=1000).reshape(1000, 1)
phi = a1.design_matrix(xx, "sigmoid", -1, MU, S, True)
y = phi * w_train
plt.plot(x_train, t_train, 'b+')
plt.plot(x_test, t_test, 'r+')
plt.plot(xx, y, 'y.')
plt.xlabel('Capita')
plt.ylabel('GNI in US$')
plt.title('Visualization of a sigmoid function with bias for ' + features)
plt.legend(['Training data', 'Testing data', 'Linear reg with sigmoid basis'])
plt.show()

print(RMSE_train)
print(RMSE_test)
plt.bar(1, RMSE_train, 0.20, label='training RMSE')
plt.bar(1 + 0.3, RMSE_test, 0.20, label='testing RMSE')
plt.xlabel('Set')

train_err1 = dict()
test_err1 = dict()

train_err = dict()
test_err = dict()


#with bias


for i in range(0, 8):
    train_feature = x_train[:, i]
    test_feature = x_test[:, i]
    bigphi = a1.design_matrix('polynomial', train_feature, 3, 1)

    (w, rms_train) = a1.linear_regression(train_feature, t_train, bigphi, -1, 3, 0, 0)

    test_phi = a1.design_matrix('polynomial', test_feature, 3, 1)
    rms_test = a1.evaluate_regression(test_phi, w, t_test)
    train_err[i] = rms_train
    test_err[i] = rms_test

#without bias
for i in range(0, 8):
    train_feature = x_train[:, i]
    test_feature = x_test[:, i]
    bigphi = a1.design_matrix('polynomial', train_feature, 3, 0)

    (w, rms_train) = a1.linear_regression(train_feature, t_train, bigphi, -1, 3, 0, 0)
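    # Assumed continuation (the original listing is truncated here): mirror
    # the with-bias loop above, evaluating on a bias-free test design matrix
    # and filling the train_err1/test_err1 dicts declared earlier.
    test_phi = a1.design_matrix('polynomial', test_feature, 3, 0)
    rms_test = a1.evaluate_regression(test_phi, w, t_test)
    train_err1[i] = rms_train
    test_err1[i] = rms_test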
Example #10
                                   'sigmoid',
                                   mu=10000,
                                   s=2000)
print(w)
(t_est, te_err) = a1.evaluate_regression(w,
                                         x_test,
                                         t_test,
                                         "sigmoid",
                                         mu=10000,
                                         s=2000)

# Plot a curve showing learned function.
# Use linspace to get a set of samples on which to evaluate
x_ev = np.linspace(min(x_train).item(),
                   max(x_train).item(),
                   num=500).reshape(500, 1)

# Evaluate the learned sigmoid model on the linspace samples,
# using the same mu and s that were used to fit w.
x_designM = a1.design_matrix(x_ev, 3, 'sigmoid', mu=10000, s=2000)
y_ev = x_designM * w
print(tr_err, te_err)

plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train, t_train, 'bo')
plt.plot(x_test, t_test, 'go')
plt.legend(['Fitted regression', 'Train data', 'Test Data'])
plt.title('Visualization of a function and some data points')
plt.show()
Example #11
(w, tr_err) = a1.linear_regression(x_train, t_train, 'polynomial', 0, 3)
# Plot a curve showing learned function.
# Use linspace to get a set of samples on which to evaluate
x_ev = np.linspace(min(x_train).item(),
                   max(x_train).item(),
                   num=500).reshape(500, 1)

# Evaluate the learned cubic polynomial on the linspace samples.
x_designM = a1.design_matrix(x_ev, 3, 'polynomial')
y_ev = x_designM * w

plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train, t_train, 'bo')
plt.plot(x_test, t_test, 'go')
plt.legend(['Fitted regression', 'Train data', 'Test Data'])
plt.title('Visualization of a function and some data points')
plt.show()
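Several of these snippets rely on phi * w performing matrix multiplication, which holds only when the operands are np.matrix objects (as np.asmatrix returns); with plain ndarrays, * is elementwise and @ is the matrix product. A two-line illustration:

import numpy as np

phi = np.asmatrix(np.arange(6.0).reshape(3, 2))
w = np.asmatrix([[1.0], [2.0]])
print(phi * w)                           # np.matrix: '*' is matrix multiplication
print(np.asarray(phi) @ np.asarray(w))   # ndarray: '@' gives the same product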
                                   s=s,
                                   bias_term=bias)
(test_preds, test_err) = a1.evaluate_regression(x_test,
                                                t_test,
                                                w,
                                                degree=3,
                                                bias_term=bias,
                                                basis='sigmoid',
                                                mu=mu,
                                                s=s)

x_ev = np.linspace(min(x_train).item(),
                   max(x_train).item(),
                   num=500)
x_ev = np.asmatrix(x_ev).T
phi = a1.design_matrix(x_ev, 3, basis='sigmoid', bias_term=bias, mu=mu, s=s)
curve_preds = phi @ w  # predictions along the linspace, not on the training set
plt.plot(x_train, t_train, 'c*')
plt.plot(x_ev, curve_preds, 'ro')
plt.plot(x_test, t_test, 'b*')
plt.xlabel('X')
plt.ylabel('t')
plt.legend(['Train data', 'Sigmoid fit', 'Test data'])
title = 'Fit for feature 11 using sigmoid basis functions'
plt.title(title)
plt.show()
labels = ['train_error', 'test_error']
errors = [tr_err, test_err]
print(errors)
n_groups = 2
fig, ax = plt.subplots()
test_err_bias[features[specific_feature]] = te_err
print("train_error is:" + str(tr_err))
print("test_error is:" + str(te_err))

# Use linspace to get a set of samples on which to evaluate
x_ev = np.linspace(np.ndarray.item(min(x_train)),
                   np.ndarray.item(max(x_train)),
                   num=500)
x_ev2 = np.linspace(np.ndarray.item(min(min(x_train), min(x_test))),
                    np.ndarray.item(max(max(x_train), max(x_test))),
                    num=500)
x_ev = x_ev.reshape((500, 1))
x_ev2 = x_ev2.reshape((500, 1))

# Perform regression on the linspace samples.
phi = a1.design_matrix(x_ev, mu=mu, s=s, basis='sigmoid', bias=1)
phi2 = a1.design_matrix(x_ev2, mu=mu, s=s, basis='sigmoid', bias=1)
y_ev = np.dot(phi, w)
y_ev2 = np.dot(phi2, w)

# Produce a plot of results.
plt.subplot(121)
plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train, t_train, 'b.')
plt.plot(x_test, t_test, 'g.')
plt.legend(['fit polynomial', 'Training data', 'Testing data'])
plt.title('Visualization of linear regression function')
plt.xlabel(features[specific_feature] +
           " values \nfrom [min(x_train), max(x_train)]")
plt.ylabel("target values")
Example #14
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]
featureNum = 3
x_trainFeature = x_train[:, featureNum]
x_testFeature = x_test[:, featureNum]
mu = [100, 10000]
s = 2000.0
bias = 'yes'
(w, t_err) = a1.linear_regression(x_trainFeature, t_train, 'sigmoid', 0, 0, mu,
                                  s, N_TRAIN, bias)
(t_est, te_err) = a1.evaluate_regression('sigmoid', x_testFeature, w, t_test,
                                         0, ALL - N_TRAIN, bias, mu, s)
train_error = np.sqrt(np.sum(t_err) / 100)   # RMSE over the 100 training rows
test_error = np.sqrt(np.sum(te_err) / 95)    # RMSE over the 95 test rows
print('train error = ', train_error)
print('test  error = ', test_error)

NumOfPoints = 500
x_ev2 = np.linspace(min(min(x_trainFeature), min(x_testFeature)).item(),
                    max(max(x_trainFeature), max(x_testFeature)).item(),
                    num=NumOfPoints)
x_ev2 = np.array(x_ev2).reshape(NumOfPoints, 1)
phi2 = a1.design_matrix('sigmoid', bias, x_ev2, NumOfPoints, 3, mu, s)
y2 = phi2.dot(w)

plt.plot(x_trainFeature, t_train, 'bo')
plt.plot(x_testFeature, t_test, 'go')
plt.plot(x_ev2, y2, 'r.-')
plt.show()
    # print(features[i])
    xx = np.linspace(np.min(x_train[:, i]), np.max(x_train[:, i]),
                     1000).reshape(1000, 1)
    xx_test = np.linspace(np.min(x_test[:, i]), np.max(x_test[:, i]),
                          1000).reshape(1000, 1)
    (pred_train, w_train,
     RMSE_train) = a1.linear_regression(x_train[:, i],
                                        t_train,
                                        basis='polynomial',
                                        reg_lambda=0,
                                        degree=3,
                                        mu=0,
                                        s=1,
                                        include_bias=True)
    phi = a1.design_matrix(xx, 'polynomial', 3, 0, 1, True)
    y = phi * w_train

    plt.plot(x_train[:, i], t_train, 'b+')
    plt.plot(x_test[:, i], t_test, 'r+')
    plt.plot(xx, y, 'g')
    plt.title('Plot with bias of ' + features[i])
    plt.show()

############ no bias #################
names = []
t_est = []
te_err = []

for i in range(x.shape[1]):
    (pred_train, w_train,
x = values[:, 7:]
#x = a1.normalize_data(x)

N_TRAIN = 100
x_train = x[0:N_TRAIN, :]
x_test = x[N_TRAIN:, :]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

degree = 6

train_err = dict()
test_err = dict()

for i in range(1, degree + 1):
    bigphi = a1.design_matrix('polynomial', x_train, i, 1)

    (w, rms_train) = a1.linear_regression(x_train, t_train, bigphi, -1, i, 0,
                                          0)

    test_phi = a1.design_matrix('polynomial', x_test, i, 1)
    rms_test = a1.evaluate_regression(test_phi, w, t_test)
    train_err[i] = rms_train
    test_err[i] = rms_test
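# Assumed follow-up: plot the two error dicts against polynomial degree to
# visualize under- and over-fitting (train_err/test_err come from the loop above).
degrees = sorted(train_err.keys())
plt.plot(degrees, [train_err[d] for d in degrees], 'bo-', label='train RMSE')
plt.plot(degrees, [test_err[d] for d in degrees], 'go-', label='test RMSE')
plt.xlabel('polynomial degree')
plt.ylabel('RMSE')
plt.legend()
plt.show()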

# Complete the linear_regression and evaluate_regression functions of the assignment1.py
# Pass the required parameters to these functions

# (w, tr_err) = a1.linear_regression()
# (t_est, te_err) = a1.evaluate_regression()
import numpy as np
import matplotlib.pyplot as plt

(countries, features, values) = a1.load_unicef_data()

targets = values[:, 1]
x = values[:, 7:]
x = a1.normalize_data(x)
N_TRAIN = 100
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

for i in range(10, 13):
    x_1 = values[:, i]
    x_train = x_1[0:N_TRAIN, :]
    x_test = x_1[N_TRAIN:, :]
    x_ev = np.linspace(min(x_train).item(),
                       max(x_train).item(),
                       num=500)
    x_ev = np.reshape(x_ev, (x_ev.shape[0], 1))
    phi_ev = a1.design_matrix(x_ev, basis='polynomial', degree=3)
    w, _ = a1.linear_regression(x_train,
                                t_train,
                                basis='polynomial',
                                degree=3)
    y_ev = phi_ev * w
    plt.plot(x_ev, y_ev, '.-', color='pink')
    plt.plot(x_train, t_train, 'o', color='yellow')
    plt.plot(x_test, t_test, 'o', color='blue')
    plt.show()

for i in range(10,13):
	for bias in bias_arr:
		x_train=values[0:N_TRAIN,i]
		x_test=values[N_TRAIN:,i]

		t_train = targets[0:N_TRAIN]
		t_test = targets[N_TRAIN:]
		#bias=True
		
		(w,tr_err)=a1.linear_regression(x_train, t_train, basis='polynomial', reg_lambda=0, degree=3, mu=0, s=1,bias_term=bias)

		(test_preds,test_err)=a1.evaluate_regression(x_test,t_test,w,degree=3,bias_term=bias,basis='polynomial')
		# Use linspace to get a set of samples on which to evaluate
		x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)
		x_ev=np.asmatrix(x_ev).T

		phi = a1.design_matrix(x_ev,3,basis='polynomial',bias_term=bias)
		train_preds=phi@w

		plt.plot(x_train,t_train ,'c*')
		plt.plot(x_ev,train_preds,'ro')
		plt.plot(x_test,t_test ,'b*')
		plt.xlabel('X')
		plt.ylabel('t')
		plt.legend(['Train data','Polynomial','Test data'])
		title='Polynomial for feature '+ str(i) + ' with bias ='+str(bias)
		plt.title(title)
		plt.show()
Example #19
                    np.ndarray.item(max(max(x_train[:, f]), max(x_test[:, f]))),
                    num=500)
x_ev = x_ev.reshape((500, 1))
x_ev2 = x_ev2.reshape((500, 1))

# Perform regression on the linspace samples.
(w, tr_err) = a1.linear_regression(x_train[:, f],
                                   t_train,
                                   'polynomial',
                                   reg_lambda=0,
                                   degree=3,
                                   bias=1)
# print(w)
phi = a1.design_matrix(x_ev, degree=3, basis='polynomial', bias=1)
phi2 = a1.design_matrix(x_ev2, degree=3, basis='polynomial', bias=1)
# Regression estimates on the linspace samples.
y_ev = np.dot(phi, w)
y_ev2 = np.dot(phi2, w)

# Produce a plot of results.
plt.subplot(121)
plt.plot(x_ev, y_ev, 'r.-')
plt.plot(x_train[:, f], t_train, 'b.')
plt.plot(x_test[:, f], t_test, 'g.')
plt.legend(['fit polynomial', 'Training data', 'Testing data'])
plt.title('Visualization of linear regression function')
plt.xlabel(features[f] + " values \nfrom [min(x_train), max(x_train)]")
plt.ylabel("target values")
import matplotlib.pyplot as plt

(countries, features, values) = a1.load_unicef_data()

targets = values[:,1]
x = values[:,10]

N_TRAIN = 100
x_train = x[0:N_TRAIN,:]
x_test = x[N_TRAIN:,:]
t_train = targets[0:N_TRAIN]
t_test = targets[N_TRAIN:]

(w, train_error) = a1.linear_regression(x_train, t_train, basis = 'ReLU')
(t_est, test_error) = a1.evaluate_regression(x_test,t_test, w = w, basis = 'ReLU')
x_ev = np.linspace(min(x_train).item(), max(x_train).item(), num=500)
x_ev = np.reshape(x_ev, (x_ev.shape[0], 1))
phi_ev = a1.design_matrix(x_ev, basis='ReLU')
y_ev = phi_ev * w

print("Training Error:", train_error)
print("Testing Error:", test_error)

plt.plot(x_ev, y_ev, '.-', color='blue')
plt.plot(x_train, t_train, 'o', color='yellow')
plt.plot(x_test, t_test, 'o', color='red')
plt.xlabel('Features')
plt.ylabel('Target')
plt.title('ReLU Regression')
plt.show()
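The 'ReLU' basis is not defined in the listing. A common construction, assumed here, uses hinge features phi_j(x) = max(0, x - c_j) for a set of knot locations c_j plus a bias column; the names below are illustrative.

import numpy as np

def relu_design_matrix(x, knots, include_bias=True):
    # Hinge features max(0, x - c_j), one column per knot c_j.
    x = np.asarray(x, dtype=float).reshape(-1, 1)
    knots = np.asarray(knots, dtype=float).reshape(1, -1)
    phi = np.maximum(0.0, x - knots)
    if include_bias:
        phi = np.hstack([np.ones((phi.shape[0], 1)), phi])
    return phi

print(relu_design_matrix([0.0, 1.0, 2.0], knots=[0.5, 1.5]))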
        N_TRAIN = 100
        x_train = x[0:N_TRAIN, :]
        x_test = x[N_TRAIN:, :]
        t_train = targets[0:N_TRAIN]
        t_test = targets[N_TRAIN:]

        x_ev = np.linspace(min(x_train).item(),
                           max(x_train).item(),
                           num=500).reshape(500, 1)
        y_ev1 = np.linspace(min(t_train).item(),
                            max(t_train).item(),
                            num=500).reshape(500, 1)

        w, t_err = a1.linear_regression(x_train, t_train, 'polynomial', bias,
                                        0, 3, 1)

        phi = a1.design_matrix(x_ev, "polynomial", 3, bias)
        y_ev = phi @ w

        fig, ax = plt.subplots()
        plt.plot(x_train, t_train, 'g+', label='Train Data')
        plt.plot(x_test, t_test, 'b*', label='Test Data')
        plt.plot(x_ev, y_ev, 'y.', label='Polynomial Function')
        plt.xlabel('x-value')
        plt.ylabel('y-value')
        plt.title('Visualization of a function and data points of feature ' +
                  str(i + 1) + ' with bias set to ' + str(bias))
        plt.rcParams["legend.loc"] = 'best'
        plt.legend(numpoints=1)
        plt.show()
Example #22
t_test = targets[N_TRAIN:]
mu_arr = [100, 10000]
weight = []
train_err = []
test_err = []
s = 2000

w, t_err = a1.linear_regression(x_train, t_train, 'sigmoid', True, 0, None,
                                mu_arr, s)
weight.append(w)
train_err.append(t_err)
pred, te_err = a1.evaluate_regression(x_test, t_test, 'sigmoid', True, w, None,
                                      mu_arr, s)
test_err.append(te_err)
#print("Testttttt",te_err)
#print("predddddddd",pred)
x_ev = np.linspace(np.asscalar(min(x_train)),
                   np.asscalar(max(x_train)),
                   num=500).reshape(500, 1)

phi = a1.design_matrix(x_ev, "sigmoid", 3, True, mu_arr, s)
y_ev = phi @ w

plt.plot(x_train, t_train, 'g+', label='Train Data')
plt.plot(x_test, t_test, 'b*', label='Test Data')
plt.plot(x_ev, y_ev, 'y.', label='Sigmoid fit')
plt.xlabel('x-values')
plt.ylabel('y-values')
plt.title('Visualization of a sigmoid function for feature 11')
plt.legend(numpoints=1)
plt.show()