Example #1
# Assumes numpy as np plus the exercise functions (warmUpExercise, computeCost, gradientDescent, ...) are imported.
def output(partId):
    # Random Test Cases
    X1 = np.column_stack(
        (np.ones(20), np.exp(1) + np.exp(2) * np.linspace(0.1, 2, 20)))
    Y1 = X1[:, 1] + np.sin(X1[:, 0]) + np.cos(X1[:, 1])
    X2 = np.column_stack((X1, X1[:, 1]**0.5, X1[:, 1]**0.25))
    Y2 = np.power(Y1, 0.5) + Y1
    if partId == '1':
        out = formatter('%0.5f ', warmUpExercise())
    elif partId == '2':
        out = formatter('%0.5f ', computeCost(X1, Y1, np.array([0.5, -0.5])))
    elif partId == '3':
        out = formatter(
            '%0.5f ', gradientDescent(X1, Y1, np.array([0.5, -0.5]), 0.01, 10))
    elif partId == '4':
        out = formatter('%0.5f ', featureNormalize(X2[:, 1:4]))
    elif partId == '5':
        out = formatter(
            '%0.5f ', computeCostMulti(X2, Y2, np.array([0.1, 0.2, 0.3, 0.4])))
    elif partId == '6':
        out = formatter(
            '%0.5f ',
            gradientDescentMulti(X2, Y2, np.array([-0.1, -0.2, -0.3, -0.4]),
                                 0.01, 10))
    elif partId == '7':
        out = formatter('%0.5f ', normalEqn(X2, Y2))
    return out
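Example #1 calls a formatter helper that the snippet never defines; it comes from the course's submission script. A minimal sketch consistent with how it is called here (a printf-style format string plus an array-like result), assuming each graded function returns a single NumPy array, might be:

import numpy as np

def formatter(fmt, value):
    # Hypothetical stand-in for the submission helper: flatten the
    # result and render every element with the given format string.
    return ''.join(fmt % v for v in np.asarray(value).ravel())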
Example #2
# Assumes `from numpy import *` and a sprintf-style formatting helper from the course utilities.
def output(partId):
	# Random Test Cases
	X1 = column_stack((ones(20), exp(1) + dot(exp(2), arange(0.1, 2.1, 0.1))))
	Y1 = X1[:,1] + sin(X1[:,0]) + cos(X1[:,1])
	X2 = column_stack((X1, X1[:,1]**0.5, X1[:,1]**0.25))
	Y2 = Y1**0.5 + Y1
	if partId == '1':
		return sprintf('%0.5f ', warmUpExercise())
	elif partId == '2':
		return sprintf('%0.5f ', computeCost(X1, Y1, array([0.5, -0.5])))
	elif partId == '3':
		return sprintf('%0.5f ', gradientDescent(X1, Y1, array([0.5, -0.5]), 0.01, 10))
	elif partId == '4':
		return sprintf('%0.5f ', featureNormalize(X2[:,1:3]))
	elif partId == '5':
		return sprintf('%0.5f ', computeCostMulti(X2, Y2, array([0.1, 0.2, 0.3, 0.4])))
	elif partId == '6':
		return sprintf('%0.5f ', gradientDescentMulti(X2, Y2, array([-0.1, -0.2, -0.3, -0.4]), 0.01, 10))
	elif partId == '7':
		return sprintf('%0.5f ', normalEqn(X2, Y2))
Example #3
input('Program paused. Press enter to continue.\n')

## ================ Part 3: Normal Equations ================

print('Solving with normal equations...\n')

## Load Data
data = np.loadtxt('ex1data2.txt', delimiter=",")
X = data[:, 0:2]
y = data[:, 2].reshape(-1, 1)
m = len(y)

# Add intercept term to X
X = np.vstack((np.ones(m), X.T)).T

# Calculate the parameters from the normal equation
theta = normalEqn(X, y)

# Display normal equation's result
print('Theta computed from the normal equations: \n')
print(theta)
print('\n')

temp = np.array([[1.0, 1650.0, 3.0]])
price = np.dot(temp, theta)[0, 0]

print(
    'Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n $%f\n'
    % price)

input('Program paused. Press enter to continue.\n')
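All of these examples delegate to normalEqn without showing it. The closed-form solution is theta = (X'X)^-1 X'y; a minimal NumPy sketch (using the pseudo-inverse, as the course suggests, so a singular X'X is handled gracefully) is:

import numpy as np

def normalEqn(X, y):
    # Closed-form least squares: theta = pinv(X'X) X'y.
    return np.linalg.pinv(X.T.dot(X)).dot(X.T).dot(y)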
Example #4
    print('Solving with normal equations...')

    # ====================== YOUR CODE HERE ====================

    # Load Data
    data = np.asmatrix(np.loadtxt('ex1data2.txt', delimiter=','))
    X = data[:, :2]
    y = data[:, 2]
    m = y.shape[0]

    # Add intercept term to X
    X = np.hstack((np.ones((m, 1)), X))

    # Calculate the parameters from the normal equation
    theta = normalEqn(X, y)

    # Display normal equation's result
    print('Theta computed from the normal equations:')
    print(theta)

    # Estimate the price of a 1650 sq-ft, 3 br house
    # ====================== YOUR CODE HERE ======================

    price = np.matrix([1, 1650, 3]).dot(theta)[0, 0]

    # ============================================================

    print('Predicted price of a 1650 sq-ft, 3 br house ' +
          '(using normal equations):\n $%f\n' % (price))
Example #5
#
#               After doing so, you should complete this code
#               to predict the price of a 1650 sq-ft, 3 br house.
#

## Load Data
data = np.loadtxt('ex1data2.txt', delimiter=",")
X = data[:, :2]
y = data[:, 2]
m = len(y)  # number of training examples

# Add intercept term to X
X_padded = np.column_stack((np.ones((m, 1)), X))

# Calculate the parameters from the normal equation (assumes `import normalEqn as ne`)
theta = ne.normalEqn(X_padded, y)

# Display normal equation's result
print('Theta computed from the normal equations:')
print("{:f}, {:f}, {:f}".format(theta[0], theta[1], theta[2]))
print('')

# Estimate the price of a 1650 sq-ft, 3 br house
# ====================== YOUR CODE HERE ======================
house_norm_padded = np.array([1, 1650, 3])
price = np.array(house_norm_padded).dot(theta)

# ============================================================

print(
    "Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n ${:,.2f}"
    .format(price))
Example #6
# ====================== YOUR CODE HERE ======================
# Instructions: The following code computes the closed form solution for linear regression using the normal
#               equations. You should complete the code in normalEqn.py.
#               After doing so, you should complete this code to predict the price of a 1650 sq-ft, 3 br house.

# # Load Data
data = np.loadtxt('ex1data2.txt', delimiter=',')
X = data[:, 0:2]
y = data[:, 2]
m = len(y)

# Add intercept term to X
X = np.column_stack((np.ones((m, 1)), X))

# Calculate the parameters from the normal equation (assumes `import normalEqn as ne`)
theta = ne.normalEqn(X, y)

# Display normal equation's result
print('Theta computed from the normal equations: \n')
print(' {:f} {:f} {:f}\n'.format(theta[0], theta[1], theta[2]))
print('\n')

# Estimate the price of a 1650 sq-ft, 3 br house
# ====================== YOUR CODE HERE ======================
area = 1650
br = 3
added = np.array([1, area, br])
theta = np.array(theta)
price = np.dot(added, theta)
# ============================================================
Example #7
plt.plot(np.arange(0, num_iters), J_history, "b-")
plt.xlabel('Number of Iterations')
plt.ylabel('Cost J')
plt.title('Gradient Descent')
plt.draw()
plt.pause(0.001)

#Display gradient descent's result
print('Theta computed from gradient descent: \n', theta)

# Estimate the price of a 1650 sq-ft, 3 br house
# Recall that the first column of X is all-ones, so it does not need to be normalized.
price = ((np.array([[1, (1650 - mu[0]) / sigma[0],
                     (3 - mu[1]) / sigma[1]]])) @ theta)[0, 0]
print("Estimated price of a 1650 sq-ft, 3 br house $", price)
input("Press Enter to continue...")

# ================ Part 3: Normal Equations ================
print('Solving with normal equations...\n')
data = pd.read_csv('ex1data2.txt', header=None)
X, y = data.iloc[:, :2], data.iloc[:, 2]  # features as a DataFrame, target as a Series
X = np.concatenate([np.ones((m, 1)), np.array(X)], axis=1)
theta = NE.normalEqn(X, y)

print('Theta computed from the normal equations\n', theta)
price = np.array([[1, 1650, 3]]) @ theta
print(
    'Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n',
    price)
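For reference, the multivariate cost and update rule behind computeCostMulti and gradientDescentMulti are J(theta) = (1/2m)||X theta - y||^2 and theta := theta - (alpha/m) X'(X theta - y). A compact sketch, assuming y and theta are column vectors:

import numpy as np

def computeCostMulti(X, y, theta):
    # J = (1 / 2m) * sum((X theta - y)^2)
    err = X.dot(theta) - y
    return np.sum(err ** 2) / (2 * len(y))

def gradientDescentMulti(X, y, theta, alpha, num_iters):
    # Batch gradient descent; records the cost at every iteration.
    m = len(y)
    J_history = []
    for _ in range(num_iters):
        theta = theta - (alpha / m) * X.T.dot(X.dot(theta) - y)
        J_history.append(computeCostMulti(X, y, theta))
    return theta, J_history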
Example #8
# The complete model to execute the whole multi-regression

import loadData
import featureNormalize
import normalEqn
import gradientDescent
import numpy


def init(number):
    return numpy.random.normal(0, 1, number + 1)


def multiRegression(number, x, y, learning_rate=1e-3, epoch=50):
    weights = init(number)
    for i in range(epoch):
        # the author's computeCost here performs the update step and returns new weights
        weights = gradientDescent.computeCost(learning_rate, weights, x, y)
    return weights


if __name__ == '__main__':
    x, y = loadData.loadData('ex1data2.txt', 2)
    x = featureNormalize.featureNormalize(x, 2)
    print(normalEqn.normalEqn(x, y))
    print(multiRegression(2, x, y))
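Example #8 imports a local loadData module that is not shown. Given how it is called (a file path plus the number of feature columns), a plausible sketch is:

import numpy as np

def loadData(path, num_features):
    # Hypothetical helper matching loadData.loadData(path, n): the first
    # n columns are features, the following column is the target.
    data = np.loadtxt(path, delimiter=',')
    return data[:, :num_features], data[:, num_features]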
Example #9
# Choose some alpha value
alpha = 0.01
num_iters = 1500

# Init Theta and Run Gradient Descent
theta = np.zeros((3, 1))

theta, J_history = gdm.gradientDescent(X_norm, y, theta, alpha, num_iters)


#Plot the convergence of the cost function
def plotConvergence(jvec):
    plt.figure()
    plt.plot(range(len(jvec)), jvec, 'bo')
    plt.grid(True)
    plt.title("Convergence of Cost Function")
    plt.xlabel("Iteration number")
    plt.ylabel("Cost function")
    plt.show()


#Plot convergence of cost function:
# plotConvergence(J_history)

# Solve for theta directly with the normal equation (no feature scaling or gradient descent needed)
X = np.column_stack((np.ones(m), X))  # Add a column of ones to x
theta = neqn.normalEqn(X, y)

print(theta)

print("$%0.2f" % np.dot(theta.T, [[1], [1650.], [3]]))
Example #10
#
#               After doing so, you should complete this code 
#               to predict the price of a 1650 sq-ft, 3 br house.
#

## Load Data
data = np.loadtxt('ex1data2.txt', delimiter=",")
X = data[:,:2]
y = data[:,2]
m = len(y) # number of training examples

# Add intercept term to X
X_padded = np.column_stack((np.ones((m,1)), X)) 

# Calculate the parameters from the normal equation
theta = ne.normalEqn(X_padded, y)

# Display normal equation's result
print('Theta computed from the normal equations:')
print("{:f}, {:f}, {:f}".format(theta[0], theta[1], theta[2]))
print('')


# Estimate the price of a 1650 sq-ft, 3 br house
# ====================== YOUR CODE HERE ======================
house_norm_padded = np.array([1, 1650, 3])
price = np.array(house_norm_padded).dot(theta)

# ============================================================

print("Predicted price of a 1650 sq-ft, 3 br house (using gradient descent):\n ${:,.2f}".format(price))
Example #11
print('\nRunning Gradient Descent ...\n')
# run gradient descent
from gradientDescent import gradientDescent
theta, J_history = gradientDescent(X, y, theta, alpha, iterations)

# plot the convergence graph
print('\nplotting the convergence graph...\n')
plt.plot(range(iterations), J_history, color='coral')
plt.xlabel('Number of iterations')
plt.ylabel('Cost J')
plt.show()

print('\nRunning Normal Equation ...\n')
# run normal equation
from normalEqn import normalEqn
theta_ne = normalEqn(X, y)

# print theta to screen
print('Theta found by gradient descent:\n')
print(' {:.4f}\n  {:.4f}\n'.format(theta[0][0], theta[1][0]))
print('Theta found by normal equation:\n')
print(' {:.4f}\n  {:.4f}\n'.format(theta_ne[0, 0], theta_ne[1, 0]))
print('Expected theta values (approx)\n')
print(' -3.6303\n  1.1664\n\n')

print('\nProgram paused. Press enter to continue.\n')
input()

# plot the linear fit
print('\nplotting linear fit...\n')
plt.scatter(X[:, 1], y, color='red', marker='x', s=10)
Example #12
# Display gradient descent's result
print('Theta computed from gradient descent: \n')
print(" %s \n" % theta)
print('\n')
price = np.matrix([1, -0.44604386, -0.224428357]) * theta  # feature values were normalized earlier in the script
price = str(price).replace('[', '').replace(']', '')
print("Predicted price of a 1650 sq-ft, 3 br house is %s $\n" % price)

"""==============================Normal Equation ==================="""
# Load the data again, since it is not normalized for the normal equation
data = np.loadtxt('ex1data2.txt', delimiter=",")
X = data[:, 0:2]
y = data[:, 2:3]
m = len(y)
# Add intercept term to X
X=np.c_[np.ones(m),X]

# Display theta computed from the normal equations
print('Theta computed from the normal equations: \n')
theta = nEqn.normalEqn(X, y)
print(' %s \n' % theta)
print('\n')
price = np.matrix([1, 1650, 3]) * theta

price = str(price).replace('[', '').replace(']', '')
"""========================== predicting the price of a house using normal equations =========================="""

print('Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n%s $\n' %price);
Example #13
#               to predict the price of a 1650 sq-ft, 3 br house.
#

print('Solving with normal equations...')

# Load Data
data = np.loadtxt('ex1data2.txt', delimiter=',')
X = data[:, :2]
y = data[:, 2]
m = y.size

# Add intercept term to X
X = np.concatenate((np.ones((m, 1)), X), axis=1)

# Calculate the parameters from the normal equation
theta_norm = normalEqn(X, y)

# Display normal equation's result
print('Theta computed from the normal equations:')
print('%s \n' % theta_norm)

# Estimate the price of a 1650 sq-ft, 3 br house
price = np.array([1, 1650, 3]).dot(theta_norm)

# ============================================================

print("Predicted price of a 1650 sq-ft, 3 br house ")
print('(using normal equations):\n $%f\n' % price)

input("Program paused. Press Enter to continue...")
Example #14
def ex1_multi():
    # Initialization

    # ================ Part 1: Feature Normalization ================

    # Clear and Close Figures
    #clear ; close all; clc

    print('Loading data ...')

    # Load Data
    data = np.loadtxt('ex1data2.txt', delimiter=',')
    X = np.reshape(data[:, 0:2], (data.shape[0], 2))
    y = np.reshape(data[:, 2], (data.shape[0], 1))
    m = y.shape[0]

    # Print out some data points
    print('First 10 examples from the dataset: ')
    print(np.c_[X[0:10, :], y[0:10, :]].T)

    print('Program paused. Press enter to continue.')
    #input()

    # Scale features and set them to zero mean
    print('Normalizing Features ...')

    X, mu, sigma = featureNormalize(X)

    # Add intercept term to X
    X = np.c_[np.ones((m, 1)), X]


    # ================ Part 2: Gradient Descent ================

    # ====================== YOUR CODE HERE ======================
    # Instructions: We have provided you with the following starter
    #               code that runs gradient descent with a particular
    #               learning rate (alpha).
    #
    #               Your task is to first make sure that your functions -
    #               computeCost and gradientDescent already work with
    #               this starter code and support multiple variables.
    #
    #               After that, try running gradient descent with
    #               different values of alpha and see which one gives
    #               you the best result.
    #
    #               Finally, you should complete the code at the end
    #               to predict the price of a 1650 sq-ft, 3 br house.
    #
    # Hint: Repeated plt.plot calls draw onto the same axes (matplotlib's
    #       equivalent of Octave's 'hold on'), letting you compare learning rates.

    # Hint: At prediction, make sure you do the same feature normalization.


    # Begin: My code plotting for different learning rates
    alphas = [0.3, 0.1, 0.03, 0.01]
    colors = ['r', 'g', 'b', 'k']
    short_iters = 50
    fig = plt.figure()
    #hold on;
    plt.xlabel('Number of iterations')
    plt.ylabel('Cost J')
    for i in range(len(alphas)):
        _, J = gradientDescentMulti(X, y, np.zeros((3, 1)), alphas[i], short_iters)
        plt.plot(range(len(J)), J, colors[i], markersize=2)
    plt.savefig('figure1.multi.png')
    # End: My code plotting for different learning rates

    print('Running gradient descent ...')

    # Choose some alpha value
    alpha = 0.01
    num_iters = 400

    # Init Theta and Run Gradient Descent
    theta = np.zeros((3, 1))
    theta, J_history = gradientDescentMulti(X, y, theta, alpha, num_iters)

    # Plot the convergence graph
    fig = plt.figure()
    plt.plot(range(len(J_history)), J_history, '-b', markersize=2)
    plt.xlabel('Number of iterations')
    plt.ylabel('Cost J')
    plt.savefig('figure2.multi.png')

    # Display gradient descent's result
    print('Theta computed from gradient descent: ')
    print(theta)
    print()

    # Estimate the price of a 1650 sq-ft, 3 br house
    # ====================== YOUR CODE HERE ======================
    # Recall that the first column of X is all-ones. Thus, it does
    # not need to be normalized.
    price = np.dot(np.r_[1, np.divide(np.subtract([1650, 3], mu), sigma)], theta).item()

    # ============================================================

    print('Predicted price of a 1650 sq-ft, 3 br house (using gradient descent):\n $%f' % price)

    print('Program paused. Press enter to continue.')
    #input()

    # ================ Part 3: Normal Equations ================

    print('Solving with normal equations...')

    # ====================== YOUR CODE HERE ======================
    # Instructions: The following code computes the closed form
    #               solution for linear regression using the normal
    #               equations. You should complete the code in
    #               normalEqn.py
    #
    #               After doing so, you should complete this code
    #               to predict the price of a 1650 sq-ft, 3 br house.
    #

    # Load Data
    data = np.loadtxt('ex1data2.txt', delimiter=',')
    X = np.reshape(data[:, 0:2], (data.shape[0], 2))
    y = np.reshape(data[:, 2], (data.shape[0], 1))
    m = y.shape[0]

    # Add intercept term to X
    X = np.c_[np.ones((m, 1)), X]

    # Calculate the parameters from the normal equation
    theta = normalEqn(X, y)

    # Display normal equation's result
    print('Theta computed from the normal equations: ')
    print(theta)
    print('')


    # Estimate the price of a 1650 sq-ft, 3 br house
    # ====================== YOUR CODE HERE ======================
    price = np.dot([1, 1650, 3], theta).item()


    # ============================================================

    print('Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n $%f' % price)

    # http://scikit-learn.org/stable/auto_examples/linear_model/plot_ridge_coeffs.html
    # Using sklearn
    X = np.reshape(data[:, 0:2], (data.shape[0], 2))
    y = np.reshape(data[:, 2], (data.shape[0], 1))
    model = linear_model.Ridge(max_iter=num_iters, solver='lsqr')
    count = 200
    alphas = np.logspace(-3, 1, count)
    coefs = np.zeros((count, 2))
    errors = np.zeros((count, 1))
    for i, alpha in enumerate(alphas):
        model.set_params(alpha=alpha)
        model.fit(X, y)
        coefs[i, :] = model.coef_
        errors[i, 0] = metrics.mean_squared_error(model.predict(X), y)
    results = [(r'$\theta_1$', coefs[:, 0]), (r'$\theta_2$', coefs[:, 1]), ('MSE', errors)]
    for i, result in enumerate(results):
        label, values = result
        plt.figure()
        ax = plt.gca()
        ax.set_xscale('log')
        ax.plot(alphas, values)
        plt.xlabel(r'$\alpha$')
        plt.ylabel(label)
        plt.savefig('figure%d.multi.sklearn.png' % (i + 1))
    #model = linear_model.LinearRegression()
    model = linear_model.Ridge(alpha=alpha, max_iter=num_iters, solver='lsqr')
    model.fit(X, y)
    print('Theta found: ')
    print('%f %f %f' % (model.intercept_[0], model.coef_[0, 0], model.coef_[0, 1]))
    print('Predicted price of a 1650 sq-ft, 3 br house (using sklearn):\n $%f' % model.predict([[1650, 3]])[0, 0])
Example #15
# ============= Use scikit-learn =============
# (the `normalize=True` kwarg was removed in scikit-learn 1.2, so it is dropped here)
regr = linear_model.LinearRegression(fit_intercept=False)
regr.fit(X, y)

print('Theta found by scikit-learn: ')
print('%s %s \n' % (regr.coef_[0], regr.coef_[1]))

predict1 = np.array([1, 3.5]).dot(regr.coef_)
predict2 = np.array([1, 7]).dot(regr.coef_)
print('For population = 35,000, we predict a profit of {:.4f}'.format(
    predict1 * 10000))
print('For population = 70,000, we predict a profit of {:.4f}'.format(
    predict2 * 10000))

plt.figure()
plotData(data)
plt.plot(X[:, 1],
         X.dot(regr.coef_),
         '-',
         color='black',
         label='Linear regression with scikit-learn')
plt.legend(loc='upper right', shadow=True, fontsize='x-large', numpoints=1)
## plt.show()

raw_input("Program paused. Press Enter to continue...")

print 'Now, here are the optional parts!'
print "Normal equation theta parameters"
print normalEqn(X, y)
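As a sanity check tying Examples #14 and #15 together, the normal-equation theta should agree with an unregularized, intercept-free scikit-learn fit on the padded design matrix (a sketch; normalEqn as defined earlier):

import numpy as np
from sklearn import linear_model

data = np.loadtxt('ex1data2.txt', delimiter=',')
X = np.column_stack((np.ones(len(data)), data[:, :2]))
y = data[:, 2]

theta = normalEqn(X, y)
model = linear_model.LinearRegression(fit_intercept=False).fit(X, y)
print(np.allclose(theta.ravel(), model.coef_.ravel()))  # expected: True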