def testGradientDescentMulti1():
    """With alpha=0 and 0 iterations, theta must come back unchanged."""
    features = array([[1., 0.]])
    targets = array([0.])
    start = array([0., 0.])
    fitted = gradientDescentMulti(features, targets, start, 0, 0)[0]
    assert_array_equal(fitted, start)
def testGradientDescentMulti8():
    """100 steps at alpha=0.05 on a 4-column trig/linear design matrix."""
    t = arange(10)
    design = column_stack((ones(10), sin(t), cos(t), linspace(0.3, 0.7, 10)))
    targets = arange(10)
    start = array([1., 2., 3., 4.])
    fitted = gradientDescentMulti(design, targets, start, 0.05, 100)[0]
    expected = array([1.6225, 0.39764, -0.39422, 5.7765])
    assert_array_almost_equal(fitted, expected, decimal=3)
def testGradientDescentMulti7():
    """Single step with alpha=1 and a duplicated feature column."""
    idx = arange(10)
    design = column_stack((ones(10), idx, idx))
    targets = 2 * idx
    fitted = gradientDescentMulti(design, targets, array([0., 0., 0.]), 1, 1)[0]
    assert_array_almost_equal(fitted, array([9., 57., 57.]))
def testGradientDescentMulti5():
    """Fit a straight line to sin(x) sampled on [0, 10]."""
    grid = linspace(0, 10, 101)
    design = column_stack((ones(101), grid))
    fitted = gradientDescentMulti(design, sin(grid), array([1., -1.]), 0.05, 100)[0]
    assert_array_almost_equal(fitted, array([0.5132, -0.0545]), decimal=3)
def testGradientDescentMulti4():
    """Recover y = 2x starting from a nonzero theta, 100 steps at alpha=0.05."""
    xs = arange(10)
    design = column_stack((ones(10), xs))
    fitted = gradientDescentMulti(design, xs * 2, array([1., 2.]), 0.05, 100)[0]
    assert_array_almost_equal(fitted, array([0.2353, 1.9625]), decimal=3)
# Esempio n. 6 (Italian: "Example no. 6") -- snippet-separator artifact from the
# scraping source (score: 0); the script fragment below is independent of the
# tests above.
#
# Hint: By using the 'hold(True)' command, you can plot multiple
#       graphs on the same figure.
#
# Hint: At prediction, make sure you do the same feature normalization.
#

# Script section (Python 2): run multivariate gradient descent, plot the cost
# history, and print the learned parameters.
# NOTE(review): X_data and y are assumed to be defined earlier in the script
# (design matrix with a bias column, and target vector) -- confirm against the
# data-loading code above this chunk.
print 'Running gradient descent ...'

# Choose some alpha value (learning rate) and the number of descent iterations
alpha = 0.01
num_iters = 400

# Init Theta and Run Gradient Descent
# zeros(3) implies a bias term plus two features -- presumably house size and
# number of bedrooms for the housing example; verify against X_data's shape.
theta = zeros(3)
theta, J_history = gradientDescentMulti(X_data, y, theta, alpha, num_iters);

# Plot the convergence graph: with a suitable alpha, the recorded cost
# J_history should decrease toward a plateau.
fig = figure()
plot(J_history, '-b', linewidth=2)
xlabel('Number of iterations')
ylabel('Cost J')
fig.show()

# Display gradient descent's result, one theta component per line
print 'Theta computed from gradient descent:'
for t in theta: print t
print

# Estimate the price of a 1650 sq-ft, 3 br house
# ====================== YOUR CODE HERE ======================