Example #1
from numpy import array
from numpy.testing import assert_array_equal

def testGradientDescent1():
    # With a zero learning rate and zero iterations, theta must come back unchanged.
    X = array([[1., 0.]])
    y = array([0.])
    theta = array([0., 0.])
    th = gradientDescent(X, y, theta, 0, 0)[0]
    assert_array_equal(th, theta)
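These tests fix the interface: gradientDescent(X, y, theta, alpha, num_iters) returns a tuple whose first element is the updated theta. The implementation under test is not shown on this page; the following is a minimal sketch, assuming the standard batch update for linear regression, theta <- theta - (alpha/m) * X'(X theta - y), plus a companion computeCost. The examples here are consistent with this form (Example #3 can even be checked by hand; see below).

import numpy as np

# A minimal sketch, not necessarily the implementation these tests were written against.
def computeCost(X, y, theta):
    # Mean squared error cost: J(theta) = (1/2m) * sum((X.theta - y)^2)
    m = len(y)
    errors = X.dot(theta) - y
    return errors.dot(errors) / (2 * m)

def gradientDescent(X, y, theta, alpha, num_iters):
    # Batch gradient descent; returns (theta, J_history).
    m = len(y)
    J_history = np.zeros(num_iters)
    for i in range(num_iters):
        errors = X.dot(theta) - y                      # residuals under the current theta
        theta = theta - (alpha / m) * X.T.dot(errors)  # simultaneous update of all parameters
        J_history[i] = computeCost(X, y, theta)        # track the cost after each step
    return theta, J_history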
Example #2
from numpy import array, column_stack, linspace, ones, sin
from numpy.testing import assert_array_almost_equal

def testGradientDescent5():
    # Fit a straight line to a sine wave sampled on [0, 10]; check theta after 100 steps.
    X = column_stack((ones(101), linspace(0, 10, 101)))
    y = sin(linspace(0, 10, 101))
    theta = array([1., -1.])
    th = gradientDescent(X, y, theta, 0.05, 100)[0]
    assert_array_almost_equal(th, array([0.5132, -0.0545]), decimal=3)
Example #3
from numpy import arange, array, column_stack, ones
from numpy.testing import assert_array_almost_equal

def testGradientDescent3():
    # A single step with alpha = 1 from theta = [1, 2]; see the worked check below.
    X = column_stack((ones(10), arange(10)))
    y = arange(10) * 2
    theta = array([1., 2.])
    th = gradientDescent(X, y, theta, 1, 1)[0]
    assert_array_almost_equal(th, array([0., -2.5]))
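The expected value [0., -2.5] can be checked by hand: with theta = [1, 2], every prediction 1 + 2x overshoots the target 2x by exactly 1, so with alpha = 1 and m = 10 the step (alpha/m) * X'(X theta - y) is [1, 4.5]. A quick confirmation:

import numpy as np

X = np.column_stack((np.ones(10), np.arange(10)))
y = np.arange(10) * 2
theta = np.array([1., 2.])
errors = X.dot(theta) - y            # every residual is exactly 1.0
step = (1.0 / 10) * X.T.dot(errors)  # alpha/m = 1/10, so step = [1.0, 4.5]
print(theta - step)                  # -> [ 0.  -2.5]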
Example #4
from numpy import arange, array, column_stack, ones
from numpy.testing import assert_array_almost_equal

def testGradientDescent4():
    # Same data as above, but 100 small steps; theta heads toward the exact fit [0, 2].
    X = column_stack((ones(10), arange(10)))
    y = arange(10) * 2
    theta = array([1., 2.])
    th = gradientDescent(X, y, theta, 0.05, 100)[0]
    assert_array_almost_equal(th, array([0.2353, 1.9625]), decimal=3)
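Since y = 2x exactly, the least-squares optimum is theta = [0, 2], and the asserted [0.2353, 1.9625] shows 100 steps at alpha = 0.05 getting close to it. The target itself can be verified with numpy's closed-form solver (not part of the test):

import numpy as np

X = np.column_stack((np.ones(10), np.arange(10)))
y = np.arange(10) * 2
theta_exact = np.linalg.lstsq(X, y, rcond=None)[0]
print(theta_exact)  # approximately [0., 2.], the limit gradient descent approaches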
Example #5
## =================== Part 3: Gradient descent ===================
print('Running Gradient Descent ...')

X_design = column_stack((ones(m), data[:,0])) # Add a column of ones to x
theta = zeros(2) # initialize fitting parameters

# Some gradient descent settings
iterations = 1500
alpha = 0.01

# compute and display initial cost
print(computeCost(X_design, y, theta))

# run gradient descent
(theta, J_history) = gradientDescent(X_design, y, theta, alpha, iterations)

# print theta to screen
print('Theta found by gradient descent: ')
print('%f %f \n' % (theta[0], theta[1]))

# Plot the linear fit
# keep previous plot visible: new artists draw on the current axes by default
# (pyplot's hold() was removed from matplotlib)
plot(X_design[:,1], X_design.dot(theta), '-')
legend(('Training data', 'Linear regression'))
firstPlot.show()
# to stop overlaying further plots on this figure, call figure() to start a new one

# Predict values for population sizes of 35,000 and 70,000
predict1 = array([1, 3.5]).dot(theta) # population is in units of 10,000, so 35,000 -> 3.5
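The J_history returned above goes unused, but plotting it is the quickest way to confirm that alpha = 0.01 actually converges over the 1500 iterations. A possible follow-up, sketched with an explicit pyplot import rather than the pylab-style names used above:

import matplotlib.pyplot as plt

plt.figure()  # a fresh figure, so the convergence curve does not land on the fit plot
plt.plot(range(1, iterations + 1), J_history)
plt.xlabel('Iteration')
plt.ylabel('Cost J(theta)')
plt.title('Gradient descent convergence')
plt.show()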