Example #1
	def test_grad_descent(self):
		M = 3
		#x = np.ones(M+1)
		x = np.array([.3, 8, -20, 17])
		des_obj = gd.gradDesc(x, .2, .01, True, 'curvefitting.txt', M)
		answer = des_obj.grad_descent(True)
		print "Found root at ", answer
Example #2
	def test_grad(self):
		obj = gd.gradDesc(1, 1, .01, False, 'curvefitting.txt', 0)
		X = np.array([1,2,3,4])
		h = .000001
		true_val = obj.grad(X)
		approx_val = obj.grad_approx(X, h)
		self.assertAlmostEqual(approx_val[0], true_val[0])
		self.assertAlmostEqual(approx_val[1], true_val[1])
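
This test checks the analytic gradient against a finite-difference estimate. grad_approx itself is not shown; a central-difference version like the following would behave the way the test expects (taking the function F as an explicit argument is my assumption; the project's method presumably uses its own cost function internally):

import numpy as np

def grad_approx(F, X, h=1e-6):
    # Estimate each partial derivative with a central difference:
    # dF/dx_i ~ (F(X + h*e_i) - F(X - h*e_i)) / (2h)
    X = np.asarray(X, dtype=float)
    g = np.empty_like(X)
    for i in range(X.size):
        e = np.zeros_like(X)
        e[i] = h
        g[i] = (F(X + e) - F(X - e)) / (2 * h)
    return g

# Example: F(X) = sum(X^2) has gradient 2X.
print(grad_approx(lambda X: np.sum(X ** 2), np.array([1., 2., 3., 4.])))
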
Example #3
	def test_F(self):
		obj = gd.gradDesc(1, 1, .01, False, 'curvefitting.txt', 0)
		X = np.array([1,2,3,4])
		self.assertEqual(obj.F(X), 5)
Example #4
	def test_init(self):
		obj = gd.gradDesc(1, 1, .01, False, 'curvefitting.txt', 0)
Example #5
# Prepend a column of ones to X so theta[0] acts as the intercept term
XwithOnes = np.empty((X.shape[0], X.shape[1] + 1))
XwithOnes[:, 1:] = X
XwithOnes[:, 0] = 1
Y = data[:, 3]

# Take a look at the data
print("All data:")
print(pandasData)

# Initializing theta (weights)
theta = np.zeros(XwithOnes.shape[1])

# Initializing the learning rate
alpha = 0.01

# Initializing the number of gradient descent iterations
iterations = 150

# Cost function values recorded during gradient descent
J_history = []

# Running gradient descent
theta, J_history = gradDesc(XwithOnes, Y, theta, alpha, iterations)

# Check that the cost function decreases with iterations
plt.plot(range(iterations), J_history)
plt.xlabel('Number of iterations')
plt.ylabel('Value of the cost function')
plt.grid(linewidth=0.8, color='grey', alpha=0.3)
plt.show()
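
The imported gradDesc is not listed here. A minimal sketch consistent with the call gradDesc(XwithOnes, Y, theta, alpha, iterations) and the (theta, J_history) return value, assuming batch gradient descent on the squared-error cost:

import numpy as np

def gradDesc(X, Y, theta, alpha, iterations):
    # Batch gradient descent for linear regression: one full-data
    # gradient step per iteration, recording the cost after each step.
    m = len(Y)
    J_history = []
    for _ in range(iterations):
        error = X @ theta - Y                        # residuals
        theta = theta - (alpha / m) * (X.T @ error)  # gradient step
        J_history.append(np.sum((X @ theta - Y) ** 2) / (2 * m))
    return theta, J_history
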
Example #6
plt.rcParams['font.family'] = 'Calibri'
plt.rcParams["figure.figsize"] = [10, 5]
plt.xlabel("Размер города, 10.000 человек")
plt.ylabel("Доход фудтрака, $10.000")
plt.title(
    "Зависимость дохода фудтрака от размера города, в котором он размещается")
plt.xlim([0.5, 30.5])
plt.grid(linewidth=0.8, color='grey', alpha=0.3)
plt.scatter(x, y)

m = len(x)
theta = [0] * 2

iterations = 1500
alpha = 0.01
theta, J_history = gradDesc(theta, alpha, iterations, x, y)

x_hyp = np.linspace(0, 35, 100)
y_hyp = np.array(theta[0] + x_hyp * theta[1])

plt.plot(x_hyp, y_hyp)

plt.show()
plt.close()

plt.rcParams["figure.figsize"] = [10, 5]
plt.xlabel("Итерации")
plt.ylabel("Cost Function")
plt.title(
    "Зависимость функции потерь от количества итераций градиентного спуска")
plt.xlim([0, iterations])
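
Note the univariate gradDesc here is called as gradDesc(theta, alpha, iterations, x, y) rather than the design-matrix form in Example #5. A sketch matching that signature (again an assumption about the unshown module):

import numpy as np

def gradDesc(theta, alpha, iterations, x, y):
    # Gradient descent for the hypothesis y ~ theta[0] + theta[1] * x,
    # recording the squared-error cost after each simultaneous update.
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    t0, t1 = float(theta[0]), float(theta[1])
    J_history = []
    for _ in range(iterations):
        err = t0 + t1 * x - y
        t0, t1 = t0 - alpha * err.mean(), t1 - alpha * (err * x).mean()
        J_history.append(np.mean((t0 + t1 * x - y) ** 2) / 2)
    return [t0, t1], J_history
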
Example #7
import pandas as pnd
import numpy as np
from normalizeData import normalizeData
import matplotlib.pyplot as plt
from gradDesc import gradDesc

pandasData = pnd.read_csv("data.txt", names=['House size, ft^2', 'Number of bedrooms', 'House price, $'], skip_blank_lines=True)
# Prepend a bias column of ones to the raw values
numpyData = np.zeros((len(pandasData.values), len(pandasData.values[0]) + 1))
numpyData[:,1:] = pandasData.values
numpyData[:,0] = 1

print(pandasData)

normData = normalizeData(numpyData)

theta = np.zeros(3)
iterations = 1500
alpha = 0.1

theta, J_history = gradDesc(normData[:, 0:-1], normData[:, -1], theta, alpha, iterations)

plt.plot(range(iterations), J_history)
plt.show()
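
normalizeData is imported but not shown. A z-score feature-scaling sketch that leaves the bias column of ones untouched, which is how the data prepared above would need to be handled (the exact behavior of the project's function is an assumption):

import numpy as np

def normalizeData(data):
    # Z-score each column except column 0, which holds the bias ones.
    out = np.asarray(data, dtype=float).copy()
    mu = out[:, 1:].mean(axis=0)
    sigma = out[:, 1:].std(axis=0)
    out[:, 1:] = (out[:, 1:] - mu) / sigma
    return out
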