Example #1
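This snippet starts mid-file, so the imports and the training-data load are not shown. A plausible preamble, mirroring the test.csv load at the end of the example (the 'train.csv' filename, the csv_file_object name, and the list-comprehension conversion to a NumPy array are assumptions; data and header are the names the snippet itself uses), would be:

import csv
import numpy as np
import LogisticRegression as lr

csv_file_object = csv.reader(open('train.csv', 'rb')) #Load in the csv file
header = csv_file_object.next() #Skip the first line as it is a header
data = np.array([row for row in csv_file_object]) #Remaining rows as a string array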
#print header

#data columns: survived(0), pclass(1), name(2), sex(3), age(4), sibsp(5), parch(6), ticket(7), fare(8), cabin(9), embarked(10)
#selected feature columns in inputs: pclass->0, sex->1, age->2, sibsp->3, parch->4, fare->5
outputs = np.matrix(data[0:, 0]).T.astype(int) #survived labels as a column vector
inputs = data[0:, (1, 3, 4, 5, 6, 8)] #keep pclass, sex, age, sibsp, parch, fare
inputs[:, 1] = lr.eapply(lambda x: int(x), data[0:, 3] == "male") #encode sex: 1 for male, 0 for female

def liq(x):
	#Replace missing values (empty strings) with 0, leave everything else unchanged
	if x == '': return 0
	else: return x
inputs = np.matrix(lr.eapply(liq, inputs)) #apply liq element-wise over the feature matrix

inputs = np.matrix(inputs).T.astype(float) #transpose so samples are columns, and convert to float


#Fit 7 weights (bias + 6 features) by batch gradient descent; e appears to be the per-iteration error history
w, e = lr.batchGradientDescent(inputs, outputs, np.matrix([[0] * 7]).T, 0.5, -0.01, 100)
print e[0:4], '...' #first few error values
print e[-5:-1] #and the last few

"""s = 0.0
for m in range(outputs.shape[0]):
	s = s + np.abs(lr.calcLogReg(w, inputs[:, m], 0.5) - outputs[m])

print float(s) / outputs.shape[0]"""




csv_file_objectT = csv.reader(open('test.csv', 'rb')) #Load in the test csv file
headerT = csv_file_objectT.next() #Skip the first line as it is a header
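The example stops after reading the test-set header. A possible continuation (an assumption, not part of the original snippet; the dataT and inputsT names are hypothetical) would preprocess the test rows the same way as the training rows, assuming test.csv has the same layout minus the survived column so every column index shifts down by one, and score each passenger with the learned weights:

dataT = np.array([row for row in csv_file_objectT]) #remaining test rows as a string array
inputsT = dataT[0:, (0, 2, 3, 4, 5, 7)] #pclass, sex, age, sibsp, parch, fare
inputsT[:, 1] = lr.eapply(lambda x: int(x), dataT[0:, 2] == "male") #encode sex: 1 for male, 0 for female
inputsT = np.matrix(lr.eapply(liq, inputsT)).T.astype(float) #fill missing values, samples as columns

for m in range(inputsT.shape[1]):
	print lr.calcLogReg(w, inputsT[:, m], 0.5)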
Example #2
import numpy as np
from random import seed, gauss
import LogisticRegression as lr

#Toy data: the nine points of a 3x3 grid (samples become columns after the transpose)
i = np.matrix([
	[1,1], [1,2], [1,3],
	[2,1], [2,2], [2,3],
	[3,1], [3,2], [3,3]
	]).T

#Label is 1 exactly when the two coordinates sum to at least 4
o = np.matrix([[0, 0, 1, 0, 1, 1, 1, 1, 1]]).T

#Fit 3 weights (bias + 2 features) by batch gradient descent using the logistic derivative
w = lr.batchGradientDescent(
	lr.LogisticDerivative,
	i, o, np.matrix([[0], [0], [0]]), 0.5, -0.5, 70)

print w

#Evaluate the learned model on every grid point, then on one extra off-grid point
for u in xrange(3):
	for v in xrange(3):
		print u, v, lr.calcLogReg(w, np.matrix([u+1,v+1]).T, 0.5)

print lr.calcLogReg(w, np.matrix([3,0.85352157]).T, 0.5)
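Both examples import a local LogisticRegression module that is not shown. The sketch below is only a reconstruction consistent with the calls in Example #2 (eapply, LogisticDerivative, batchGradientDescent, calcLogReg); the _withBias helper, the bias handling, the averaged gradient, and the treatment of the 0.5 'offset' argument are all assumptions, and Example #1 evidently targets a version with a different batchGradientDescent signature (no derivative argument, an extra error history returned), so the real module may differ.

import numpy as np

def eapply(fn, m):
	#Apply fn to every element of an array/matrix, preserving its shape (assumed behaviour)
	a = np.asarray(m)
	return np.array([fn(v) for v in a.flat], dtype=object).reshape(a.shape)

def _withBias(x):
	#Prepend a constant 1 so the first weight acts as a bias term (assumption)
	return np.vstack((np.matrix([[1.0]]), np.matrix(x, dtype=float)))

def calcLogReg(w, x, offset):
	#Sigmoid hypothesis 1 / (1 + exp(-w . [1; x])); 'offset' is carried through
	#from the examples but ignored here because its original meaning is unknown
	z = float(w.T * _withBias(x))
	return 1.0 / (1.0 + np.exp(-z))

def LogisticDerivative(w, x, y, offset):
	#Gradient contribution of one sample: (h(x) - y) * [1; x]
	return (calcLogReg(w, x, offset) - float(y)) * _withBias(x)

def batchGradientDescent(derivative, inputs, outputs, w, offset, rate, iterations):
	#Plain batch gradient descent.  The gradient is averaged over the samples here
	#(the original module may sum instead); the examples pass a negative rate,
	#so the update is w += rate * gradient.
	m_count = outputs.shape[0]
	for _ in range(iterations):
		grad = np.matrix(np.zeros(w.shape))
		for m in range(m_count):
			grad = grad + derivative(w, inputs[:, m], outputs[m], offset)
		w = w + rate * (grad / m_count)
	return w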