Example 1
import numpy as np
import costFunctions as cf  # module name assumed; "cf" must provide randomThetas, deviation, costDeriv, costFunction

def regression(alpha, matrix, epsilon):
        a = 0.1  # How do we determine a and b?
        b = 10
        size = matrix.shape[1] - 1  # don't include the last column (the y values)
        theta = cf.randomThetas(a, b, size)  # random initial parameter vector

        temp = np.copy(theta)
        derVector = np.ones(size)
        currentRowIndex = 0
        iterations = 0

        while iterations < 10000:  # fixed iteration budget; epsilon is not used as a stopping test here
                iterations += 1
                theta = np.copy(temp)  # adopt the parameters computed on the previous pass
                # take one row of the matrix (dropping the y column) and flatten it to 1-D
                currentRow = np.array(matrix[currentRowIndex:currentRowIndex + 1, :matrix.shape[1] - 1])
                currentRow = currentRow[0]

                rowDeviation = cf.deviation(currentRow, theta)
                for i in range(size):
                        # per-sample gradient of the squared error: deviation times the feature value
                        derVector[i] = cf.costDeriv(rowDeviation, currentRow[i])
                        temp[i] = theta[i] - (float(alpha) / size) * derVector[i]

                # cycle through the rows of the training matrix
                if currentRowIndex == matrix.shape[0] - 1:
                        currentRowIndex = 0
                else:
                        currentRowIndex += 1
                print(cf.costFunction(theta, currentRow, matrix, size))
        return theta
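
Both examples lean on a cost-function helper module (imported as cf above, Cost below) whose source is not shown, and they call it with slightly different conventions: Example 1 passes deviation a row with the y column already dropped and calls a four-argument costFunction, while Example 2 keeps y as the last entry of each row and uses costFunction(thetas, data, gamma). The sketch below follows Example 2's conventions and is only a guess at the helpers' bodies; the module name and every implementation detail here are assumptions.

# costFunctions.py -- hypothetical sketch of the helpers the examples call as cf.* / Cost.*
import numpy as np

def randomThetas(a, b, size):
    # uniform random initial parameters in [a, b]
    return np.random.uniform(a, b, size)

def deviation(row, thetas):
    # prediction error h(x) - y for one sample; the row's last entry is assumed to be y
    x, y = row[:len(thetas)], row[len(thetas)]
    return float(np.dot(thetas, x) - y)

def costDeriv(dev, x_i):
    # partial derivative of the squared error for one sample: (h(x) - y) * x_i
    return dev * x_i

def costFunction(thetas, data, gamma=0):
    # mean squared error over all rows plus an optional ridge penalty
    rows = np.asarray(data)
    errors = rows[:, :-1].dot(thetas) - rows[:, -1]
    return float(np.mean(errors ** 2) / 2 + gamma * np.dot(thetas, thetas))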
Example 2
import time
import numpy as np
import Cost  # module not shown; must provide randomThetas, deviation, costFunction, costDeriv

def regression(alpha, epsilon, gamma, secondsToRun, data, ridge):
        if not ridge:
                gamma = 0  # no regularization term for plain least squares
        size = data[0].size - 1  # number of features (the last column holds y)
        thetas = Cost.randomThetas(0, 0, size)  # initial parameter vector
        cur = 0
        timeout = time.time() + secondsToRun
        itera = 0  # number of complete passes over the data
        previousCost = 0
        while True:
                va = np.asarray(data[cur])
                v = va[0]  # flatten the current row to a 1-D array
                thetaT = np.zeros(size)
                dev = Cost.deviation(v, thetas)
                cost = Cost.costFunction(thetas, data, gamma)
                # stop once the cost has converged or the time budget is spent
                if abs(cost - previousCost) < epsilon or timeout < time.time():
                        break
                for i in range(size):
                        # ridge-regularized SGD step: shrink theta by (1 - alpha*gamma), then move against the gradient
                        thetaT[i] = (1 - alpha * gamma) * thetas[i] - alpha * Cost.costDeriv(dev, v[i])

                thetas = thetaT

                if cur < data.shape[0] - 1:  # data.shape[0] is the number of rows
                        cur += 1
                else:
                        cur = 0
                        itera += 1  # finished one pass over the data
                previousCost = cost
        return thetas
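
Neither example shows how regression is driven. Assuming Example 2's signature, a dataset stored as a NumPy matrix whose last column is y (which is what the data[cur] / va[0] indexing suggests), and the helper sketch above, a driver might look like the following; all parameter values and variable names are illustrative only.

import numpy as np

# toy dataset: a bias column of ones, one feature, and the target y in the last column
x = np.linspace(0, 1, 50)
y = 3.0 + 2.0 * x + np.random.normal(scale=0.1, size=50)
data = np.matrix(np.column_stack((np.ones(50), x, y)))

# plain least squares: ridge=False forces gamma to 0 inside regression
thetas_ols = regression(alpha=0.01, epsilon=1e-6, gamma=0.0,
                        secondsToRun=5, data=data, ridge=False)

# ridge fit: gamma drives the (1 - alpha*gamma) shrinkage of each theta
thetas_ridge = regression(alpha=0.01, epsilon=1e-6, gamma=0.5,
                          secondsToRun=5, data=data, ridge=True)
print(thetas_ols, thetas_ridge)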
Example 3
File: test.py Project: ershook/hw1
import numpy as np
import costFunctions as cf  # module name assumed; provides deviation and gradient

def main():
        a = np.ones(10)
        print(cf.deviation(a, a))
        print(cf.gradient(10, np.array([1, 2, 3])))