Example #1
import math
import numpy as np
import homework1 as hw1
from regression import SSE, SSEgrad
# approxGradient (a numerical gradient checker used below) is assumed to be
# defined elsewhere in this project.

def main():
  X, Y = hw1.bishopCurveData()
  M = 1
  function = lambda x, y: math.pow(x, y)  # polynomial basis: x raised to the given power

  # Compare the numerical and analytical SSE gradients over a small grid of weights.
  for i in range(-1, 2, 1):
    for j in range(-1, 2, 1):
      print "theta_1 = {}, theta_2 = {}".format(i, j)
      print "approx: {}".format(approxGradient(SSE, X, Y, M, function, 1)(np.array([i, j])))
      print "analytical: {}".format(SSEgrad(X, Y, M, np.array([[i],[j]]), function))
Example #2
def test_bishop_curve():
    X, Y = hw1.bishopCurveData()
    # X, Y = hw1.regressTrainData()
    # X, Y = hw1.regressTestData()
    # X, Y = hw1.regressValidateData()
    
    #print "X: {}".format(X)
    #print "Y: {}".format(Y)
    ''' 
    for M in [0, 1, 3, 9]:
      for lambd in [math.pow(2, x) for x in range(-10,0,2)]:
        theta =  ridgeRegression(Y, _phi(X, M, basis_function_poly), lambd)
        sse = SSE(X, Y, M, theta, basis_function_poly)
        print "{}, {}, {}".format(M, lambd, sse[0][0])
        regressionPlot(X,Y,M, theta, lambd, basis_function_poly)
    '''
    # Fit an M = 9 polynomial with two different ridge penalties and plot both fits.
    theta = ridgeRegression(Y, _phi(X, 9, basis_function_poly), math.pow(2, -10))
    regressionPlot(X, Y, 9, theta, math.pow(2, -10), basis_function_poly)
    theta = ridgeRegression(Y, _phi(X, 9, basis_function_poly), math.pow(2, -2))
    regressionPlot(X, Y, 9, theta, math.pow(2, -2), basis_function_poly)
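ridgeRegression itself is not shown in these examples. Below is a minimal sketch of the standard ridge (L2-regularized least squares) closed form that a call like ridgeRegression(Y, Phi, lambd) presumably computes; the helper name ridge_regression and its exact argument handling are assumptions based on the call sites above.

import numpy as np

def ridge_regression(Y, Phi, lambd):
    # Closed form: theta = (Phi^T Phi + lambda * I)^(-1) Phi^T Y
    n_features = Phi.shape[1]
    A = Phi.T.dot(Phi) + lambd * np.eye(n_features)
    return np.linalg.solve(A, Phi.T.dot(Y))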
Example #3
import numpy as np
import scipy as sp
from scipy.optimize import minimize
import pylab as pl
from scipy.optimize import fmin_bfgs
from regression import _phi, SSE, SSEgrad, max_likelihood_weight
import homework1 as hw1
import math

# Data and basis shared by the routine below; basis_function(x, j) returns x**j.
X, Y = hw1.bishopCurveData()
basis_function = lambda x, y: math.pow(x, y)

def gradMinimize(threshold, guess, step, objectiveFcn, gradientFcn, M):
  """Plain gradient descent on objectiveFcn, stopping once the per-step
  decrease in the objective falls below threshold."""
  difference = threshold
  previousGuess = guess
  objPrev = objectiveFcn(X, Y, M, previousGuess, basis_function)
  count = 0
  while difference >= threshold:
    # Step against the gradient and measure how much the objective improved.
    nextGuess = previousGuess - step * gradientFcn(X, Y, M, previousGuess, basis_function)
    objNext = objectiveFcn(X, Y, M, nextGuess, basis_function)
    difference = objPrev - objNext
    previousGuess = nextGuess
    objPrev = objNext
    count += 1
  return previousGuess, count
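An illustrative call, not part of the original file: it reuses the X, Y, SSE, and SSEgrad names defined above, while the convergence threshold, step size, and zero starting guess are assumed values.

if __name__ == '__main__':
  # Assumed settings: threshold 1e-6, step size 0.01, M = 1 fit from a zero start.
  start = np.zeros((2, 1))
  theta_hat, n_iters = gradMinimize(1e-6, start, 0.01, SSE, SSEgrad, 1)
  print "theta: {}, iterations: {}".format(theta_hat, n_iters)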
Example #4
#!/usr/bin/env python

__author__ = 'Shaun Rong'
__version__ = '0.1'
__maintainer__ = 'Shaun Rong'
__email__ = '*****@*****.**'

from homework1 import regressionPlot, bishopCurveData

if __name__ == '__main__':
    # Plot an order-0 (constant) regression fit on the Bishop curve data.
    X, Y = bishopCurveData()
    regressionPlot(X, Y, 0)