# NOTE(review): a second `def gradient` with a different signature appears later
# in this file and shadows this definition at module level — confirm which one
# callers are meant to reach, and rename one of them.
def gradient(y, slope, guess, e, iter, prev_val, prev_gk):
    """Minimize `y` by conjugate-gradient descent with golden-section line search.

    Parameters
    ----------
    y        : objective expression (unused here; kept for interface parity).
    slope    : sympy matrix of partial derivatives of `y` w.r.t. x1, x2.
    guess    : [x, y] starting point for this call.
    e        : convergence tolerance on per-coordinate change.
    iter     : iteration counter; pass 0 on the initial call.
    prev_val : previous search direction (ignored when iter starts at 0).
    prev_gk  : previous gradient value (ignored when iter starts at 0).

    Returns
    -------
    (xiter, yiter) : module-level lists of visited x and y coordinates.

    Side effects: appends to module-level `xiter`/`yiter` and prints each
    new guess. Relies on module-level `x1`, `x2`, `h`, `function`, and
    `golden_search`.
    """
    # Iterate in a loop rather than recursing: the original recursive form
    # overflowed the stack when convergence was slow.
    while True:
        iter = iter + 1
        # slope_gk holds the gradient evaluated at the current guess
        slope_gk = slope.subs(x1, guess[0]).subs(x2, guess[1])
        if iter > 1:
            # Iteration 2 onwards: conjugate direction = -g_k + beta * d_{k-1},
            # with Fletcher-Reeves beta = (g_k . g_k) / (g_{k-1} . g_{k-1}).
            slope_val = (-1 * slope_gk) + (
                (slope_gk.transpose() * slope_gk)[0] / (prev_gk.transpose() * prev_gk)[0]
            ) * prev_val
        else:
            # First iteration: plain steepest-descent direction.
            slope_val = -1 * slope_gk
        # Record the current guess for plotting.
        xiter.append(guess[0])
        yiter.append(guess[1])
        # prev holds the pre-step guess for the convergence check below.
        prev = guess
        # gold parameterizes the step along slope_val by the symbol `h`,
        # ready for the golden-section line search.
        gold = [guess[0] + (h * slope_val[0, 0]), guess[1] + (h * slope_val[1, 0])]
        # alpha is the objective restricted to the search line.
        alpha = function(gold)
        # minimizer(function, start, end, interval spacing) returns the step
        # length that minimizes alpha over [-10, 10].
        minima = golden_search.minimizer(alpha, -10, 10, 1)
        # Take the optimal step to get the new guess.
        guess = [guess[0] + (minima * slope_val[0, 0]), guess[1] + (minima * slope_val[1, 0])]
        print(guess)
        # Converged when both coordinates moved by at most e.
        if math.fabs(prev[0] - guess[0]) <= e and math.fabs(prev[1] - guess[1]) <= e:
            break
        # Carry the current direction and gradient into the next iteration.
        prev_val = slope_val
        prev_gk = slope_gk
    return xiter, yiter
# NOTE(review): this redefines (and shadows) the 7-argument `def gradient`
# earlier in this file — confirm which one callers are meant to reach, and
# rename one of them.
def gradient(y, slope, guess, e):
    """Minimize `y` by steepest descent with golden-section line search.

    Parameters
    ----------
    y     : objective expression (unused here; kept for interface parity).
    slope : sympy matrix of partial derivatives of `y` w.r.t. x1, x2.
    guess : [x, y] starting point for this call.
    e     : convergence tolerance on per-coordinate change.

    Returns
    -------
    (xiter, yiter) : module-level lists of visited x and y coordinates.

    Side effects: appends to module-level `xiter`/`yiter`/`funcval` and
    prints each new guess. Relies on module-level `x1`, `x2`, `h`,
    `function`, and `golden_search`.
    """
    # Iterate in a loop rather than recursing: the original recursive form
    # overflowed the stack when convergence was slow.
    while True:
        # slope_val holds the gradient evaluated at the current guess.
        slope_val = slope.subs(x1, guess[0]).subs(x2, guess[1])
        # Record the current guess and objective value for plotting.
        xiter.append(guess[0])
        yiter.append(guess[1])
        funcval.append(function([guess[0], guess[1]]))
        # prev holds the pre-step guess for the convergence check below.
        prev = guess
        # gold parameterizes a step DOWN the gradient by the symbol `h`,
        # ready for the golden-section line search.
        gold = [guess[0] - (h * slope_val[0, 0]), guess[1] - (h * slope_val[1, 0])]
        # alpha is the objective restricted to the search line.
        alpha = function(gold)
        # minimizer(function, start, end, interval spacing) returns the step
        # length that minimizes alpha over [-10, 10].
        minima = golden_search.minimizer(alpha, -10, 10, 1)
        # Take the optimal step to get the new guess.
        guess = [guess[0] - (minima * slope_val[0, 0]), guess[1] - (minima * slope_val[1, 0])]
        print(guess)
        # Converged when both coordinates moved by at most e.
        if math.fabs(prev[0] - guess[0]) <= e and math.fabs(prev[1] - guess[1]) <= e:
            break
    return xiter, yiter