import scipy as sp
import matplotlib.pyplot as plt

import logres

# `admission_data` (two exam scores per row plus a 0/1 admission label) is
# assumed to be loaded at module level elsewhere in the project.


def run():
    theta = sp.zeros((3, 1))
    data = sp.copy(admission_data)
    X = data[:, [0, 1]]
    y = data[:, [2]]
    m = sp.shape(y)[0]

    # Add intercept term to X
    X = sp.concatenate((sp.ones((m, 1)), X), axis=1)

    """
    Part 1: Plotting
    """

    print('Plotting data with + indicating (y = 1) examples and o indicating (y = 0) examples.')
    logres.plotData(data)
    plt.xlabel('Exam 1 score')
    plt.ylabel('Exam 2 score')
    plt.legend(['Admitted', 'Not admitted'])
    plt.show()

    print('Program paused. Press enter to continue.')
    input()
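
    # For reference, a minimal sketch of what logres.plotData presumably does
    # (an assumption; the real function lives in logres): scatter admitted
    # applicants as '+' and rejected ones as 'o'.
    #
    #     def plotData(data):
    #         pos = data[data[:, 2] == 1]
    #         neg = data[data[:, 2] == 0]
    #         plt.plot(pos[:, 0], pos[:, 1], 'k+')
    #         plt.plot(neg[:, 0], neg[:, 1], 'yo')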

    """
    Part 2: Compute Cost and Gradient
    """

    (m, n) = X.shape

    initial_theta = sp.zeros((n, 1))

    (cost, grad) = logres.costFunction(initial_theta, X, y)

    print('Cost at initial theta (zeros): ', cost)
    print('Gradient at initial theta (zeros): ', grad)

    print('Program paused. Press enter to continue.')
    input()
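
    # For reference, a minimal sketch of what logres.costFunction presumably
    # computes (an assumption; the real implementation lives in logres): the
    # unregularized logistic cost and its gradient.
    #
    #     def costFunction(theta, X, y):
    #         m = sp.shape(y)[0]
    #         h = logres.sigmoid(X.dot(theta))
    #         cost = (sp.log(h).T.dot(-y) - sp.log(1.0 - h).T.dot(1 - y)) / m
    #         grad = X.T.dot(h - y) / m
    #         return cost, grad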

    """
    Part 3: Optimizing using fmin (SciPy's counterpart to MATLAB's fminunc)
    """

    (theta, cost) = logres.find_minimum_theta(theta, X, y)

    print('Cost at theta found by fmin: ', cost)
    print('Theta: ', theta)

    logres.plotDecisionBoundary(data, X, theta)

    plt.show()
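
    # A minimal sketch of how logres.find_minimum_theta might wrap
    # scipy.optimize.fmin (an assumption; the real wrapper lives in logres,
    # and the cost-only lambda here is hypothetical):
    #
    #     from scipy.optimize import fmin
    #
    #     def find_minimum_theta(theta, X, y):
    #         cost_only = lambda t: costFunction(t.reshape(-1, 1), X, y)[0]
    #         theta_opt, cost, _, _, _ = fmin(cost_only, theta.flatten(),
    #                                         maxiter=400, full_output=True)
    #         return theta_opt, cost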

    """
    Part 4: Predict and Accuracies
    """

    prob = logres.sigmoid(sp.asmatrix([1, 45, 85]).dot(theta))
    print('For a student with scores 45 and 85, we predict an admission probability of ', prob[0, 0])
    print('Program paused. Press enter to continue.')
    input()
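

# The Flask endpoint further below calls a bare `sigmoid`; this minimal
# definition matches what logres.sigmoid presumably computes (an assumption,
# since logres itself is not shown here):
def sigmoid(z):
    """Element-wise logistic function: 1 / (1 + exp(-z))."""
    return 1.0 / (1.0 + sp.exp(-z))
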
def predict(Theta1, Theta2, X):
    """Feed-forward prediction for a three-layer neural network.

    Returns one 1-indexed class label per row of X.
    """
    m = X.shape[0]

    # Input layer: prepend the bias column, then compute hidden activations
    a1 = sp.c_[sp.ones((m, 1)), X]
    z2 = Theta1.dot(a1.T)
    a2 = logres.sigmoid(z2)
    a2 = sp.r_[sp.ones((1, a2.shape[1])), a2]

    # Output layer
    z3 = Theta2.dot(a2)
    a3 = logres.sigmoid(z3)

    # Class labels are 1-indexed, so shift each argmax up by one
    result = [sp.argmax(a3[:, i]) + 1 for i in range(m)]
    return result
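
# Hedged usage sketch for predict(); the weight shapes are hypothetical
# (25 hidden units, 10 output labels, 400 input features):
#
#     Theta1 = sp.random.randn(25, 401)    # hidden-layer weights (incl. bias)
#     Theta2 = sp.random.randn(10, 26)     # output-layer weights (incl. bias)
#     labels = predict(Theta1, Theta2, X)  # one 1-indexed label per row of X
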
def computeCost(theta, X, y, lamda):
    """Regularized logistic-regression cost.

    J = (1/m) * sum(-y*log(h) - (1-y)*log(1-h))
        + (lamda/(2m)) * sum(theta[1:]**2)

    Copied from logres because fmin_cg passes theta as a flat 1-D array,
    which breaks the matrix-shaped original.
    """
    m = sp.shape(X)[0]
    hypo = logres.sigmoid(X.dot(theta))
    term1 = sp.log(hypo).dot(-y)
    term2 = sp.log(1.0 - hypo).dot(1 - y)
    left_hand = (term1 - term2) / m
    # Regularize every parameter except the intercept theta[0]
    right_hand = theta[1:].dot(theta[1:]) * lamda / (2 * m)
    return left_hand + right_hand

def logistic_regression():
    """
    Predicts the probability that a student will be admitted to a
    university, based on their scores on two exams.

    Query params:
    exam1: integer score
    exam2: integer score
    """
    # `request` and `jsonify` come from flask; the trained `theta` is
    # expected to be available at module level.
    exam1 = int(request.args.get('exam1'))
    exam2 = int(request.args.get('exam2'))
    prob = sigmoid(sp.asmatrix([1, exam1, exam2]).dot(theta))
    return jsonify({
        'probability_accepted': prob[0, 0]
    })
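
# Hedged usage sketch; the route path and the `app` object are hypothetical,
# since the Flask wiring is not shown in this file:
#
#     with app.test_client() as client:
#         resp = client.get('/logistic_regression?exam1=45&exam2=85')
#         print(resp.get_json())  # {'probability_accepted': ...}
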
def gradientCost(theta, X, y, lamda):
    """Regularized logistic-regression gradient.

    Copied from logres because fmin_cg passes theta as a flat 1-D array,
    which breaks the matrix-shaped original.
    """
    m = sp.shape(X)[0]
    grad = X.T.dot(logres.sigmoid(X.dot(theta)) - y) / m
    # Regularize every component except the intercept gradient
    grad[1:] = grad[1:] + (theta[1:] * lamda) / m
    return grad
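
# Hedged usage sketch: these copied functions are shaped for
# scipy.optimize.fmin_cg, which passes theta as a flat 1-D array (the
# dimension problem noted in the docstrings). `initial_theta`, `X_train`,
# `y_train`, and `lamda` are placeholders:
#
#     from scipy.optimize import fmin_cg
#
#     theta_opt = fmin_cg(computeCost, initial_theta, fprime=gradientCost,
#                         args=(X_train, y_train.flatten(), lamda),
#                         maxiter=400)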