def run():
    """Run the un-regularized logistic regression exercise on the
    college-admission data set: plot the data, evaluate the cost/gradient
    at an all-zeros theta, optimize theta, and predict for one student.

    Relies on module-level names: ``sp`` (scipy/numpy alias), ``plt``
    (matplotlib.pyplot), ``logres`` (exercise helpers), and
    ``admission_data`` (m x 3 array: exam1, exam2, admitted-flag).

    NOTE(review): this definition is shadowed by the second ``run`` defined
    later in this file — consider giving the two distinct names.
    """
    theta = sp.zeros((3, 1))
    data = sp.copy(admission_data)          # copy so slicing never mutates the source
    X = data[:, [0, 1]]                     # two exam scores as features
    y = data[:, [2]]                        # admission label (0/1), kept as a column
    m = sp.shape(y)[0]                      # number of training examples

    # Add intercept term to x
    X = sp.concatenate((sp.ones((m, 1)), X), axis=1)

    """
    Part 1: Plotting
    """

    print('Plotting data with + indicating (y = 1) examples and o indicating (y = 0) examples.')
    logres.plotData(data)
    plt.xlabel('Exam 1 score')
    plt.ylabel('Exam 2 score')
    # BUG FIX: labels must be passed as one list. With two positional string
    # arguments, matplotlib interprets the first as the *handles* sequence,
    # so the legend labels were wrong. (Matches the list form used by the
    # other run() in this file.)
    plt.legend(['Admitted', 'Not admitted'])
    plt.show()

    print('Program paused. Press enter to continue.')
    raw_input()  # Python 2 pause-for-keypress

    """
    Part 2: Compute Cost and Gradient
    """

    (m, n) = X.shape  # n now includes the intercept column

    initial_theta = sp.zeros((n, 1))

    (cost, grad) = logres.costFunction(initial_theta, X, y)

    print('Cost at initial theta (zeros): ', cost)
    print('Gradient at initial theta (zeros): ', grad)

    print('Program paused. Press enter to continue.')
    raw_input()

    """
    Part 3: Optimizing using fminunc
    """

    (theta, cost) = logres.find_minimum_theta(theta, X, y)

    print('Cost at theta found by fmin: ', cost)
    print('Theta: ', theta)

    logres.plotDecisionBoundary(data, X, theta)

    plt.show()

    """
    Part 4: Predict and Accuracies
    """

    # Row vector [1, exam1, exam2] (with intercept) dotted with theta,
    # squashed through the sigmoid, gives the admission probability.
    prob = logres.sigmoid(sp.asmatrix([1, 45, 85]).dot(theta))
    print('For a student with scores 45 and 85, we predict an admission probability of ', prob[0, 0])
    print('Program paused. Press enter to continue.')
def run():
    """Run the regularized logistic regression exercise on the microchip
    QA data set: plot the data, map the two test scores to polynomial
    features, evaluate the regularized cost at an all-zeros theta,
    optimize theta, and plot the resulting decision boundary.

    Relies on module-level names: ``sp`` (scipy/numpy alias), ``plt``
    (matplotlib.pyplot), ``logres`` (exercise helpers), and
    ``microchip_data`` (m x 3 array: test1, test2, pass-flag).

    NOTE(review): this second ``run`` shadows the earlier ``run`` in this
    file — consider giving the two distinct names.
    """
    data = sp.copy(microchip_data)  # copy so slicing never mutates the source
    y = data[:, [2]]                # pass/fail label (0/1), kept as a column
    # (Removed unused locals from the original: an initial ``theta`` that was
    # never read, ``m``, and a raw-feature ``X`` that was overwritten by
    # mapFeature() below before any use.)

    logres.plotData(data)
    plt.xlabel('Microchip Test 1')
    plt.ylabel('Microchip Test 2')
    plt.legend(['y = 1', 'y = 0'])
    plt.show()

    """
    Regularized Logistic Regression
    """

    # Expand the two raw scores into a higher-order polynomial feature set.
    X = logres.mapFeature(data[:, 0], data[:, 1])

    initial_theta = sp.zeros((X.shape[1], 1))

    lmbda = 1  # regularization strength

    (J, grad) = logres.costFunctionReg(initial_theta, X, y, lmbda)

    print('Cost at initial theta (zeros): ', J[0,0])
    print('Program paused. Press enter to continue.')
    raw_input()  # Python 2 pause-for-keypress

    """
    Regularization and Accuracies
    """

    initial_theta = sp.zeros((X.shape[1], 1))
    lmbda = 1

    (theta, J) = logres.find_minimum_theta_reg(initial_theta, X, y, lmbda)

    logres.plotDecisionBoundary(data, X, theta)
    plt.legend(['y = 1', 'y = 0', 'Decision Boundary'])
    plt.show()