Example No. 1
def predict(theta, X):
    """
    Predict whether each example in X is admitted (label 1 or 0),
    using the learned logistic regression parameters theta.
    """

    # Classify as positive when the predicted probability is at least 0.5
    p = sigmoid(np.dot(X, theta)) >= 0.5
    return p
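
The predict function above relies on a sigmoid helper and NumPy, neither of which appears in the excerpt. A minimal sketch of that helper, plus an illustrative call (the theta and X values are made up purely for demonstration), might look like this:

import numpy as np

def sigmoid(z):
    """Logistic function, applied element-wise."""
    return 1.0 / (1.0 + np.exp(-z))

# Illustrative usage: each row of X starts with a 1 for the intercept term
theta = np.array([-25.0, 0.2, 0.2])
X = np.array([[1.0, 45.0, 85.0],
              [1.0, 30.0, 40.0]])
print(predict(theta, X))   # -> [ True False] for these made-up values
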
Example No. 2
    # Newton-CG works well even without the Hessian.
    # Per %timeit, about 3x faster than Nelder-Mead.
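    # jac=True assumes costFunction returns a (cost, gradient) pair,
    # so minimize needs no separate gradient callable.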
    result_Newton_CG = minimize(lambda t: costFunction(t, X_array, y_array),
                                initial_theta,
                                method='Newton-CG',
                                jac=True)

    # Print theta to screen
    print('Cost at theta found by Nelder-Mead: %f' % result_Nelder_Mead['fun'])
    print('theta:', result_Nelder_Mead['x'])
    print('Cost at theta found by Newton-CG:   %f' % result_Newton_CG['fun'])
    print('theta:', result_Newton_CG['x'])
    print()
    theta = result_Nelder_Mead['x']

    # Plot Boundary
    plotData(X, y, theta)

    ## ============== Part 4: Predict and Accuracies ==============
    #  Predict probability for a student with score 45 on exam 1
    #  and score 85 on exam 2
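    #  (the leading 1 in the feature vector below is the intercept term)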

    prob = sigmoid(np.dot(np.array([1, 45, 85]), theta))
    print('For a student with scores 45 and 85, we predict an admission '
          'probability of %.1f%%' % (prob * 100))

    # Compute accuracy on our training set
    p = predict(theta, X_array)

    print('Train Accuracy: %.1f%%' % ((p == y_array).mean() * 100))
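
The minimize call in this example passes jac=True, which assumes costFunction returns both the cost and its gradient. A minimal sketch of such a function, with the signature inferred from the snippet (the implementation itself is an assumption, not the original author's code), could be:

import numpy as np

def costFunction(theta, X, y):
    """
    Logistic regression cost and gradient (assumed shape, for illustration).
    X is (m, n+1) with an intercept column, y is (m,), theta is (n+1,).
    """
    m = y.size
    h = sigmoid(np.dot(X, theta))          # predicted probabilities
    cost = (-np.dot(y, np.log(h)) - np.dot(1 - y, np.log(1 - h))) / m
    grad = np.dot(X.T, h - y) / m          # gradient of the cost w.r.t. theta
    return cost, grad

With this shape, minimize(..., jac=True) unpacks the returned pair into the objective value and the Jacobian, so no separate gradient function has to be supplied.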