Example #1
    # Debug output: probabilities from the first classifier alone.
    theta0 = all_theta[0]
    p = sigmoid(np.dot(X, theta0))
    print p.shape
    print p

    # sigmoid is monotonic, so the argmax of the raw scores X.theta picks the
    # same class as the argmax of the probabilities.
    p = np.dot(X, all_theta.T)
    return np.array(map(max_index, p))


def ex3():
    input_layer_size = 400
    num_labels = 10

    vars = io.loadmat(os.path.join(ex3path, 'ex3data1.mat'))
    _X, y = np.array(vars['X'] * 128 + 128,
                     dtype=np.ubyte), vars['y'].reshape(vars['y'].size)
    size = (math.sqrt(_X.shape[1]), math.sqrt(_X.shape[1]))

    #FIXME: image loading seems not fully correct
    #img = make_grid(np.random.permutation(_X)[:400,:], size, 20, 20)
    #img.show()

    print 'Training One-vs-All Logistic Regression'
    lambda_ = 1.
    all_theta = one_vs_all(_X, y, num_labels, lambda_)
    p = predict_one_vs_all(all_theta, _X)
    print 'Training Set Accuracy:', (p == y).mean() * 100


if __name__ == '__main__':
    ex3path = os.path.join(ml_class_path(), 'ex3')
    ex3()
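
The snippet calls sigmoid and max_index without showing them. A minimal sketch of what they plausibly look like; the +1 offset is an assumption that row c of all_theta was trained for label c + 1 (the exercise stores digits as 1-based labels, with 10 standing for digit 0):

import numpy as np

def sigmoid(z):
    # Element-wise logistic function.
    return 1.0 / (1.0 + np.exp(-z))

def max_index(scores):
    # Index of the best-scoring classifier, shifted to the 1-based labels used in y.
    return np.argmax(scores) + 1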
Example #2
    pl.show()

    print 'Theta computed from gradient descent:', theta

    # Normalize the query point (1650 sq-ft, 3 bedrooms) with the same mu/sigma
    # used to scale the training set, then prepend the intercept term.
    a = (1650 - mu[0]) / sigma[0]
    b = (3 - mu[1]) / sigma[1]

    price = np.matrix([1, a, b]) * theta
    print 'Predicted price of a 1650 sq-ft, 3 br house',\
        '(using gradient descent):', price

    # Normal Equations
    data = load_txt(os.path.join(ex1path, 'ex1data2.txt'))
    _X = data[:, :2]
    y = data[:, 2]
    m = y.size

    X = np.hstack((np.ones((m, 1)), _X))   # prepend the intercept column of ones

    theta = normal_eqn(X, y)

    print 'Theta computed from the normal equations:', theta

    price = np.matrix('1 1650 3') * theta
    print 'Predicted price of a 1650 sq-ft, 3 br house',\
        '(using normal equations):', price


if __name__ == '__main__':
    ex1path = os.path.join(ml_class_path(), 'ex1')
    ex1_multi()
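
load_txt and normal_eqn are not shown in the snippet; load_txt presumably wraps np.loadtxt with a comma delimiter. A minimal sketch of the closed-form solve, returning a column vector so that np.matrix('1 1650 3') * theta works as above (the final reshape is an assumption, not part of the original):

import numpy as np

def normal_eqn(X, y):
    # Closed-form least squares: theta = (X'X)^-1 X'y, via the pseudo-inverse
    # so a singular X'X does not blow up.
    theta = np.dot(np.linalg.pinv(np.dot(X.T, X)), np.dot(X.T, y))
    return theta.reshape(-1, 1)   # column vector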
Example #3
    # Debug output: probabilities from the first classifier alone.
    theta0 = all_theta[0]
    p = sigmoid(np.dot(X, theta0))
    print p.shape
    print p

    # sigmoid is monotonic, so the argmax of the raw scores picks the same class
    # as the argmax of the probabilities.
    p = np.dot(X, all_theta.T)
    return np.array(map(max_index, p))


def ex3():
    input_layer_size = 400
    num_labels = 10

    vars = io.loadmat(os.path.join(ex3path, 'ex3data1.mat'))
    _X, y = np.array(vars['X']*128+128, dtype=np.ubyte), vars['y'].reshape(vars['y'].size)
    size = (math.sqrt(_X.shape[1]), math.sqrt(_X.shape[1]))

    #FIXME: image loading seems not fully correct
    #img = make_grid(np.random.permutation(_X)[:400,:], size, 20, 20)
    #img.show()

    print 'Training One-vs-All Logistic Regression'
    lambda_ = 1.
    all_theta = one_vs_all(_X, y, num_labels, lambda_)
    p = predict_one_vs_all(all_theta, _X)
    print 'Training Set Accuracy:', (p == y).mean() * 100


if __name__ == '__main__':
    ex3path = os.path.join(ml_class_path(), 'ex3')
    ex3()
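
one_vs_all is the training counterpart of the prediction code above and is not shown. A rough sketch under two assumptions: it reuses the logistic_cost_function / logistic_grad_function pair that appears in Example #4, and it adds no intercept column (the visible prediction multiplies X directly by all_theta.T, so the parameter rows must match the raw feature count):

import numpy as np
import scipy.optimize as opt

def one_vs_all(X, y, num_labels, lambda_):
    # Fit one regularized logistic-regression classifier per label (1..num_labels)
    # and stack the learned parameter vectors into an array of shape (num_labels, n).
    m, n = X.shape
    all_theta = np.zeros((num_labels, n))
    for c in range(1, num_labels + 1):
        yc = (y == c).astype(float)               # one-vs-rest targets for label c
        all_theta[c - 1] = opt.fmin_ncg(
            lambda t: logistic_cost_function(t, X, yc, lambda_),
            np.zeros(n),
            lambda t: logistic_grad_function(t, X, yc, lambda_),
            maxiter=50, disp=False)
    return all_theta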
Example #4
    def costf(theta):
        # Regularized cost with the data and lambda_ bound in, as required by fmin_ncg.
        return logistic_cost_function(theta, X, y, lambda_)

    def difff(theta):
        # Matching gradient, passed to the optimizer as fprime.
        return logistic_grad_function(theta, X, y, lambda_)

    maxiter = 50
    theta, allvec = opt.fmin_ncg(costf,
                                 initial_theta,
                                 difff,
                                 retall=1,
                                 maxiter=maxiter,
                                 callback=step())
    #    theta, allvec = opt.fmin_bfgs(costf, initial_theta, difff, retall=1, maxiter=maxiter, callback=step())
    print 'optimal cost:', costf(theta)

    Jhist = [costf(t) for t in allvec]   # cost at each iterate recorded via retall=1
    pl.figure()
    pl.plot(Jhist)
    plot_decision_boundary(theta, X, y)

    # Compute accuracy on our training set: X.theta > 0 is equivalent to
    # sigmoid(X.theta) >= 0.5, i.e. predicting class 1.
    h = np.dot(X, theta)
    print 'Train Accuracy:', ((h > 0) == y).mean() * 100


if __name__ == '__main__':
    ex2path = os.path.join(ml_class_path(), 'ex2')
    ex2_reg()
    pl.show()
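
logistic_cost_function and logistic_grad_function are not shown here either. A minimal sketch of the standard regularized forms they presumably implement, assuming y is a 0/1 vector and theta[0] is the intercept term (which is left out of the regularization):

import numpy as np
from scipy.special import expit as sigmoid   # logistic function

def logistic_cost_function(theta, X, y, lambda_):
    # J = (1/m) * sum(-y*log(h) - (1-y)*log(1-h)) + (lambda/(2m)) * sum(theta[1:]^2)
    m = y.size
    h = sigmoid(np.dot(X, theta))
    reg = (lambda_ / (2.0 * m)) * np.sum(theta[1:] ** 2)
    return (-np.dot(y, np.log(h)) - np.dot(1 - y, np.log(1 - h))) / m + reg

def logistic_grad_function(theta, X, y, lambda_):
    # grad = (1/m) * X'(h - y), plus (lambda/m)*theta_j for j >= 1.
    m = y.size
    h = sigmoid(np.dot(X, theta))
    grad = np.dot(X.T, h - y) / m
    grad[1:] += (lambda_ / m) * theta[1:]
    return grad

Returning the gradient as a flat array is what fmin_ncg expects from its fprime argument.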