Code example #1
import numpy as np
from scipy.optimize import fmin_cg


def oneVsAll(X, y, num_labels, lamda):
    m, n = X.shape
    all_theta = np.zeros((num_labels, n + 1))
    X = np.hstack((np.ones((m, 1)), X))  # prepend the intercept column

    # lrCostFunction returns a (cost, grad) pair; split it into the two
    # callables fmin_cg expects (the binary targets arrive via args).
    cost_function = lambda p, y: lrCostFunction(p, X, y, lamda)[0]
    grad_function = lambda p, y: lrCostFunction(p, X, y, lamda)[1]

    for i in range(1, num_labels + 1):
        initial_theta = np.zeros(n + 1)
        # Train a one-vs-rest classifier for label i against binary targets.
        all_theta[i - 1, :] = fmin_cg(cost_function, initial_theta, fprime=grad_function,
                                      args=((y == i).astype(int),), maxiter=100, disp=False)
        print('Finished training one-vs-all classifier for label %d' % i)

    return all_theta
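
Every snippet on this page leans on a vectorized `lrCostFunction` that is not itself shown. A minimal sketch of a compatible implementation, assuming the `(theta, X, y, lambda)` signature used above and the standard regularized cost and gradient with the bias term unpenalized:

import numpy as np

def lrCostFunction(theta, X, y, lamda):
    """Regularized logistic-regression cost and gradient (bias unpenalized)."""
    m = y.size
    h = 1.0 / (1.0 + np.exp(-X.dot(theta)))  # sigmoid hypothesis
    reg = theta.copy()
    reg[0] = 0                               # do not penalize the intercept
    J = (-y.dot(np.log(h)) - (1 - y).dot(np.log(1 - h))) / m \
        + lamda * reg.dot(reg) / (2 * m)
    grad = X.T.dot(h - y) / m + lamda * reg / m
    return J, grad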
Code example #2
def output(part_id):
    # Random Test Cases
    X = np.column_stack(
        (np.ones(20), (np.exp(1) * np.sin(np.linspace(1, 20, 20))),
         (np.exp(0.5) * np.cos(np.linspace(1, 20, 20)))))
    y = np.sin(X[:, 0] + X[:, 1]) > 0

    Xm = np.array([[-1, -1], [-1, -2], [-2, -1], [-2, -2], [1, 1], [1, 2],
                   [2, 1], [2, 2], [-1, 1], [-1, 2], [-2, 1], [-2, 2], [1, -1],
                   [1, -2], [-2, -1], [-2, -2]]).reshape((16, 2))
    ym = np.array([1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
                   4]).reshape(16, 1)
    t1 = np.sin(np.array(range(1, 24, 2)).reshape(3, 4).T)
    t2 = np.cos(np.array(range(1, 40, 2)).reshape(5, 4).T)

    fname = srcs[part_id - 1].rsplit('.', 1)[0]
    mod = __import__(fname, fromlist=[fname], level=1)
    func = getattr(mod, fname)  # resolved dynamically, though the branches below call each function directly

    if part_id == 1:
        J = lrCostFunction(np.array([0.25, 0.5, -0.5]), X, y, 0.1)
        grad = gradientFunctionReg(np.array([0.25, 0.5, -0.5]), X, y, 0.1)
        return sprintf('%0.5f ', np.hstack((J, grad)).tolist())
    elif part_id == 2:
        return sprintf('%0.5f ', oneVsAll(Xm, ym, 4, 0.1))
    elif part_id == 3:
        return sprintf('%0.5f ', predictOneVsAll(t1, Xm))
    elif part_id == 4:
        return sprintf('%0.5f ', predict(t1, t2, Xm))
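
`sprintf` is not a Python builtin; these submit scripts define a small formatting helper. A plausible sketch (hypothetical: the exact helper is not shown, and the column-major `order='F'` flattening that mimics MATLAB output is an assumption):

import numpy as np

def sprintf(fmt, arg):
    # Hypothetical helper: apply a printf-style format to each element
    # of a scalar, list, or array and concatenate the results.
    return ''.join(fmt % e for e in np.asarray(arg).ravel(order='F'))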
Code example #4
def output(partId):
    # Random Test Cases
    X = np.stack([np.ones(20),
                  np.exp(1) * np.sin(np.arange(1, 21)),
                  np.exp(0.5) * np.cos(np.arange(1, 21))], axis=1)
    y = (np.sin(X[:, 0] + X[:, 1]) > 0).astype(float)
    Xm = np.array([[-1, -1], [-1, -2], [-2, -1], [-2, -2], [1, 1], [1, 2],
                   [2, 1], [2, 2], [-1, 1], [-1, 2], [-2, 1], [-2, 2], [1, -1],
                   [1, -2], [-2, -1], [-2, -2]])
    ym = np.array([0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3])
    t1 = np.sin(np.reshape(np.arange(1, 25, 2), (4, 3), order='F'))
    t2 = np.cos(np.reshape(np.arange(1, 41, 2), (4, 5), order='F'))
    if partId == '1':
        J, grad = lrCostFunction(np.array([0.25, 0.5, -0.5]), X, y, 0.1)
        out = formatter('%0.5f ', J)
        out += formatter('%0.5f ', grad)
    elif partId == '2':
        out = formatter('%0.5f ', oneVsAll(Xm, ym, 4, 0.1))
    elif partId == '3':
        out = formatter('%0.5f ', predictOneVsAll(t1, Xm))
    elif partId == '4':
        out = formatter('%0.5f ', predict(t1, t2, Xm))
    return out
Code example #6
def oneVsAll(X, y, num_labels, l):
    # Prepend the intercept column.
    X = np.concatenate((np.ones((X.shape[0], 1)), X), axis=1)
    m, n = X.shape
    all_theta = np.zeros((num_labels, n))

    for x in range(num_labels):
        initial_theta = np.zeros(n)
        # fmin_bfgs with full_output/retall returns eight values;
        # only the optimal theta is kept here.
        options = {'full_output': True, 'retall': True}
        theta, cost, _, _, _, _, _, allvecs = fmin_bfgs(
            lambda t: lrCostFunction(X, y == x, t, l),
            initial_theta, maxiter=50, **options)
        all_theta[x, :] = theta

    return all_theta
Code example #7
File: submit.py Project: libelo/py-ng-ml
def output(partId):
    # Random Test Cases
    X = column_stack((ones(20), exp(1) * sin(arange(1, 21, 1)), exp(0.5) * cos(arange(1, 21, 1))))
    y = (sin(X[:,0] + X[:,1]) > 0).astype(int)
    Xm = array([[-1, -1], [-1, -2], [-2, -1], [-2, -2], [1, 1], [1, 2], [2, 1], [2, 2],
                [-1, 1], [-1, 2], [-2, 1], [-2, 2], [1, -1], [1, -2], [-2, -1], [-2, -2]])
    ym = array([1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4])
    t1 = sin(arange(1, 25, 2).reshape(4, 3, order='F'))
    t2 = cos(arange(1, 41, 2).reshape(4, 5, order='F'))
    
    if partId == '1':
        J, grad = lrCostFunction(array([0.25, 0.5, -0.5]), X, y, 0.1)
        out = sprintf('%0.5f ', J)
        return out + sprintf('%0.5f ', grad)
    elif partId == '2':
        return sprintf('%0.5f ', oneVsAll(Xm, ym, 4, 0.1))
    elif partId == '3':
        return sprintf('%0.5f ', predictOneVsAll(t1, Xm))
    elif partId == '4':
        return sprintf('%0.5f ', predict(t1, t2, Xm))
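
Parts 3 and 4 grade `predictOneVsAll` and `predict`, which live in the exercise files rather than in these snippets. For part 4, `predict(Theta1, Theta2, X)` is the feedforward pass of the course's two-layer network; a minimal sketch under that assumption:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def predict(Theta1, Theta2, X):
    """Feedforward a two-layer network; returns 1-indexed class labels."""
    m = X.shape[0]
    a1 = np.hstack((np.ones((m, 1)), X))    # input layer plus bias unit
    a2 = sigmoid(a1.dot(Theta1.T))          # hidden-layer activations
    a2 = np.hstack((np.ones((m, 1)), a2))   # bias unit for the hidden layer
    a3 = sigmoid(a2.dot(Theta2.T))          # output-layer activations
    return np.argmax(a3, axis=1) + 1        # MATLAB-style 1-indexed labels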
Code example #8
# =================== 1.2 Visualizing the data ===========================
# Randomly select 100 data points to display
rand_indices = np.random.permutation(m)
sel = X[rand_indices[0:100], :]
plt.figure()
displayData(sel, padding=1)
plt.show()

# ===================== 1.3 Vectorizing logistic regression ==============
theta_t = np.array([-2, -1, 1, 2])
X_t = np.hstack((np.ones((5, 1)),
                 np.arange(1, 16).reshape(5, 3, order='F') / 10.0))
y_t = np.array([1, 0, 1, 0, 1])
lambda_t = 3

cost, grad = lrCostFunction(theta_t, X_t, y_t, lambda_t)

print('Cost:', cost)
print('Expected cost: 2.534819')
print('Gradients: \n', grad)
print('Expected gradients: \n  [ 0.146561 -0.548558 0.724722 1.398003 ]')

# =================== 1.4 One-vs-all classification ======================

l = 0.1
all_theta = oneVsAll(X, y, num_labels, l)

# ==================== 1.4.1 One-vs-all prediction=========================

pred = predictOneVsAll(all_theta, X)
print('Training Set Accuracy:', np.mean(pred == y) * 100)
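
`predictOneVsAll` only needs to pick, per row, the classifier with the highest score; since the sigmoid is monotonic, the argmax over the linear scores suffices. A minimal sketch, assuming `all_theta` is `(num_labels, n + 1)` and labels are 1-indexed as in this script:

import numpy as np

def predictOneVsAll(all_theta, X):
    m = X.shape[0]
    X = np.hstack((np.ones((m, 1)), X))  # prepend the intercept column
    # One score per (example, label); the highest score wins.
    return np.argmax(X.dot(all_theta.T), axis=1) + 1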
Code example #9
File: ex3.py Project: dchhitarka/machinelearning
# Test case for lrCostFunction
print('\nTesting lrCostFunction() with regularization')

X_t = np.hstack((np.ones((len(y), 1)), X))  # full data matrix with intercept column
y_t = np.eye(10)[y].reshape(-1, 10)         # one-hot encoding of the labels

from lrCostFunction import lrCostFunction, gradFind

lambda_t = 3
X_s = np.hstack((np.ones((5, 1)), (np.arange(1, 16).reshape(3, 5)).T / 10))
y_s = np.array([1, 0, 1, 0, 1]).reshape(-1, 1)
theta_s = np.array([-2, -1, 1, 2])[:, np.newaxis]

J = lrCostFunction(theta_s, X_s, y_s, lambda_t)
grad = gradFind(theta_s, X_s, y_s, lambda_t)

print('\nCost:', J)
print('Expected cost: 2.534819\n')
print('Gradients:\n')
print(grad)
print('Expected gradients:')
print(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n')

input('Program paused. Press enter to continue.\n')

## ============ Part 2b: One-vs-All Training ============
print('\nTraining One-vs-All Logistic Regression...\n')

# Not WORKING
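
Since the author flags Part 2b as not working, here is a minimal sketch of a `oneVsAll` built on `scipy.optimize.minimize`. It assumes `lrCostFunction` and `gradFind` accept a 1-D `theta` (as `minimize` requires) and iterates over `np.unique(y)` to avoid guessing whether labels are 0- or 1-indexed:

import numpy as np
from scipy.optimize import minimize

def oneVsAll(X, y, num_labels, lambda_t):
    # Assumes lrCostFunction(theta, X, y, lambda_t) -> cost and
    # gradFind(theta, X, y, lambda_t) -> gradient, both with 1-D theta.
    m, n = X.shape
    X = np.hstack((np.ones((m, 1)), X))      # prepend the intercept column
    all_theta = np.zeros((num_labels, n + 1))
    for i, c in enumerate(np.unique(y)):
        res = minimize(lrCostFunction, np.zeros(n + 1),
                       args=(X, (y == c).astype(float).ravel(), lambda_t),
                       jac=gradFind, method='CG', options={'maxiter': 100})
        all_theta[i] = res.x
    return all_theta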
Code example #10
File: ex3.py Project: ravi-dalal/ml-repo
rand_indices = np.random.permutation(m)
sel = np.take(X, rand_indices[0:100], 0)
plt, h, display_array = displayData(sel)
plt.show()

# ============ Part 2a: Vectorize Logistic Regression ============
# Test case for lrCostFunction
print('\nTesting lrCostFunction() with regularization')
theta_t = np.array([-2, -1, 1, 2]).reshape(4, 1)
#print(theta_t)
X_t = np.column_stack((np.ones((5, 1)),
                       np.reshape(np.arange(1, 16), (5, 3), order="F") / 10))
#print(X_t)
y_t = np.array([1, 0, 1, 0, 1]).reshape(5, 1)
lambda_t = 3
J, grad = lrCostFunction(theta_t, X_t, y_t, lambda_t, return_grad=True)
print('\nCost: \n', J)
print('Expected cost: 2.534819\n')
print('Gradients:\n', grad)
print('Expected gradients:\n')
print(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n')

# ============ Part 2b: One-vs-All Training ============
print('\nTraining One-vs-All Logistic Regression...\n')

_lambda = 0.1
all_theta = oneVsAll(X, y, 10, _lambda)
#print(all_theta.shape)

# ================ Part 3: Predict for One-Vs-All ================
Code example #11
File: ex3.py Project: jy2881/AndrewNg-ML-python
example_width = round(np.sqrt(sel.shape[1]))
displayData.displayData(sel,example_width)

## ============ Part 2a: Vectorize Logistic Regression ============
#  In this part of the exercise, you will reuse your logistic regression code from the last exercise.
#  Your task here is to make sure that your regularized logistic regression implementation is vectorized.
#  After that, you will implement one-vs-all classification for the handwritten digit dataset.

# Test case for lrCostFunction
print('Testing lrCostFunction() with regularization')

theta_t = np.array([-2, -1, 1, 2])
X_t = np.c_[np.ones([5, 1]), np.arange(1, 16).reshape(3, 5).T / 10]
y_t = np.array([[1],[0],[1],[0],[1]]) >= 0.5
lambda_t = 3
J, grad = lrCostFunction.lrCostFunction(theta_t, X_t, y_t, lambda_t)
#[J,grad] = lrCostFunction.lrCostFunction(initial_theta, X, y, xlambda)
print('Cost: %.7f\nExpected cost: 2.534819\nGradients:'%J)
for grad_i in grad:
    print(grad_i)
print('Expected gradients:\n0.146561\n -0.548558\n 0.724722\n 1.398003')

input('Program paused. Press enter to continue.')

## ============ Part 2b: One-vs-All Training ============
print('Training One-vs-All Logistic Regression...')

xlambda = 0.1
all_theta = oneVsAll.oneVsAll(X, y, num_labels, xlambda)
input('Program paused. Press enter to continue.')
Code example #12
def grad_func(t):
    return lrCostFunction(X, y, t, lmda)[1]
Code example #13
def cost_func(t):
    return lrCostFunction(X, y, t, lmda)[0]
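
These two wrappers (examples #12 and #13) split the `(cost, grad)` pair returned by `lrCostFunction` into the separate callables that SciPy's optimizers expect. A usage sketch, assuming `X`, `y`, `lmda`, and `lrCostFunction` are defined in the enclosing scope as in the other snippets:

from scipy.optimize import fmin_cg
import numpy as np

initial_theta = np.zeros(X.shape[1])  # X is assumed to already carry its intercept column
theta = fmin_cg(cost_func, initial_theta, fprime=grad_func,
                maxiter=100, disp=False)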
Code example #14
    ''' lrCostFunction check '''

    theta = np.array([-2, -1, 1, 2])

    magic3 = np.array([[8, 1, 6],
                       [3, 5, 7],
                       [4, 9, 2]])  # MATLAB's magic(3)

    X = np.concatenate((np.ones((magic3.shape[0], 1)), magic3), axis=1)
    y = np.array([[1], [0], [1]]) >= 0.5

    l = 3

    try:
        assert np.around(lrCostFunction(X, y, theta, l), decimals=4).tolist() == [7.6832]
        assert np.around(lrGrad(X, y, theta, l), decimals=5).tolist() == [[0.31722], [-0.12768], [2.64812], [4.23787]]
    except AssertionError:
        sys.exit("Unit test failed")

    ''' oneVsAll check '''
    np.set_printoptions(suppress=True)
    np.set_printoptions(precision=6)
    X = np.r_[ magic3 , np.sin(range(1, 4))[np.newaxis], np.cos(range(1, 4))[np.newaxis]]
    y = np.array([[0, 1, 1, 0, 2]]).T
    num_labels = 3
    l = 0.1
    
    print(oneVsAll(X, y, num_labels, l))
    print("\n")
Code example #15

# Test case for lrCostFunction
print('\nTesting lrCostFunction() with regularization')

theta_t = np.array([[-2], [-1], [1], [2]])

data = np.arange(1, 16).reshape(3, 5).T

X_t = np.c_[np.ones((5, 1)), data / 10]

y_t = (np.array([[1], [0], [1], [0], [1]]) >= 0.5) * 1

lambda_t = 3

J = lrCostFunction(theta_t, X_t, y_t, lambda_t)
grad = lrGradient(theta_t, X_t, y_t, lambda_t, flattenResult=False)

print('\nCost:', J)
print('Expected cost: 2.534819\n')
print('Gradients:\n')
print(grad)
print('Expected gradients:\n')
print(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n')

print('Program paused. Press enter to continue.\n')
pause()


"""## Part 2b: One-vs-All Training ============"""
print('\nTraining One-vs-All Logistic Regression...\n')