# ===================== Example #1 =====================
# ----- Test case for the regularized logistic-regression cost function -----
theta_test = np.array([-2, -1, 1, 2])
features = np.column_stack((np.ones(5), np.arange(1, 16).reshape((3, 5)).T / 10))
labels = np.array([1, 0, 1, 0, 1])
reg_param = 3
J, gradient = lCF.lr_cost_function(theta_test, features, labels, reg_param)

np.set_printoptions(formatter={'float': '{: 0.6f}'.format})
print('Cost: {:0.7f}'.format(J))
print('Expected cost: 2.534819')
print('Gradients:\n{}'.format(gradient))
print('Expected gradients:\n[ 0.146561 -0.548558 0.724722 1.398003]')

input('Program paused. Press ENTER to continue')


# ===================== Part 2-b: One-vs-All Training =====================
print('Training One-vs-All Logistic Regression ...')

regularization = 0.1
trained_theta = ova.one_vs_all(X, y, num_labels, regularization)

input('Program paused. Press ENTER to continue')

# ===================== Part 3: Predict for One-Vs-All =====================
predictions = pova.predict_one_vs_all(trained_theta, X)

print('Training set accuracy: {}'.format(np.mean(predictions == y) * 100))
input('ex3 Finished. Press ENTER to exit')
# ===================== Example #2 =====================
# Problem setup: 20x20 pixel images (400 features), digits 1..10 (10 is '0').
input_layer_size = 400
num_labels = 10

# ================== visualize the data ====================================================
sample_idx = np.random.randint(0, 5000, (100, ))  # [0, 5000)
displayData.data_display(x_data[sample_idx, :])   # get 100 images randomly

# ======================= Test case for lrCostFunction =============================
theta_t = np.array([-2, -1, 1, 2])
steps = np.linspace(1, 15, 15) / 10
x_t = np.column_stack((np.ones((5, 1)), steps.reshape((3, 5)).T))
y_t = np.array([1, 0, 1, 0, 1])
l_t = 3
cost = lrCostFunction.cost_reg(theta_t, x_t, y_t, l_t)
grad = lrCostFunction.grad_reg(theta_t, x_t, y_t, l_t)
print("cost is {}".format(cost))
print("expected cost is 2.534819")
print("grad is {}".format(grad))
print("expected grad is 0.146561 -0.548558 0.724722 1.398003")
# ============================ test end =============================================
# ============================ one vs all:predict ===========================================
reg = 0.1
theta = oneVsAll.one_vs_all(x_data, y_data, reg, num_labels)
result = Predict.pred_lr(theta, x_data[1500, :])
np.set_printoptions(precision=2, suppress=True)  # don't use  scientific notation
print("this number is {}".format(result))  # 10 here is 0
plt.imshow(x_data[1500, :].reshape((20, 20)), cmap='gray', vmin=-1, vmax=1)
plt.show()
accuracy = predictOneVsAll.pred_accuracy(theta, x_data, y_data)
print("test 5000 images, accuracy is {:%}".format(accuracy))
# ============================ predict  end ======================================================
# ===================== Example #3 =====================
# Randomly permute the m training examples and show a sample of 100 digits.
rand_indices = np.random.permutation(range(m))
# FIX: slice [0:100] selects exactly 100 examples; the original [1:100]
# skipped index 0 and only displayed 99.
selected = X[rand_indices[0:100], :]
# Display sample handwritten digits
display_data(selected)

# ========================= 2. Vectorized Logistic Regression =========================
# Disabled sanity check for lr_cost_function against known values.
"""
theta_t = np.array([-2, -1, 1, 2])
X_t = np.c_[np.ones(5), np.arange(1, 16).reshape((3, 5)).T/10]
y_t = np.array([1, 0, 1, 0, 1])
lmda_t = 3
cost,grad = lr_cost_function(X_t,y_t,theta_t,lmda_t)
np.set_printoptions(formatter={'float': '{: 0.6f}'.format})
print('Cost: {:0.7f}'.format(cost))
print('Expected cost: 2.534819')
print('Gradients:\n{}'.format(grad))
print('Expected gradients:\n[ 0.146561 -0.548558 0.724722 1.398003]')
"""
# Train one-vs-all classifiers (one regularized logistic regression per label).

lmd = 0.01
num_labels = 10
all_theta = one_vs_all(X, Y, num_labels, lmd)

# =============================== 3. Predict =======================================
pred = predict_one_vs_all(X, all_theta)
# Y must be flattened to shape (m,); with shape (m, 1) the element-wise
# comparison below broadcasts to (m, m) and the accuracy is wrong.
Y = Y.reshape(Y.size)
print('Training set accuracy:{}'.format(np.mean(pred == Y) * 100))
# ===================== Example #4 =====================
    # Pick a random subset of 100 training examples to visualize.
    perm = np.arange(0, m, 1, dtype=int)
    np.random.shuffle(perm)
    sample = X[perm[0:100], :]
    display_data(sample)
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # ============ Part 2a: Vectorize Logistic Regression ============
    # Sanity-check lrCostFunction against known expected values.
    print('\nTesting lrCostFunction() with regularization')
    theta_t = np.array([[-2], [-1], [1], [2]])
    X_t = np.c_[np.ones((5, 1)), np.arange(1, 16).reshape((3, 5)).T / 10]
    y_t = np.array([[1], [0], [1], [0], [1]])
    lambda_t = 3
    J, grad = lr_cost_function(theta_t, X_t, y_t, lambda_t)
    print('\nCost: \n', J, '\nExpected cost: 2.534819\n')
    print('Gradients:\n', grad, '\nExpected gradients:\n', ' 0.146561\n -0.548558\n  0.724722\n  1.398003\n')
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # ============ Part 2b: One-vs-All Training ============
    print('\nTraining One-vs-All Logistic Regression...\n')
    reg_strength = 0.1
    all_theta = one_vs_all(X, y, num_labels, reg_strength)
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # ================ Part 3: Predict for One-Vs-All ================
    # +1 converts 0-based class indices back to 1-based labels.
    pred = predict_one_vs_all(all_theta, X) + 1
    print('\nTraining Set Accuracy: \n', np.mean((pred == y) * 100))