Code example #1
lmd = 1
cost, _ = nn_cost_function(X, Y, nn_parameters, input_layer, hidden_layer,
                           out_layer, lmd)
print(
    'Cost at parameters (loaded from ex4weights): {:0.6f}\n(This value should be about 0.383770)'
    .format(cost))
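
# sigmoid_gradient is assumed to be imported with the other helpers; a
# minimal sketch of it, using the standard derivative g'(z) = g(z)(1 - g(z)):
def sigmoid_gradient(z):
    s = 1.0 / (1.0 + np.exp(-z))  # sigmoid activation
    return s * (1.0 - s)          # element-wise derivative
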
# Verify the sigmoid gradient
g = sigmoid_gradient(np.array([-1, -0.5, 0, 0.5, 1]))
print('Sigmoid gradient evaluated at [-1  -0.5  0  0.5  1]:\n{}'.format(g))

# =========================== 3. Initialize network parameters ================
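# rand_init_weights is assumed to break symmetry by sampling each weight
# uniformly from [-epsilon_init, epsilon_init]; a minimal sketch (the exact
# epsilon value is an assumption):
def rand_init_weights(l_in, l_out):
    epsilon_init = 0.12
    # +1 accounts for the bias unit of the incoming layer
    return np.random.rand(l_out, l_in + 1) * 2 * epsilon_init - epsilon_init
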
random_theta1 = rand_init_weights(input_layer, hidden_layer)
random_theta2 = rand_init_weights(hidden_layer, out_layer)
rand_nn_parameters = np.concatenate(
    [random_theta1.flatten(), random_theta2.flatten()])
# Check the backpropagation algorithm
lmd = 3
check_nn_gradients(lmd)
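
# check_nn_gradients is assumed to compare the analytic gradients against a
# centered finite-difference estimate on a small debug network; a minimal
# sketch of that estimate (cost_fn maps a parameter vector to a scalar cost):
def numerical_gradient(cost_fn, theta, eps=1e-4):
    grad = np.zeros_like(theta)
    for i in range(theta.size):
        step = np.zeros_like(theta)
        step[i] = eps
        grad[i] = (cost_fn(theta + step) - cost_fn(theta - step)) / (2 * eps)
    return grad
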
debug_cost, _ = nn_cost_function(X, Y, nn_parameters, input_layer, hidden_layer,
                                 out_layer, lmd)
print(
    'Cost at (fixed) debugging parameters (w/ lambda = {}): {:0.6f}\n(for lambda = 3, this value should be about 0.576051)'
    .format(lmd, debug_cost))

# ========================== 4. Train the NN ==================================
lmd = 1


def cost_func(p):
    return nn_cost_function(X, Y, p, input_layer, hidden_layer, out_layer,
                            lmd)[0]

initial_theta1 = rand_init_weights(input_layer, hidden_layer)
initial_theta2 = rand_init_weights(hidden_layer, out_layer)
initial_nn_params = np.concatenate(
    [initial_theta1.flatten(),
     initial_theta2.flatten()])
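
# Training typically proceeds from here by handing cost_func, a matching
# gradient function, and the unrolled initial parameters to a SciPy
# optimizer; a hedged sketch, assuming scipy.optimize is imported as opt
# and that nn_cost_function returns (cost, gradient):
def grad_func(p):
    return nn_cost_function(X, Y, p, input_layer, hidden_layer, out_layer,
                            lmd)[1]


res = opt.minimize(fun=cost_func, x0=initial_nn_params, jac=grad_func,
                   method='CG', options={'maxiter': 400})
trained_params = res.x  # unrolled theta1 / theta2 after training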

Code example #2

# ===================== Part 7: Implement Backpropagation =====================
# Once your cost matches up with ours, you should proceed to implement the
# backpropagation algorithm for the neural network. You should add to the
# code you've written in nncostfunction.py to return the partial
# derivatives of the parameters.
#
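
# For reference, a minimal sketch of the backward pass described above (the
# helper name, the one-hot label matrix, and numpy imported as np are
# assumptions, not the exercise's exact nncostfunction.py code):
def backprop_gradients(theta1, theta2, X, y_onehot):
    m = X.shape[0]
    a1 = np.hstack([np.ones((m, 1)), X])        # input layer plus bias
    a2 = 1 / (1 + np.exp(-a1.dot(theta1.T)))    # hidden activations
    a2 = np.hstack([np.ones((m, 1)), a2])
    a3 = 1 / (1 + np.exp(-a2.dot(theta2.T)))    # output activations
    delta3 = a3 - y_onehot                      # output-layer error
    delta2 = delta3.dot(theta2[:, 1:]) * a2[:, 1:] * (1 - a2[:, 1:])
    theta1_grad = delta2.T.dot(a1) / m          # unregularized gradients
    theta2_grad = delta3.T.dot(a2) / m
    return theta1_grad, theta2_grad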

print('Checking Backpropagation ... ')

# Check gradients by running check_nn_gradients()

lmd = 0
cng.check_nn_gradients(lmd)

input('Program paused. Press ENTER to continue')

# ===================== Part 8: Implement Regularization =====================
# Once your backpropagation implementation is correct, you should now
# continue to implement the regularization with the cost and gradient.
#
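
# A minimal sketch of the regularization being checked below: the standard
# formulation adds lmd/(2m) * the sum of squared weights to the cost and
# lmd/m * theta to the gradient, skipping the bias column theta[:, 0]
# (numpy imported as np is assumed):
def regularization_terms(theta1, theta2, lmd, m):
    reg_cost = lmd / (2 * m) * (np.sum(theta1[:, 1:] ** 2) +
                                np.sum(theta2[:, 1:] ** 2))
    theta1_reg = (lmd / m) * theta1.copy()
    theta1_reg[:, 0] = 0        # bias column is not regularized
    theta2_reg = (lmd / m) * theta2.copy()
    theta2_reg[:, 0] = 0
    return reg_cost, theta1_reg, theta2_reg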

print('Checking Backpropagation (w/ Regularization) ...')

lmd = 3
cng.check_nn_gradients(lmd)

# Also output the cost_function debugging values
debug_cost, _ = ncf.nn_cost_function(nn_params, input_layer_size,
                                     hidden_layer_size, num_labels, X, y, lmd)
print('Cost at (fixed) debugging parameters (w/ lambda = {}): {:0.6f}\n'
      '(for lambda = 3, this value should be about 0.576051)'.format(
          lmd, debug_cost))
Code example #3
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # ================ Part 6: Initializing Parameters ================
    print('\nInitializing Neural Network Parameters ...\n')
    initial_Theta1 = rand_initialize_weights(input_layer_size,
                                             hidden_layer_size)
    initial_Theta2 = rand_initialize_weights(hidden_layer_size, num_labels)
    # Unroll parameters
    initial_nn_params = np.append(np.ravel(initial_Theta1, order='F'),
                                  np.ravel(initial_Theta2, order='F'))

    # =============== Part 7: Implement Backpropagation ===============
    print('\nChecking Backpropagation... \n')
    # Check gradients by running checkNNGradients
    check_nn_gradients()
    print('Program paused. Press enter to continue.\n')
    # pause_func()

    # =============== Part 8: Implement Regularization ===============
    print('\nChecking Backpropagation (w/ Regularization) ... \n')
    #  Check gradients by running checkNNGradients
    check_nn_lambda = 3
    check_nn_gradients(check_nn_lambda)
    # Also output the costFunction debugging values
    debug_J = nn_cost_function(nn_params, input_layer_size, hidden_layer_size,
                               num_labels, X, y, check_nn_lambda)[0]
    print('\n\nCost at (fixed) debugging parameters (w/ lambda = %d): %f ' %
          (check_nn_lambda, debug_J))
    print('\n(for lambda = 3, this value should be about 0.576051)\n\n')
    print('Program paused. Press enter to continue.\n')