Example 1
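    # Unroll the sample weight matrices into a single parameter vector,
    # flattening each matrix in column-major (Fortran) order.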
    theta = np.block([t.reshape(t.size, order='F') for t in Theta])
    sample_nn = NeuralNetwork(X_s, y_s, Theta, sample_dims)

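    # Gradient check: compare the analytical gradients from backpropagation
    # with a numerical approximation. cost_grad_numerical presumably perturbs
    # each parameter in turn and uses central differences,
    #   dJ/dtheta_i ~ (J(theta + eps*e_i) - J(theta - eps*e_i)) / (2*eps).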
    grads = sample_nn.cost_grad(theta)
    n_grads = sample_nn.cost_grad_numerical(theta)
    print('----------------------')
    print('Analytical | Numerical')
    print('----------------------')
    for g, n_g in zip(grads, n_grads):
        print('   {0: .4f} |   {1: .4f}'.format(g, n_g))
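    # Relative difference between the two gradient vectors; with a correct
    # backpropagation implementation this should be very small (e.g. < 1e-9).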
    diff = np.linalg.norm(n_grads - grads) / np.linalg.norm(n_grads + grads)
    print('Relative difference: {0}'.format(diff))

    print('========== Part 2.4: Regularized Neural Networks ==========')
    Theta = [data['Theta1'], data['Theta2']]
    theta = np.block([t.reshape(t.size, order='F') for t in Theta])
    nn.update_lambda(3)
    J = nn.cost(theta)
    print('Regularized cost: {0:0.6f} (expected: 0.576051)'.format(J))

    print('========== Part 2.5: Learning Parameters ==========')
    Theta = [
        nn.initialize_weights(layer_dims[i], layer_dims[i + 1], 0.12)
        for i in range(len(layer_dims) - 1)
    ]
    theta = np.block([t.reshape(t.size, order='F') for t in Theta])
    nn.update_lambda(1)
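    # Minimize the regularized cost (lambda = 1) starting from the random
    # weights; optimize presumably wraps an iterative solver such as
    # scipy.optimize.minimize and returns the final cost and parameters.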
    J, theta = nn.optimize(theta)
    p = nn.predict(theta)
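    # Accuracy on the training set; roughly 95% is typical for this setup,
    # though the exact value depends on the random initialization.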
    print('Trained accuracy: {}'.format(np.mean(p == y) * 100))