def check_cost_function(lmd):
    """
    Gradient checking for the collaborative-filtering cost function.

    Builds a small random problem, computes the numerical gradient via
    compute_numerial_gradient and the analytical gradient via
    cofi_cost_function, prints both side by side, and reports their
    relative difference (should be < 1e-9 for a correct implementation).

    Parameters
    ----------
    lmd : float
        Regularization parameter passed through to the cost function.
    """
    # Create a small random problem (4 movies x 5 users, 3 features)
    x_t = np.random.rand(4, 3)
    theta_t = np.random.rand(5, 3)

    # Zap out most entries to simulate a sparse ratings matrix
    Y = np.dot(x_t, theta_t.T)  # 4x5
    Y[np.random.rand(Y.shape[0], Y.shape[1]) > 0.5] = 0
    R = np.zeros(Y.shape)
    R[Y != 0] = 1

    # Random initial parameters for gradient checking
    x = np.random.randn(x_t.shape[0], x_t.shape[1])
    theta = np.random.randn(theta_t.shape[0], theta_t.shape[1])
    num_users = Y.shape[1]  # 5
    num_movies = Y.shape[0]  # 4
    num_features = theta_t.shape[1]  # 3

    # Unrolled parameter vector, shared by both gradient computations
    # (the original built it twice)
    params = np.concatenate((x.flatten(), theta.flatten()))

    def cost_func(p):
        return cofi_cost_function(p, Y, R, num_users, num_movies, num_features,
                                  lmd)

    numgrad = compute_numerial_gradient(cost_func, params)
    cost, grad = cofi_cost_function(params, Y, R, num_users, num_movies,
                                    num_features, lmd)

    print(np.c_[numgrad, grad])
    # Fixed: closing parenthesis was missing from the message
    print('The above two columns you get should be very similar.\n'
          '(Left-Your Numerical Gradient, Right-Analytical Gradient)')

    # Relative difference between numerical and analytical gradients
    diff = np.linalg.norm(numgrad - grad) / np.linalg.norm(numgrad + grad)

    # Fixed: "If you backpropagation" -> "If your backpropagation"
    print('If your backpropagation implementation is correct, then\n'
          'the relative difference will be small (less than 1e-9).\n'
          'Relative Difference: {:0.3e}'.format(diff))
# ----- Esempio n. 2 (Example 2) -----
# Pull pre-trained parameters and dataset dimensions out of the loaded data
theta = data['Theta']
num_users = data['num_users']
num_movies = data['num_movies']
num_features = data['num_features']

# Shrink the problem so the script runs quickly
num_users = 4
num_movies = 5
num_features = 3
X = X[:num_movies, :num_features]
theta = theta[:num_users, :num_features]
Y = Y[:num_movies, :num_users]
R = R[:num_movies, :num_users]

# Evaluate cost and gradient at the loaded parameters with lambda = 0
params = np.concatenate((X.flatten(), theta.flatten()))
cost = ccf.cofi_cost_function(params, Y, R, num_users, num_movies,
                              num_features, 0)
grad = ccf.cofi_gred_function(params, Y, R, num_users, num_movies,
                              num_features, 0)

print('Cost at loaded parameters: {:0.2f}\n(this value should be about 22.22)'.
      format(cost))

input('Program paused. Press ENTER to continue')

# ===================== Part 3: Collaborative Filtering Gradient =====================
# Once your cost function matches up with ours, you should now implement
# the collaborative filtering gradient function. Specifically, you should
# complete the code in cofiCostFunction.py to return the grad argument.
#
print('Checking gradients (without regularization) ...')
# ----- Esempio n. 3 (Example 3) -----
 def cost_func(p):
     """Cost-only wrapper closing over the ratings data, for the optimizer."""
     result = ccf.cofi_cost_function(p, Y, R, num_users, num_movies,
                                     num_features, lmd)
     return result
# ----- Esempio n. 4 (Example 4) -----
def grad_func(p):
    """Gradient-only wrapper over the normalized ratings for the optimizer."""
    # cofi_cost_function returns (cost, grad); keep only the gradient part
    cost_and_grad = cofi_cost_function(p, Ynorm, R, num_users, num_movies,
                                       num_features, lmd)
    return cost_and_grad[1]
 def cost_func(p):
     """Return (cost, gradient) for the unrolled parameter vector *p*."""
     return (ccf.cofi_cost_function(p, Y, R, num_users, num_movies,
                                    num_features, lmd),
             ccf.cofi_gred_function(p, Y, R, num_users, num_movies,
                                    num_features, lmd))