def test_KNN_dists_oneloop_shape(sample_train, sample_test):
    """The one-loop distance matrix must be (num_test, num_train)."""
    X_train, y_train = sample_train(count=40)
    X_test, _ = sample_test(count=10)
    # Flatten each example into a single row vector.
    X_train = X_train.reshape(X_train.shape[0], -1)
    X_test = X_test.reshape(X_test.shape[0], -1)
    classifier = KNearestNeighbor()
    classifier.train(X_train, y_train)
    dists = classifier.compute_distances_one_loop(X_test)
    assert dists.shape == (X_test.shape[0], X_train.shape[0])
def test_KNN_dists_one_to_none(sample_train, sample_test):
    """One-loop and fully vectorized distances must agree (Frobenius norm)."""
    X_train, y_train = sample_train(count=40)
    X_test, _ = sample_test(count=10)
    # Flatten each example into a single row vector.
    X_train = X_train.reshape(X_train.shape[0], -1)
    X_test = X_test.reshape(X_test.shape[0], -1)
    classifier = KNearestNeighbor()
    classifier.train(X_train, y_train)
    via_one_loop = classifier.compute_distances_one_loop(X_test)
    via_no_loops = classifier.compute_distances_no_loops(X_test)
    # Frobenius norm of the difference: Euclidean distance between the
    # matrices viewed as flat vectors.
    assert np.linalg.norm(via_one_loop - via_no_loops, ord='fro') < 0.001
# Compute and print the fraction of correctly predicted examples num_correct = np.sum(y_test_pred == y_test) accuracy = float(num_correct) / num_test print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy)) y_test_pred = classifier.predict_labels(dists, k=5) num_correct = np.sum(y_test_pred == y_test) accuracy = float(num_correct) / num_test print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy)) # Now lets speed up distance matrix computation by using partial vectorization # with one loop. Implement the function compute_distances_one_loop and run the # code below: dists_one = classifier.compute_distances_one_loop(X_test) # To ensure that our vectorized implementation is correct, we make sure that it # agrees with the naive implementation. There are many ways to decide whether # two matrices are similar; one of the simplest is the Frobenius norm. In case # you haven't seen it before, the Frobenius norm of two matrices is the square # root of the squared sum of differences of all elements; in other words, reshape # the matrices into vectors and compute the Euclidean distance between them. difference = np.linalg.norm(dists - dists_one, ord='fro') print('Difference was: %f' % (difference, )) if difference < 0.001: print('Good! The distance matrices are the same') else: print('Uh-oh! The distance matrices are different') # Now implement the fully vectorized version inside compute_distances_no_loops