def test_KNN_dists_twoloop_shape(sample_train, sample_test):
    """The two-loop distance matrix must have shape (num_test, num_train)."""
    train_X, train_y = sample_train(count=40)
    test_X, test_y = sample_test(count=10)
    # Flatten each example into a single feature row vector.
    train_X = train_X.reshape(train_X.shape[0], -1)
    test_X = test_X.reshape(test_X.shape[0], -1)
    classifier = KNearestNeighbor()
    classifier.train(train_X, train_y)
    dists = classifier.compute_distances_two_loops(test_X)
    assert dists.shape == (test_X.shape[0], train_X.shape[0])
def test_KNN_dists_two_to_none(sample_train, sample_test):
    """Two-loop and fully-vectorized distance computations must agree."""
    train_X, train_y = sample_train(count=40)
    test_X, test_y = sample_test(count=10)
    # Flatten each example into a single feature row vector.
    train_X = train_X.reshape(train_X.shape[0], -1)
    test_X = test_X.reshape(test_X.shape[0], -1)
    classifier = KNearestNeighbor()
    classifier.train(train_X, train_y)
    looped = classifier.compute_distances_two_loops(test_X)
    vectorized = classifier.compute_distances_no_loops(test_X)
    # Frobenius norm of the difference should be essentially zero.
    assert np.linalg.norm(looped - vectorized, ord='fro') < 0.001
# Flatten every image into a single row vector so each example is 1-D.
X_train = X_train.reshape(X_train.shape[0], -1)
X_test = X_test.reshape(X_test.shape[0], -1)
print(X_train.shape, X_test.shape)

from cs231n.classifiers.k_nearest_neighbor import KNearestNeighbor

# Training a kNN classifier is a no-op: the classifier simply memorizes
# the training data and defers all work to prediction time.
classifier = KNearestNeighbor()
classifier.train(X_train, y_train)

# Exercise the naive two-loop distance computation implemented in
# cs231n/classifiers/k_nearest_neighbor.py.
dists = classifier.compute_distances_two_loops(X_test)

# Visualize the distance matrix: row i holds test example i's distances
# to every training example.
plt.imshow(dists, interpolation='none')
plt.show()

# Predict with k = 1 (plain nearest neighbor) and report accuracy.
y_test_pred = classifier.predict_labels(dists, k=1)
num_correct = np.sum(y_test_pred == y_test)
accuracy = float(num_correct) / num_test
print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy))