import numpy as np
import k_nearest_neighbor as knn
import sys

x = np.array([[1, 2], [3, 4]])
y = np.array([0, 1])
test = np.array([[1, 2], [3, 4]])

if __name__ == '__main__':
    if len(sys.argv) == 2:
        choice = sys.argv[1]
        if choice == '2':
            kk = knn.KNearestNeighbor()
            kk.train(x, y)
            dist = kk.compute_distances_two_loops(test)
            print(dist)
            print(kk.predict_labels(dist))
        elif choice == '1':
            kk = knn.KNearestNeighbor()
            kk.train(x, y)
            dist = kk.compute_distances_one_loop(test)
            print(dist)
            print(kk.predict_labels(dist))
        elif choice == '0':
            kk = knn.KNearestNeighbor()
            kk.train(x, y)
            dist = kk.compute_distances_no_loops(test)
            print(dist)
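# The driver above assumes a KNearestNeighbor class exposing train(),
# compute_distances_two_loops(), compute_distances_one_loop(),
# compute_distances_no_loops(), and predict_labels(). Below is a minimal
# sketch of the memorizing train step and the two looped distance
# computations; the method names match the calls above, but the bodies are
# an assumption, not the reference implementation in k_nearest_neighbor.py.

import numpy as np

class KNearestNeighbor(object):
    def train(self, X, y):
        # kNN "training" just memorizes the data.
        self.X_train = X
        self.y_train = y

    def compute_distances_two_loops(self, X):
        # Euclidean distance between every test and training point,
        # computed one pair at a time.
        num_test, num_train = X.shape[0], self.X_train.shape[0]
        dists = np.zeros((num_test, num_train))
        for i in range(num_test):
            for j in range(num_train):
                dists[i, j] = np.sqrt(np.sum((X[i] - self.X_train[j]) ** 2))
        return dists

    def compute_distances_one_loop(self, X):
        # Vectorize over the training set: fill one row of dists per iteration.
        num_test = X.shape[0]
        dists = np.zeros((num_test, self.X_train.shape[0]))
        for i in range(num_test):
            dists[i, :] = np.sqrt(np.sum((self.X_train - X[i]) ** 2, axis=1))
        return dists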
num_training = 50000
X_train = X_train[:num_training]
y_train = y_train[:num_training]
num_test = 100
X_test = X_test[:num_test]
y_test = y_test[:num_test]

# Reshape the image data into rows: each item in these arrays is a 3072-element
# vector representing 3 colors per image pixel.
X_train = np.reshape(X_train, (X_train.shape[0], -1))
X_test = np.reshape(X_test, (X_test.shape[0], -1))
print('Reshaped training data shape: ', X_train.shape, X_train.dtype)
print('Reshaped test data shape: ', X_test.shape, X_test.dtype)

import k_nearest_neighbor
knn = k_nearest_neighbor.KNearestNeighbor()
knn.train(X_train, y_train)

with timer.Timer('Computing distances'):
    dists = knn.compute_distances_no_loops(X_test)

with timer.Timer('Running label prediction'):
    y_test_pred = knn.predict_labels(dists, k=5)

# Compute and print the fraction of correctly predicted examples
num_correct = np.sum(y_test_pred == y_test)
accuracy = float(num_correct) / num_test
print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy))
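# The fully vectorized distance computation called above can be built from the
# expansion ||a - b||^2 = ||a||^2 - 2 a.b + ||b||^2, which removes all Python
# loops. The sketch below is one plausible implementation, not the reference
# one; timer.Timer is assumed to be a small timing context manager shipped
# with this repo, so a hypothetical stand-in is sketched as well.

import time
import numpy as np

class Timer(object):
    # Hypothetical stand-in for timer.Timer: prints elapsed wall-clock time.
    def __init__(self, label):
        self.label = label

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *exc):
        print('%s took %.3f s' % (self.label, time.time() - self.start))

def compute_distances_no_loops(X_train, X_test):
    # Pairwise Euclidean distances via the quadratic expansion.
    test_sq = np.sum(X_test ** 2, axis=1, keepdims=True)   # (num_test, 1)
    train_sq = np.sum(X_train ** 2, axis=1)                 # (num_train,)
    cross = X_test.dot(X_train.T)                           # (num_test, num_train)
    dists_sq = test_sq - 2.0 * cross + train_sq
    # Clamp tiny negative values caused by floating-point error before sqrt.
    return np.sqrt(np.maximum(dists_sq, 0.0))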
y_train = y_train[mask]
num_test = 500
mask = list(range(num_test))
X_test = X_test[mask]
y_test = y_test[mask]

# Reshape the image data into rows
X_train = np.reshape(X_train, (X_train.shape[0], -1))
X_test = np.reshape(X_test, (X_test.shape[0], -1))
print(X_train.shape, X_test.shape)

# Create a kNN classifier instance.
# Remember that training a kNN classifier is a no-op:
# the classifier simply remembers the data and does no further processing
classifier = k_nearest_neighbor.KNearestNeighbor()
classifier.train(X_train, y_train)

# Open cs231n/classifiers/k_nearest_neighbor.py and implement
# compute_distances_two_loops.

# Test your implementation:
dists = classifier.compute_distances_two_loops(X_test)
print(dists.shape)

# We can visualize the distance matrix: each row is a single test example and
# its distances to training examples
plt.imshow(dists, interpolation='none')
plt.show()

# Now implement the function predict_labels and run the code below:
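# predict_labels turns each row of the distance matrix into a class prediction
# by voting among the labels of the k closest training examples. A minimal
# standalone sketch is shown below (the repo version is a method on
# KNearestNeighbor); it assumes integer class labels and breaks ties in favor
# of the smaller label via np.bincount + np.argmax.

import numpy as np

def predict_labels(dists, y_train, k=1):
    # dists has shape (num_test, num_train); y_train has shape (num_train,).
    num_test = dists.shape[0]
    y_pred = np.zeros(num_test, dtype=y_train.dtype)
    for i in range(num_test):
        # Labels of the k nearest training examples for this test point.
        closest_y = y_train[np.argsort(dists[i])[:k]]
        # Majority vote over the k neighbor labels.
        y_pred[i] = np.argmax(np.bincount(closest_y))
    return y_pred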