Example #1
# Reshape the image data into rows
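# (e.g. for CIFAR-10, each 32x32x3 image becomes a single 3072-dimensional row)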
X_train = np.reshape(X_train, (X_train.shape[0], -1))
X_test = np.reshape(X_test, (X_test.shape[0], -1))
print('Reshaped training and test data shapes:', X_train.shape,
      X_test.shape)

from cs231n.classifiers import KNearestNeighbor
# Create a kNN classifier instance.
# The classifier simply remembers the training data and does no further processing.
classifier = KNearestNeighbor()
classifier.train(X_train, y_train)
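
# The classifier above only memorizes the data. As an illustration (not the
# actual cs231n implementation), a minimal memorizing kNN class might look like:
class MinimalKNNSketch(object):
    def train(self, X, y):
        # kNN "training" is pure memorization: store the data and defer all
        # computation to prediction time.
        self.X_train = X
        self.y_train = y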

# Open cs231n/classifiers/k_nearest_neighbor.py and implement
# compute_distances_two_loops.

dists = classifier.compute_distances_two_loops(X_test)
print(dists.shape)

# We can visualize the distance matrix: each row is a single test example and
# its distances to training examples.
plt.imshow(dists, interpolation='none')
plt.show()

# Now implement the prediction function predict_labels and run the code below.
# First try k = 1 (which is Nearest Neighbor).
y_test_pred = classifier.predict_labels(dists, k=1)
# compute and print the fraction of correctly predicted examples

num_correct = np.sum(y_test_pred == y_test)
accuracy = float(num_correct) / num_test
print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy))
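
# For reference, a rough sketch of how predict_labels could be implemented from a
# precomputed distance matrix (majority vote over the k nearest training labels);
# the assignment's own implementation may differ, e.g. in how ties are broken:
import numpy as np

def predict_labels_sketch(dists, y_train, k=1):
    num_test = dists.shape[0]
    y_pred = np.zeros(num_test, dtype=y_train.dtype)
    for i in range(num_test):
        # Labels of the k training examples closest to the i-th test example.
        closest_y = y_train[np.argsort(dists[i])[:k]]
        # Majority vote; np.argmax picks the smallest label on a tie.
        y_pred[i] = np.argmax(np.bincount(closest_y))
    return y_pred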
Example #2
################################################################################
# Perform k-fold cross-validation to find the best value of k. For each k in
# k_choices, run the kNN classifier num_folds times, holding out one fold as
# the validation set and training on the rest. Store the accuracies for all
# folds and all values of k in the k_to_accuracies dictionary.
################################################################################
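# The loop below assumes that X_train_folds / y_train_folds are Python lists of
# arrays (so the slicing and + concatenation work) and that num_folds, k_choices
# and k_to_accuracies already exist. That setup is not shown in this snippet; a
# plausible version (an assumption, not the original author's code) would be:
num_folds = 5
k_choices = [1, 3, 5, 8, 10, 12, 15, 20, 50, 100]

# np.array_split returns a list of roughly equal-sized sub-arrays.
X_train_folds = np.array_split(X_train, num_folds)
y_train_folds = np.array_split(y_train, num_folds)

k_to_accuracies = {}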
for k in k_choices:
    for fold in range(num_folds):  # This fold is held out as the validation set.
        # Build the validation fold and the temporary training data from the remaining folds.
        validation_X_test = X_train_folds[fold]
        validation_y_test = y_train_folds[fold]
        temp_X_train = np.concatenate(X_train_folds[:fold] + X_train_folds[fold + 1:])
        temp_y_train = np.concatenate(y_train_folds[:fold] + y_train_folds[fold + 1:])

        # Initialize and train a classifier on the remaining folds.
        test_classifier = KNearestNeighbor()
        test_classifier.train(temp_X_train, temp_y_train)
        
        # Compute distances and predict labels for the validation fold.
        temp_dists = test_classifier.compute_distances_two_loops(validation_X_test)
        temp_y_test_pred = test_classifier.predict_labels(temp_dists, k=k)
        
        # Compute the accuracy for this (k, fold) pair.
        num_correct = np.sum(temp_y_test_pred == validation_y_test)
        num_test = validation_X_test.shape[0]
        accuracy = float(num_correct) / num_test
        print("k=",k,"Fold=",fold,"Accuracy=",accuracy)
        k_to_accuracies[k] = k_to_accuracies.get(k,[]) + [accuracy]
    

        
################################################################################
#                                 END OF YOUR CODE                             #
################################################################################
Example #3
We would now like to classify the test data with the kNN classifier. Recall that we can break down this process into two steps:

    1. First we must compute the distances between all test examples and all training examples.
    2. Given these distances, for each test example we find the k nearest examples and have them vote for the label.

Let's begin with computing the distance matrix between all training and test examples. For example, if there are Ntr training examples and Nte test examples, this stage should result in an Nte x Ntr matrix where each element (i, j) is the distance between the i-th test and the j-th training example.

First, open cs231n/classifiers/k_nearest_neighbor.py and implement the function compute_distances_two_loops that uses a (very inefficient) double loop over all pairs of (test, train) examples and computes the distance matrix one element at a time.
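
To make the expected behavior concrete, here is a rough standalone sketch of such a double-loop L2 (Euclidean) distance computation; the assignment wants the equivalent logic inside the KNearestNeighbor class, so treat this function only as an illustration.

import numpy as np

def compute_distances_two_loops_sketch(X_test, X_train):
    # dists[i, j] will hold the L2 distance between the i-th test example
    # and the j-th training example.
    num_test, num_train = X_test.shape[0], X_train.shape[0]
    dists = np.zeros((num_test, num_train))
    for i in range(num_test):
        for j in range(num_train):
            dists[i, j] = np.sqrt(np.sum((X_test[i] - X_train[j]) ** 2))
    return dists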


# Open cs231n/classifiers/k_nearest_neighbor.py and implement
# compute_distances_two_loops.
print "Calculating distances...."
# Test your implementation:
dists = classifier.compute_distances_two_loops(X_test)
print dists.shape    #500 x 50000

# We can visualize the distance matrix: each row is a single test example and
# its distances to training examples
plt.imshow(dists, interpolation='none')
plt.show()

# Now implement the function predict_labels and run the code below:
# We use k = 1 (which is Nearest Neighbor).
y_test_pred = classifier.predict_labels(dists, k=1)

# Compute and print the fraction of correctly predicted examples
num_correct = np.sum(y_test_pred == y_test)
accuracy = float(num_correct) / num_test
print('Got %d / %d correct => accuracy: %f' % (num_correct, num_test, accuracy))
Example #4
for k in k_choices:
    for fold in range(num_folds):  # This fold is held out as the validation set.
        # Build the validation fold and the temporary training data from the remaining folds.
        validation_X_test = X_train_folds[fold]
        validation_y_test = y_train_folds[fold]
        temp_X_train = np.concatenate(X_train_folds[:fold] +
                                      X_train_folds[fold + 1:])
        temp_y_train = np.concatenate(y_train_folds[:fold] +
                                      y_train_folds[fold + 1:])

        # Initialize and train a classifier on the remaining folds.
        test_classifier = KNearestNeighbor()
        test_classifier.train(temp_X_train, temp_y_train)

        # Compute distances and predict labels for the validation fold.
        temp_dists = test_classifier.compute_distances_two_loops(
            validation_X_test)
        temp_y_test_pred = test_classifier.predict_labels(temp_dists, k=k)

        # Compute the accuracy for this (k, fold) pair.
        num_correct = np.sum(temp_y_test_pred == validation_y_test)
        num_test = validation_X_test.shape[0]
        accuracy = float(num_correct) / num_test
        print("k=", k, "Fold=", fold, "Accuracy=", accuracy)
        k_to_accuracies[k] = k_to_accuracies.get(k, []) + [accuracy]

################################################################################
#                                 END OF YOUR CODE                             #
################################################################################

# Print out the computed accuracies
for k in sorted(k_to_accuracies):