# ----- Example 1 -----
# Use the trained k-NN model to classify new, previously unseen fruits.
# Assumes `knn` (fitted on mass/width/height) and `lookup_fruit_name`
# (label -> name mapping) were defined in an earlier cell — TODO confirm.
fruit_prediction = knn.predict([[20, 4.3, 5.5]])
# Bare expression: in a notebook cell this displays the predicted fruit name.
lookup_fruit_name[fruit_prediction[0]]

# In[12]:

# second example: a larger, elongated fruit with mass 100g, width 6.3 cm, height 8.5 cm
fruit_prediction = knn.predict([[100, 6.3, 8.5]])
lookup_fruit_name[fruit_prediction[0]]

# ### Plot the decision boundaries of the k-NN classifier

# In[13]:

from adspy_shared_utilities import plot_fruit_knn

plot_fruit_knn(X_train, y_train, 5, 'uniform')  # we choose 5 nearest neighbors

# ### How sensitive is k-NN classification accuracy to the choice of the 'k' parameter?

# In[14]:

k_range = range(1, 20)
scores = []

# Refit the classifier for each k and record its test-set accuracy.
for k in k_range:
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_train, y_train)
    scores.append(knn.score(X_test, y_test))

# Accuracy vs. k.
# BUG FIX: the original opened this figure and set only the x-label, then
# immediately opened a second figure — the collected `scores` were never
# plotted.  scatter/ylabel added, consistent with the other examples here.
plt.figure()
plt.xlabel('k')
plt.ylabel('accuracy')
plt.scatter(k_range, scores)

# 3D scatter of the training data, colored by fruit label.
# NOTE(review): assumes X_train at this point still contains a
# 'color_score' column — TODO confirm against the earlier cells.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(X_train['width'],
           X_train['height'],
           X_train['color_score'],
           c=y_train,
           marker='o',
           s=100)
ax.set_xlabel('width')
ax.set_ylabel('height')
ax.set_zlabel('color_score')
plt.show()

# Feature matrix (mass, width, height) and integer label vector.
X = fruits[['mass', 'width', 'height']]
y = fruits['fruit_label']
# Default 75/25 train/test split; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X_train, y_train)
knn.score(X_test, y_test)  # bare expression: displays test accuracy in a notebook

# Classify new, previously unseen fruits.
# BUG FIX: renamed `look_up_fruit_name` -> `lookup_fruit_name` to match the
# mapping name used everywhere else in this file; the original spelling is
# not defined anywhere visible and would raise NameError — TODO confirm the
# mapping's definition site.
fruit_prediction = knn.predict([[20, 4.3, 5.5]])
lookup_fruit_name[fruit_prediction[0]]

fruit_prediction = knn.predict([[100, 6.3, 8.5]])
lookup_fruit_name[fruit_prediction[0]]

# Plot the decision boundaries of the 5-NN (uniform-weight) classifier.
from adspy_shared_utilities import plot_fruit_knn
plot_fruit_knn(X_train, y_train, 5, 'uniform')
# ----- Example 3 -----
# Use trained k-NN classifier model to classify new objects.
# NOTE(review): relies on .unique() returning labels and names in matching
# first-occurrence order so the pairs align — verify against the fruits table.
lookup = dict(zip(fruits.fruit_label.unique(),
                  fruits.fruit_name.unique()))  # Label #-name pair
# This variant's knn was presumably fitted on 4 features
# (mass, width, height, color_score) — TODO confirm the training cell.
fruit_prediction = knn.predict(
    [[20, 4.3, 5.5,
      0.45]])  # Predict function, need all 4 parameters to generate.
print(lookup[fruit_prediction[0]])

fruit_prediction = knn.predict([[100, 10.2, 12.3, 0.63]])
print(lookup[fruit_prediction[0]])

# Sensitivity of k-NN classifier accuracy to choice of 'k' parameter
k_range = range(1, 20)
scores = []
for k in k_range:  # k=1 to k=19
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_train, y_train)
    scores.append(knn.score(X_test, y_test))

# Plot test accuracy against k.
plt.figure()
plt.scatter(k_range, scores)
plt.xlabel('k')
plt.ylabel("accuracy")
plt.xticks([0, 5, 10, 15, 20])

#   Graph displaying decision boundaries.
from adspy_shared_utilities import plot_fruit_knn

plot_fruit_knn(X_train, y_train, 5, "uniform")
# ----- Example 4 -----
# Classify new fruits with the trained k-NN model.
# Assumes `knn` and `lookup_fruit_name` are defined in an earlier cell — TODO confirm.
fruit_prediction = knn.predict([[20, 4.3, 5.5]])
lookup_fruit_name[fruit_prediction[0]]

# In[25]:

# second example: a larger, elongated fruit with mass 100g, width 6.3 cm, height 8.5 cm
fruit_prediction = knn.predict([[100, 6.3, 8.5]])
lookup_fruit_name[fruit_prediction[0]]

# ### Plot the decision boundaries of the k-NN classifier

# In[27]:

from adspy_shared_utilities import plot_fruit_knn

plot_fruit_knn(X_train, y_train, 3,
               'distance')  # 3 nearest neighbors, distance-weighted votes

# ### How sensitive is k-NN classification accuracy to the choice of the 'k' parameter?

# In[28]:

k_range = range(1, 20)
scores = []

# Refit the classifier for each k and record its test-set accuracy.
for k in k_range:
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_train, y_train)
    scores.append(knn.score(X_test, y_test))

# BUG FIX: the original opened a figure and set only the x-label; the
# collected `scores` were never plotted.  scatter/ylabel added, consistent
# with the other k-sweep examples in this file.
plt.figure()
plt.scatter(k_range, scores)
plt.xlabel('k')
plt.ylabel('accuracy')
# ----- Example 5 -----
# train the classifier (fit the estimator) using training data
# (knn, X_train/X_test, y_train/y_test and lookup_fruit_name are assumed
# to be defined in an earlier cell — TODO confirm)
print(knn.fit(X_train, y_train))

# Estimate the accuracy of the classifier on future data using the test data
print(knn.score(X_test, y_test))

# Use the trained k-NN classifier model to classify new, previously unseen objects
fruit_prediction = knn.predict([[20, 4.3, 5.5]])  # mass, width and height
print(lookup_fruit_name[fruit_prediction[0]])

fruit_prediction = knn.predict([[100, 6.3, 8.5]])  # mass, width and height
print(lookup_fruit_name[fruit_prediction[0]])

# Plot decision boundaries of the k-NN classifier
# (once with uniform weighting, once with distance weighting, to compare)
from adspy_shared_utilities import plot_fruit_knn
plot_fruit_knn(X_train, y_train, 5, 'uniform')
plot_fruit_knn(X_train, y_train, 5, 'distance')

# How sensitive is k-NN classification accuracy to the choice of the 'k' parameter?
k_range = range(1, 20)
scores = []

# NOTE: this loop rebinds the global `knn`; after it runs, `knn` is the
# k=19 model, not the 5-NN model fitted above.
for k in k_range:
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_train, y_train)
    scores.append(knn.score(X_test, y_test))

plt.figure()
plt.xlabel('k')
plt.ylabel('accuracy')
plt.scatter(k_range, scores)
# ----- Example 6 -----
def decisionBoundaries():
    """Plot the 5-NN (uniform-weight) decision boundaries for the training data.

    Uses the module-level X_train / y_train; the helper import is kept local,
    as in the rest of this file.
    """
    from adspy_shared_utilities import plot_fruit_knn
    plot_fruit_knn(X_train, y_train, 5, 'uniform')