Example #1

#X_test = sparse.csr_matrix(X_test)

print("num training: %d" % train_idx.shape[0])
print("num test: %d" % test_idx.shape[0])

## parameters = {
##     'loss': 'l2',
##     'penalty': 'l2',
##     'C': 1000,
##     'dual': False,
##     'eps': 1e-3,
## }
## print("Training LinearSVC on training set")
## clf = LinearSVC(**parameters)
print("Training SGD with alpha=0.001 and n_iter=2")
clf = SGD(alpha=0.001, n_iter=2)
t0 = time()
clf.fit(X_train, y_train)
print "done in %fs" % (time() - t0)

print "Predicting the outcomes of the testing set"
t0 = time()
pred = clf.predict(X_test)
print "done in %fs" % (time() - t0)

print "Classification performance:"
print
print metrics.classification_report(
    y_test,
    pred,
    labels=[-1, 1],
    class_names=['any other types', 'cover type 1'])
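
Note that class_names is the keyword of the old scikits.learn API used in this fragment; in current scikit-learn the equivalent argument of sklearn.metrics.classification_report is target_names. Below is a minimal sketch against the modern API; the toy y_test/pred values are made up only so the snippet runs on its own.

import numpy as np
from sklearn import metrics

# Hypothetical ground truth / predictions, using the same -1 / +1 encoding
# as the fragment above, only to make the sketch self-contained.
y_test = np.array([-1, -1, -1, 1, 1, 1])
pred = np.array([-1, 1, -1, 1, 1, -1])

print(metrics.classification_report(
    y_test, pred, labels=[-1, 1],
    target_names=['any other types', 'cover type 1']))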
Example #2

"""
Plot the maximum margin separating hyperplane within a two-class
separable dataset using a linear Support Vector Machines classifier
trained using SGD.
"""
print __doc__

import numpy as np
import pylab as pl
from scikits.learn.sgd import SGD

# we create 40 separable points
np.random.seed(0)
X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]]
Y = [0]*20 + [1]*20

# fit the model
clf = SGD(loss="hinge", alpha=0.01, n_iter=50, fit_intercept=True)
clf.fit(X, Y)

# plot the line, the points, and the nearest vectors to the plane
xx = np.linspace(-5, 5, 10)
yy = np.linspace(-5, 5, 10)
X1, X2 = np.meshgrid(xx, yy)
Z = np.empty(X1.shape)
for (i, j), val in np.ndenumerate(X1):
    x1 = val
    x2 = X2[i, j]
    p = clf.predict_margin([x1, x2])
    Z[i, j] = p[0]
levels = [-1.0, 0.0, 1.0]
linestyles = ['dashed', 'solid', 'dashed']
colors = 'k'
pl.contour(X1, X2, Z, levels, colors=colors, linestyles=linestyles)
pl.scatter(X[:, 0], X[:, 1], c=Y, cmap=pl.cm.Paired)
pl.axis('tight')
pl.show()
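
The example above uses the old scikits.learn.sgd.SGD class and its predict_margin method. For reference, roughly the same figure can be produced with the current scikit-learn API, where SGDClassifier replaces SGD, max_iter plays the role of n_iter, and decision_function replaces predict_margin; this is a sketch under those assumptions, not part of the original example.

import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import SGDClassifier

# Same toy data: two linearly separable Gaussian blobs.
rng = np.random.RandomState(0)
X = np.r_[rng.randn(20, 2) - [2, 2], rng.randn(20, 2) + [2, 2]]
Y = np.array([0] * 20 + [1] * 20)

# hinge loss => a linear SVM trained with SGD, as in the original example
clf = SGDClassifier(loss="hinge", alpha=0.01, max_iter=50, fit_intercept=True)
clf.fit(X, Y)

# decision_function evaluates the signed margin for the whole grid at once,
# instead of one point per call as predict_margin is used above.
X1, X2 = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))
Z = clf.decision_function(np.c_[X1.ravel(), X2.ravel()]).reshape(X1.shape)

plt.contour(X1, X2, Z, levels=[-1.0, 0.0, 1.0], colors='k',
            linestyles=['dashed', 'solid', 'dashed'])
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)
plt.axis('tight')
plt.show()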
Example #3

######################################################################
## Train LinearSVC (liblinear) model
libsvm_parameters = {
    'loss': 'l2',
    'penalty': 'l2',
    'C': 1000,
    'dual': False,
    'eps': 1e-3,
    }
libsvm_res = benchmark(LinearSVC(**libsvm_parameters))
libsvm_err, libsvm_train_time, libsvm_test_time = libsvm_res

######################################################################
## Train SGD model
sgd_parameters = {
    'alpha': 0.001,
    'n_iter': 2,
    }
sgd_err, sgd_train_time, sgd_test_time = benchmark(SGD(**sgd_parameters))

######################################################################
## Train GNB model
gnb_err, gnb_train_time, gnb_test_time = benchmark(GNB())


######################################################################
## Print classification performance
print("")
print("Classification performance:")
print("===========================")
print("")
def print_row(clf_type, train_time, test_time, err):
    print("%s %s %s %s" % (clf_type.ljust(12),
                           ("%.4fs" % train_time).center(10),