# Assumes the X_train, X_test, y_train, y_test fixtures and the SVM, RBF,
# Linear and accuracy names are provided by the surrounding test module.
def test_svm_classification():
    # Convert {0, 1} labels to the {-1, +1} convention expected by the SVM.
    y_signed_train = (y_train * 2) - 1
    y_signed_test = (y_test * 2) - 1

    # Both an RBF and a linear kernel should reach at least 80% test accuracy.
    for kernel in [RBF(gamma=0.1), Linear()]:
        model = SVM(max_iter=250, kernel=kernel)
        model.fit(X_train, y_signed_train)
        predictions = model.predict(X_test)
        assert accuracy(y_signed_test, predictions) >= 0.8
Example No. 2
    def __init__(self, C=1.0, kernel=None, tol=1e-3, max_iter=100):
        """Support vector machine using a simplified SMO optimization routine.

        Parameters
        ----------
        C : float, default 1.0
            Penalty (regularization) parameter.
        kernel : Kernel object, default None
            Kernel to use; falls back to a Linear kernel when None.
        tol : float, default 1e-3
            Numerical tolerance used by the optimizer.
        max_iter : int, default 100
            Maximum number of optimization iterations.
        """
        self.C = C
        self.tol = tol
        self.max_iter = max_iter
        if kernel is None:
            self.kernel = Linear()
        else:
            self.kernel = kernel

        self.b = 0          # bias term
        self.alpha = None   # Lagrange multipliers, set during fit
        self.K = None       # kernel (Gram) matrix, computed during fit
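
A minimal usage sketch for the constructor above, not taken from the original snippets: it reuses the fit/predict interface demonstrated in the other examples on this page, and the toy arrays X and y are hypothetical placeholders with labels already encoded as {-1, +1}.

import numpy as np

# Hypothetical toy data; any {-1, +1}-labeled dataset would do.
X = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]])
y = np.array([-1, -1, -1, 1])

# Defaults mirror the __init__ signature shown above.
model = SVM(C=1.0, kernel=Linear(), max_iter=100)
model.fit(X, y)
print(model.predict(X))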
Example No. 3
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

# SVM, RBF, Linear and accuracy come from the accompanying implementation;
# their import lines are omitted in the original snippet.


def classification():
    # Generate a random binary classification problem.
    X, y = make_classification(n_samples=1200,
                               n_features=10,
                               n_informative=5,
                               random_state=1111,
                               n_classes=2,
                               class_sep=1.75)
    # Convert y from {0, 1} to {-1, +1}.
    y = (y * 2) - 1
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.2,
                                                        random_state=1111)

    # Train and evaluate with both an RBF and a linear kernel.
    for kernel in [RBF(gamma=0.1), Linear()]:
        model = SVM(max_iter=500, kernel=kernel, C=0.6)
        model.fit(X_train, y_train)
        predictions = model.predict(X_test)
        print("Classification accuracy (%s): %s" %
              (kernel, accuracy(y_test, predictions)))