Example #1
import numpy as np
from lightning.classification import KernelSVC

from m2cgen import assemblers, ast

# `utils` (which provides cmp_exprs) and `_create_expected_single_output_ast`
# are helpers from the surrounding test module, elided here.


def test_cosine_kernel():
    estimator = KernelSVC(kernel="cosine", random_state=1, gamma=2.0)

    estimator.fit(np.array([[1], [2]]), [1, 2])

    assembler = assemblers.LightningSVMModelAssembler(estimator)
    actual = assembler.assemble()

    def kernel_ast(sup_vec_value):
        feature_norm = ast.SqrtExpr(
            ast.BinNumExpr(
                ast.FeatureRef(0),
                ast.FeatureRef(0),
                ast.BinNumOpType.MUL),
            to_reuse=True)
        return ast.BinNumExpr(
            ast.BinNumExpr(
                ast.NumVal(sup_vec_value),
                ast.FeatureRef(0),
                ast.BinNumOpType.MUL),
            ast.IfExpr(
                ast.CompExpr(
                    feature_norm,
                    ast.NumVal(0.0),
                    ast.CompOpType.EQ),
                ast.NumVal(1.0),
                feature_norm),
            ast.BinNumOpType.DIV)

    expected = _create_expected_single_output_ast(
        estimator.coef_, estimator.intercept_,
        [kernel_ast(1.0), kernel_ast(1.0)])

    assert utils.cmp_exprs(actual, expected)
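For reference, the kernel_ast tree above encodes the cosine kernel
k(x, v) = <x, v> / (||x|| * ||v||): the support vector appears already
normalized (both calls pass kernel_ast(1.0), consistent with [1] and [2]
each reducing to 1.0 after normalization), and the feature norm is replaced
by 1.0 when it is exactly zero. A minimal NumPy sketch of the same
computation, for orientation only:

import numpy as np

def cosine_kernel(x, v):
    # Normalize the support vector up front; this factor is what gets
    # folded into NumVal(...) in the assembled AST.
    v = v / np.linalg.norm(v)
    # Feature norm with the same zero guard as the IfExpr above.
    x_norm = np.sqrt(np.dot(x, x))
    return np.dot(v, x) / (1.0 if x_norm == 0.0 else x_norm)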
Example #2
import numpy as np
from lightning.classification import KernelSVC

from m2cgen import ast
from m2cgen.assemblers import LightningSVMModelAssembler

# `cmp_exprs` and `_rbf_kernel_ast` are helpers from the surrounding
# test module, elided here.


def test_lightning_multi_class_rbf_kernel():
    estimator = KernelSVC(kernel="rbf", random_state=1, gamma=2.0)

    estimator.fit(np.array([[1], [2], [3]]), np.array([1, 2, 3]))

    assembler = LightningSVMModelAssembler(estimator)
    actual = assembler.assemble()

    kernels = [_rbf_kernel_ast(estimator, float(i)) for i in range(1, 4)]

    expected = ast.VectorVal([
        ast.BinNumExpr(
            ast.BinNumExpr(
                ast.BinNumExpr(
                    ast.NumVal(0.0),
                    ast.BinNumExpr(kernels[0], ast.NumVal(0.5342246289),
                                   ast.BinNumOpType.MUL),
                    ast.BinNumOpType.ADD),
                ast.BinNumExpr(kernels[1], ast.NumVal(-0.5046204480),
                               ast.BinNumOpType.MUL), ast.BinNumOpType.ADD),
            ast.BinNumExpr(kernels[2], ast.NumVal(-0.4659431306),
                           ast.BinNumOpType.MUL), ast.BinNumOpType.ADD),
        ast.BinNumExpr(
            ast.BinNumExpr(
                ast.BinNumExpr(
                    ast.NumVal(0.0),
                    ast.BinNumExpr(kernels[0], ast.NumVal(-0.5386765707),
                                   ast.BinNumOpType.MUL),
                    ast.BinNumOpType.ADD),
                ast.BinNumExpr(kernels[1], ast.NumVal(0.5729019463),
                               ast.BinNumOpType.MUL), ast.BinNumOpType.ADD),
            ast.BinNumExpr(kernels[2], ast.NumVal(-0.5386765707),
                           ast.BinNumOpType.MUL), ast.BinNumOpType.ADD),
        ast.BinNumExpr(
            ast.BinNumExpr(
                ast.BinNumExpr(
                    ast.NumVal(0.0),
                    ast.BinNumExpr(kernels[0], ast.NumVal(-0.4659431306),
                                   ast.BinNumOpType.MUL),
                    ast.BinNumOpType.ADD),
                ast.BinNumExpr(kernels[1], ast.NumVal(-0.5046204480),
                               ast.BinNumOpType.MUL), ast.BinNumOpType.ADD),
            ast.BinNumExpr(kernels[2], ast.NumVal(0.5342246289),
                           ast.BinNumOpType.MUL), ast.BinNumOpType.ADD)
    ])

    assert cmp_exprs(actual, expected)
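The expected tree is the matrix product coef_ @ kernels written out as
nested ADD/MUL nodes: each VectorVal entry is one class score, accumulating
(starting from 0.0) the RBF kernel value against each of the three support
vectors, weighted by the matching coef_ entry. A minimal NumPy sketch,
assuming the usual exp(-gamma * ||x - sv||^2) RBF form:

import numpy as np

def decision_values(x, support_vectors, coef, gamma=2.0):
    # RBF kernel against every support vector, then one coef-weighted
    # sum per class: the vectorized equivalent of the VectorVal above.
    k = np.exp(-gamma * np.sum((support_vectors - x) ** 2, axis=1))
    return coef @ k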
Example #3
    # Tail of the plot_contour(X, X1, X2, clf, title) helper; its signature
    # is clear from the calls below, and the elided portion above selects the
    # support vectors into `sv`. The snippet assumes `import numpy as np`,
    # `import pylab as pl` and `from lightning.classification import KernelSVC`.
    pl.scatter(sv[:, 0], sv[:, 1], s=100, c="g")

    # Plot decision surface.
    A, B = np.meshgrid(np.linspace(-6, 6, 50), np.linspace(-6, 6, 50))
    C = np.array([[x1, x2] for x1, x2 in zip(np.ravel(A), np.ravel(B))])
    Z = clf.decision_function(C).reshape(A.shape)
    pl.contour(A, B, Z, [0.0], colors="k", linewidths=1, origin="lower")

    pl.axis("tight")


# Generate synthetic data from 2 classes (gen_non_lin_separable_data and
# SparseNonlinearClassifier are defined earlier in this example, elided here).
X1, y1, X2, y2 = gen_non_lin_separable_data()

# Combine them to form a training set.
X = np.vstack((X1, X2))
y = np.hstack((y1, y2))

# Train the classifiers.
clf = SparseNonlinearClassifier(gamma=0.1, alpha=1.0 / 0.05)
clf.fit(X, y)

clf2 = KernelSVC(gamma=0.1, kernel="rbf", alpha=1e-2)
clf2.fit(X, y)

# Plot contours.
plot_contour(X, X1, X2, clf, "Sparse")
plot_contour(X, X1, X2, clf2, "Kernel SVM")

pl.show()
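A note on the contour call above: decision_function returns signed scores,
so the single contour at level 0.0 traces exactly the decision boundary.
An illustrative check, reusing the grid names A and C from plot_contour:

# Sign of the score gives the predicted label on each grid point; the
# 0.0 contour drawn above is where this sign flips.
labels = np.sign(clf.decision_function(C)).reshape(A.shape)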