Example #1
0
def linear_svm_regression():
    """Fit two LinearSVR models with different epsilon widths on noisy
    linear data (y = 4 + 3x + noise) and plot them side by side.

    The left subplot additionally draws a double-headed arrow at x = 1
    illustrating the epsilon half-width of the insensitive tube.
    Relies on the module-level helpers ``find_support_vectors`` and
    ``plot_svm_regression``.
    """
    np.random.seed(42)
    m = 50
    X = 2 * np.random.rand(m, 1)
    y = (4 + 3 * X + np.random.randn(m, 1)).ravel()

    svm_reg1 = LinearSVR(epsilon=1.5, random_state=42)
    svm_reg2 = LinearSVR(epsilon=0.5, random_state=42)
    svm_reg1.fit(X, y)
    svm_reg2.fit(X, y)

    # LinearSVR exposes no support vectors of its own; attach the indices
    # of off-margin samples so plot_svm_regression can highlight them.
    svm_reg1.support_ = find_support_vectors(svm_reg1, X, y)
    svm_reg2.support_ = find_support_vectors(svm_reg2, X, y)

    eps_x1 = 1
    # predict() returns a 1-element array; take the scalar so the value is
    # safe to use inside matplotlib coordinate tuples below.
    eps_y_pred = svm_reg1.predict([[eps_x1]])[0]

    plt.figure(figsize=(9, 4))
    plt.subplot(121)
    plot_svm_regression(svm_reg1, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(svm_reg1.epsilon), fontsize=18)
    plt.ylabel(r"$y$", fontsize=18, rotation=0)
    # Double-headed arrow marking the epsilon distance below the prediction.
    plt.annotate(
        '', xy=(eps_x1, eps_y_pred), xycoords='data',
        xytext=(eps_x1, eps_y_pred - svm_reg1.epsilon),
        textcoords='data', arrowprops={'arrowstyle': '<->', 'linewidth': 1.5}
    )
    plt.text(0.91, 5.6, r"$\epsilon$", fontsize=20)
    plt.subplot(122)
    plot_svm_regression(svm_reg2, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(svm_reg2.epsilon), fontsize=18)
    plt.show()
def regression_linear_svm():
    """Train two LinearSVR models (epsilon = 1.5 vs 0.5) on noisy linear
    data, mark off-margin samples, and plot both margins side by side.

    Saves the figure via ``save_fig("svm_linear_regression")`` and shows it.
    Relies on the module-level ``plot_svm_regression`` helper.
    """
    from sklearn.svm import LinearSVR

    np.random.seed(42)
    size = 50
    theta = np.array([4.0, 3.0])

    # X ~ uniform(0, 2)
    X = 2 * np.random.rand(size, 1)
    # y = 3X + 4 + noise where noise ~ gaussian(0, 1)
    y = theta[0] + theta[1] * X + np.random.randn(size, 1)
    y = y.ravel()

    reg1 = LinearSVR(epsilon=1.5, random_state=42)
    reg2 = LinearSVR(epsilon=0.5, random_state=42)

    reg1.fit(X, y)
    reg2.fit(X, y)

    def find_support_vectors(model, X, y):
        # Samples whose absolute residual reaches epsilon lie on or
        # outside the insensitive tube — the "support vectors" to plot.
        pred = model.predict(X)
        off_margin = (np.abs(y - pred) >= model.epsilon)
        return np.argwhere(off_margin)

    # LinearSVR has no support_ attribute; attach one for plotting.
    reg1.support_ = find_support_vectors(reg1, X, y)
    reg2.support_ = find_support_vectors(reg2, X, y)

    eps_x1 = 1
    # predict() returns a 1-element array; take the scalar so it is safe
    # to use inside the matplotlib coordinate tuples below.
    eps_y1 = reg1.predict([[eps_x1]])[0]

    plt.figure(figsize=(9, 4))

    plt.subplot(121)
    plot_svm_regression(reg1, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(reg1.epsilon), fontsize=18)
    plt.ylabel(r"$y$", fontsize=18, rotation=0)
    # Double-headed arrow marking the epsilon distance below the prediction.
    plt.annotate("",
                 xy=(eps_x1, eps_y1),
                 xycoords='data',
                 xytext=(eps_x1, eps_y1 - reg1.epsilon),
                 textcoords='data',
                 arrowprops=dict(arrowstyle="<->", linewidth=1.5))
    plt.text(0.91, 5.6, r"$\epsilon$", fontsize=20)

    plt.subplot(122)
    plot_svm_regression(reg2, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(reg2.epsilon), fontsize=18)
    save_fig("svm_linear_regression")
    plt.show()
Example #3
0
def regression():
    """Demonstrate SVM regression with two figures.

    Figure 1: LinearSVR with epsilon = 1.5 vs 0.5 on noisy linear data,
    with off-margin samples highlighted and the epsilon width annotated.
    Figure 2: degree-2 polynomial-kernel SVR with strong vs weak
    regularization (C = 100 vs 0.01) on noisy quadratic data.
    Both figures are saved under ``PNG_PATH``. Relies on the module-level
    ``find_support_vectors`` and ``plot_svm_regression`` helpers.
    """
    np.random.seed(42)
    m = 50
    X = 2 * np.random.rand(m, 1)
    y = (4 + 3 * X + np.random.randn(m, 1)).ravel()

    # (An unused LinearSVR that duplicated svm_reg1 was removed here.)
    svm_reg1 = LinearSVR(epsilon=1.5, random_state=42)
    svm_reg2 = LinearSVR(epsilon=0.5, random_state=42)
    svm_reg1.fit(X, y)
    svm_reg2.fit(X, y)

    # LinearSVR has no support_ attribute; attach off-margin indices
    # so plot_svm_regression can highlight them.
    svm_reg1.support_ = find_support_vectors(svm_reg1, X, y)
    svm_reg2.support_ = find_support_vectors(svm_reg2, X, y)

    eps_x1 = 1
    # predict() returns a 1-element array; take the scalar so it is safe
    # inside the matplotlib coordinate tuples below.
    eps_y_pred = svm_reg1.predict([[eps_x1]])[0]

    plt.figure(figsize=(9, 4))
    plt.subplot(121)
    plot_svm_regression(svm_reg1, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(svm_reg1.epsilon), fontsize=18)
    plt.ylabel(r"$y$", fontsize=18, rotation=0)
    # Solid segment plus a double-headed arrow marking the epsilon width.
    plt.plot([eps_x1, eps_x1], [eps_y_pred, eps_y_pred - svm_reg1.epsilon],
             "k-",
             linewidth=2)
    plt.annotate('',
                 xy=(eps_x1, eps_y_pred),
                 xycoords='data',
                 xytext=(eps_x1, eps_y_pred - svm_reg1.epsilon),
                 textcoords='data',
                 arrowprops={
                     'arrowstyle': '<->',
                     'linewidth': 1.5
                 })
    plt.text(0.91, 5.6, r"$\epsilon$", fontsize=20)
    plt.subplot(122)
    plot_svm_regression(svm_reg2, X, y, [0, 2, 3, 11])
    plt.title(r"$\epsilon = {}$".format(svm_reg2.epsilon), fontsize=18)
    plt.savefig(PNG_PATH + "svm_regression_plot", dpi=300)
    plt.close()

    # Quadratic data: y = 0.2 + 0.1x + 0.5x^2 + small noise, x in [-1, 1).
    m = 100
    X = 2 * np.random.rand(m, 1) - 1
    y = (0.2 + 0.1 * X + 0.5 * X**2 + np.random.randn(m, 1) / 10).ravel()

    svm_poly_reg1 = SVR(kernel="poly", degree=2, C=100, epsilon=0.1)
    svm_poly_reg2 = SVR(kernel="poly", degree=2, C=0.01, epsilon=0.1)
    svm_poly_reg1.fit(X, y)
    svm_poly_reg2.fit(X, y)

    plt.figure(figsize=(9, 4))
    plt.subplot(121)
    plot_svm_regression(svm_poly_reg1, X, y, [-1, 1, 0, 1])
    plt.title(r"$degree={}, C={}, \epsilon = {}$".format(
        svm_poly_reg1.degree, svm_poly_reg1.C, svm_poly_reg1.epsilon),
              fontsize=18)
    plt.ylabel(r"$y$", fontsize=18, rotation=0)
    plt.subplot(122)
    plot_svm_regression(svm_poly_reg2, X, y, [-1, 1, 0, 1])
    plt.title(r"$degree={}, C={}, \epsilon = {}$".format(
        svm_poly_reg2.degree, svm_poly_reg2.C, svm_poly_reg2.epsilon),
              fontsize=18)
    plt.savefig(PNG_PATH + "svm_with_polynomial_kernel_plot", dpi=300)
    plt.close()
from sklearn.svm import LinearSVR

# In place of C, the width of the margin ("street") is controlled with
# epsilon: larger epsilon means a wider insensitive tube.
svm_reg1 = LinearSVR(epsilon=1.5, random_state=42)
svm_reg2 = LinearSVR(epsilon=0.5, random_state=42)
# NOTE(review): X and y are defined earlier in the file (not visible here).
svm_reg1.fit(X, y)
svm_reg2.fit(X, y)


def find_support_vectors(svm_reg, X, y):
    """Return the indices (as an (k, 1) array) of samples whose absolute
    residual reaches the model's epsilon, i.e. samples lying on or
    outside the insensitive margin."""
    residuals = np.abs(y - svm_reg.predict(X))
    return np.argwhere(residuals >= svm_reg.epsilon)


# LinearSVR has no support_ attribute of its own; attach the indices of
# off-margin samples so the plotting helper can highlight them.
svm_reg1.support_ = find_support_vectors(svm_reg1, X, y)
svm_reg2.support_ = find_support_vectors(svm_reg2, X, y)


def plot_svm_regression(svm_reg, X, y, axes):
    """Plot a fitted SVM regressor over ``axes = [xmin, xmax, ymin, ymax]``:
    the prediction line, the two epsilon margin boundaries, the highlighted
    off-margin samples (``svm_reg.support_``), and the training points."""
    grid = np.linspace(axes[0], axes[1], 100).reshape(100, 1)
    prediction = svm_reg.predict(grid)
    plt.plot(grid, prediction, "k-", linewidth=2, label=r"$\hat{y}$")
    # Upper then lower boundary of the epsilon-insensitive tube.
    for offset in (svm_reg.epsilon, -svm_reg.epsilon):
        plt.plot(grid, prediction + offset, "k--")
    plt.scatter(X[svm_reg.support_], y[svm_reg.support_],
                s=180, facecolors="#FFAAAA")
    plt.plot(X, y, "bo")
    plt.axis(axes)
示例#5
0
File: svm.py  Project: abishek85/mlgeron
    plt.scatter(X[svm_reg.support_],
                y[svm_reg.support_],
                s=180,
                facecolors='#FFAAAA')
    plt.plot(X, y, "bo")
    plt.xlabel("$x_1$", fontsize=18)
    plt.legend(loc="upper left", fontsize=18)
    plt.axis(axes)


# Noisy linear data: y = 4 + 3x + gaussian noise, with x in [0, 2).
np.random.seed(42)
m = 50
X = 2 * np.random.rand(m, 1)
y = (4 + 3 * X + np.random.randn(m, 1)).ravel()

svm_reg = LinearSVR(epsilon=1.5, random_state=42)
svm_reg.fit(X, y)
# Attach off-margin sample indices so they can be highlighted in the plot.
svm_reg.support_ = find_support_vectors(svm_reg, X, y)
plot_svm_regression(svm_reg, X, y, [0, 2, 3, 11])

#%% Polynomial regression using svm
from sklearn.svm import SVR

# Quadratic data: y = 0.2 + 0.1x + 0.5x^2 + small noise, with x in [-1, 1).
m = 100
X = 2 * np.random.rand(m, 1) - 1
y = (0.2 + 0.1 * X + 0.5 * X**2 + np.random.randn(m, 1) / 10).ravel()

# Degree-2 polynomial kernel; C=100 applies little regularization.
svm_poly_reg = SVR(kernel="poly", degree=2, C=100, epsilon=0.1, gamma="auto")
svm_poly_reg.fit(X, y)
plot_svm_regression(svm_poly_reg, X, y, [-1, 1, 0, 1])
        epsilon=1.5, random_state=42
    )  # epsilon decides width of a street: bigger epsilon, wider street
    '''  LinearSVR(C=1.0, dual=True, epsilon=1.5, fit_intercept=True,
         intercept_scaling=1.0, loss='epsilon_insensitive', max_iter=1000,
         random_state=42, tol=0.0001, verbose=0)
    '''  # C: penalty.
    svm_reg.fit(X, y)

    # compare different epsilons in LinearSVR(): bigger epsilon, wider street
    svm_reg1 = LinearSVR(epsilon=1.5, random_state=42)
    svm_reg2 = LinearSVR(epsilon=0.5, random_state=42)
    svm_reg1.fit(X, y)
    svm_reg2.fit(X, y)

    # support_: points outside the street
    svm_reg1.support_ = find_support_vectors(svm_reg1, X,
                                             y)  # [[ 7], [14], [25], ... ]
    svm_reg2.support_ = find_support_vectors(svm_reg2, X, y)
    # support_ is not a attribute of LinearSVR. It's added here.

    # a sample of reg1: a green point on the predicted line and used to points the epsilon
    eps_x1 = 1
    eps_y_pred = svm_reg1.predict([[eps_x1]])

    # plot
    plt.figure(figsize=(9, 4))
    plt.subplot(121)
    plot_svm_regression(svm_reg1, X, y,
                        [0, 2, 3, 11])  # support_ has been defined above
    plt.title(r"$\epsilon = {}$".format(svm_reg1.epsilon), fontsize=18)
    plt.ylabel(r"$y$", fontsize=18, rotation=0)
    # plt.plot([eps_x1, eps_x1], [eps_y_pred, eps_y_pred - svm_reg1.epsilon], "k-", linewidth=2)
Example #7
0
# NOTE(review): svm_lsvr2 / svm_lsvr3, X and y are defined earlier in the
# file (not visible here).
svm_lsvr2.fit(X, y)
svm_lsvr3.fit(X, y)
"""
Define support vector
off_margin : difference of Absolute value between real y value and predict value
np.argwhere return True value which is in matrix
"""


def find_support_vectors(svm_lsvr, X, y):
    """Indices of training samples whose absolute prediction error is at
    least epsilon — the points lying on or beyond the margin."""
    errors = y - svm_lsvr.predict(X)
    return np.argwhere(np.abs(errors) >= svm_lsvr.epsilon)


# Attach off-margin indices so the plot can highlight "support vectors";
# LinearSVR does not expose a support_ attribute itself.
svm_lsvr2.support_ = find_support_vectors(svm_lsvr2, X, y)
svm_lsvr3.support_ = find_support_vectors(svm_lsvr3, X, y)

# Sample point at x = 1 used to illustrate the epsilon margin in the plot.
eps_x1 = 1
eps_y_pred = svm_lsvr2.predict([[eps_x1]])
"""
display plot with support vector
"""


def plot_svm_regression(svm_lsvr, X, y, axes):
    """Draw the prediction line of a fitted regressor together with its
    epsilon margin boundaries over ``axes = [xmin, xmax, ...]``."""
    grid = np.linspace(axes[0], axes[1], 100).reshape(100, 1)
    fitted = svm_lsvr.predict(grid)
    plt.plot(grid, fitted, 'k-', linewidth=2, label='y^')
    plt.plot(grid, fitted + svm_lsvr.epsilon, 'k--')
    plt.plot(grid, fitted - svm_lsvr.epsilon, 'k--')
# Noisy linear data: y = 4 + 3x + gaussian noise.
# NOTE(review): rnd (presumably numpy.random) and m are defined earlier in
# the file — confirm against the full source.
X = 2 * rnd.rand(m, 1)
y = (4 + 3 * X + rnd.randn(m, 1)).ravel()

# Same data, two margin widths: bigger epsilon -> wider street.
svm_reg1 = LinearSVR(epsilon=1.5)
svm_reg2 =LinearSVR(epsilon=0.5)
svm_reg1.fit(X, y)
svm_reg2.fit(X, y)



def find_support_vectors(svm_reg, X, y):
    """Return indices of samples falling on or outside the model's
    epsilon-insensitive tube (|y - y_pred| >= epsilon)."""
    margin_violation = np.abs(y - svm_reg.predict(X)) >= svm_reg.epsilon
    return np.argwhere(margin_violation)

# Mark samples outside each model's epsilon tube for plotting.
svm_reg1.support_ = find_support_vectors(svm_reg1, X, y)
svm_reg2.support_ = find_support_vectors(svm_reg2, X, y)

# Reference point at x = 1 used to annotate the epsilon width in the plot.
eps_x1 = 1
eps_y_pred = svm_reg1.predict([[eps_x1]])


def plot_svm_regression(svm_reg, X, y, axes):
	x1s = np.linspace(axes[0], axes[1], 100).reshape(100, 1)
	y_pred = svm_reg.predict(x1s)
	plt.plot(x1s, y_pred, "k-", linewidth=2, label=r"$\hat{y}$")
	plt.plot(x1s, y_pred + svm_reg.epsilon, "k--")
	plt.plot(x1s, y_pred - svm_reg.epsilon, "k--")
	plt.scatter(X[svm_reg.support_], y[svm_reg.support_], s=180, facecolors="#FFAAAA")
	plt.plot(X, y, "bo")
	plt.xlabel(r"$x_1$", fontsize=18)