def fit():
    # Draw 5 random sample points and fit both models to them
    x, y = sample(5)
    poly = polyreg.PolynomialRegression(4)  # degree 4: five coefficients for five points
    poly.fit(x, y)

    lin = linearreg.LinearRegression()
    lin.fit(x, y)
    return poly, lin
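None of these snippets include the polyreg and linearreg modules or the sample() helper they call; they are fragments of a larger example file. Below is a minimal sketch of what those pieces might look like, assuming ordinary least squares solved with NumPy and a w_ attribute that stores the intercept first (as the unpacking b, a = model.w_ in Example #2 suggests). The true function f and the data generator at the end are hypothetical stand-ins, not the original code.

# Minimal, assumed stand-ins for the missing pieces (not the original modules).
import numpy as np


class LinearRegression:
    """Ordinary least squares; after fit(), w_ holds [intercept, coefficient(s)]."""

    def fit(self, X, y):
        X = np.asarray(X, dtype=float)
        if X.ndim == 1:
            X = X.reshape(-1, 1)
        Xtil = np.c_[np.ones(len(X)), X]                    # prepend a bias column
        self.w_ = np.linalg.lstsq(Xtil, y, rcond=None)[0]   # least-squares solution
        return self

    def predict(self, X):
        X = np.asarray(X, dtype=float)
        if X.ndim == 1:
            X = X.reshape(-1, 1)
        return np.c_[np.ones(len(X)), X] @ self.w_


class PolynomialRegression:
    """Degree-d polynomial fit, implemented as linear regression on powers of x."""

    def __init__(self, degree):
        self.degree = degree

    def fit(self, x, y):
        x = np.asarray(x, dtype=float)
        # Design matrix with columns x, x**2, ..., x**degree
        X = np.vander(x, self.degree + 1, increasing=True)[:, 1:]
        self.lin_ = LinearRegression().fit(X, y)
        return self

    def predict(self, x):
        x = np.atleast_1d(np.asarray(x, dtype=float))
        X = np.vander(x, self.degree + 1, increasing=True)[:, 1:]
        y = self.lin_.predict(X)
        return y if y.size > 1 else y[0]


def f(x):
    """Hypothetical true function; the one used by Examples #3 and #4 is not shown."""
    return np.sin(1.5 * x)


def sample(n):
    """Hypothetical data generator: n noisy observations of f at random points in [0, 5]."""
    x = np.random.random(n) * 5
    y = f(x) + np.random.randn(n)
    return x, y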
Example #2
import numpy as np
import matplotlib.pyplot as plt

import linearreg  # custom modules used below; not shown in this snippet
import polyreg

# Generate data
np.random.seed(0)


def f(x):
    return 1 + 2 * x


x = np.random.random(10) * 10
y = f(x) + np.random.randn(10)

# Polynomial regression: a degree-10 polynomial fitted to only 10 points
model = polyreg.PolynomialRegression(10)
model.fit(x, y)

plt.scatter(x, y, color="k")
plt.ylim([y.min() - 1, y.max() + 1])
xx = np.linspace(x.min(), x.max(), 300)
yy = np.array([model.predict(u) for u in xx])
plt.plot(xx, yy, color="k")

# Linear regression
model = linearreg.LinearRegression()
model.fit(x, y)
b, a = model.w_  # intercept and slope of the fitted line
x1 = x.min() - 1
x2 = x.max() + 1
plt.plot([x1, x2], [a * x1 + b, a * x2 + b], color="k", linestyle="dashed")
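A quick cross-check, not part of the original snippet: NumPy's polyfit should recover essentially the same line as the custom LinearRegression class (assuming w_ stores the intercept first, as the unpacking above suggests).

# Hypothetical cross-check against numpy.polyfit (not in the original).
slope_np, intercept_np = np.polyfit(x, y, deg=1)
print("custom fit : intercept = {:.3f}, slope = {:.3f}".format(b, a))
print("np.polyfit : intercept = {:.3f}, slope = {:.3f}".format(intercept_np, slope_np))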
Example #3
    return x, y


xx = np.arange(0, 5, 0.01)
np.random.seed(0)
y_poly_sum = np.zeros(len(xx))
y_poly_sum_sq = np.zeros(len(xx))
y_lin_sum = np.zeros(len(xx))
y_lin_sum_sq = np.zeros(len(xx))
y_true = f(xx)
n = 100000
warnings.filterwarnings("ignore")

for _ in range(n):
    x, y = sample(5)
    poly = polyreg.PolynomialRegression(4)
    poly.fit(x, y)
    lin = linearreg.LinearRegression()
    lin.fit(x, y)
    y_poly = poly.predict(xx)
    y_poly_sum += y_poly
    y_poly_sum_sq += (y_poly - y_true)**2
    y_lin = lin.predict(xx)
    y_lin_sum += y_lin
    y_lin_sum_sq += (y_lin - y_true)**2
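
# After the loop, y_poly_sum / n and y_lin_sum / n estimate each model's average
# prediction over the n random training sets, and y_poly_sum_sq / n and
# y_lin_sum_sq / n estimate the corresponding mean squared error against y_true.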

fig = plt.figure()
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
ax1.set_title("Linear reg.")
ax2.set_title("Polynomial reg.")
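Example #3 breaks off right after creating the two panels. A possible continuation (a sketch, not the original plotting code) compares each model's average prediction with the true curve:

# Hypothetical continuation: plot each model's average prediction
# over the n random training sets against the true curve.
ax1.plot(xx, y_true, color="k", linestyle="dashed", label="true f")
ax1.plot(xx, y_lin_sum / n, color="k", label="mean prediction")
ax1.legend()
ax2.plot(xx, y_true, color="k", linestyle="dashed", label="true f")
ax2.plot(xx, y_poly_sum / n, color="k", label="mean prediction")
ax2.legend()
plt.show()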
Example #4

xx = np.arange(0, 5, 0.01)
np.random.seed(0)
y_poly_sum = np.zeros(len(xx))
y_poly_sum_sq = np.zeros(len(xx))
y_lin_sum = np.zeros(len(xx))
y_lin_sum_sq = np.zeros(len(xx))
y_true = f(xx)
n = 1000
warnings.filterwarnings("ignore")
for _ in range(n):
    # 1. Draw 5 arbitrary points
    x, y = sample(5)
    # Train the polynomial regression
    poly = polyreg.PolynomialRegression(4)  # polynomial degree (4, since there are 5 points)
    poly.fit(x, y)
    # Train the linear regression
    lin = linearreg.LinearRegression()
    lin.fit(x, y)
    # Polynomial regression prediction (at every point in [0, 5])
    y_poly = poly.predict(xx)
    # Accumulate the polynomial predictions in the prepared array
    y_poly_sum += y_poly
    # Accumulate the squared difference between the true and predicted values
    y_poly_sum_sq += (y_poly - y_true)**2
    # Linear regression prediction (input reshaped to a 2-D column)
    y_lin = lin.predict(xx.reshape(-1, 1))
    y_lin_sum += y_lin
    # Accumulate the squared difference between the true and predicted values
    y_lin_sum_sq += (y_lin - y_true)**2
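Example #4 likewise stops before using the accumulated arrays. Assuming the goal is the usual bias-variance comparison (a reading of the snippet, not code from the source), the sums can be reduced as follows:

# Hypothetical continuation: turn the accumulated sums into bias/variance estimates.
# Averaged over the n random training sets, at each grid point x:
#   MSE(x)      = E[(prediction(x) - y_true(x))**2]   ~ y_sum_sq / n
#   bias(x)**2  = (E[prediction(x)] - y_true(x))**2   ~ (y_sum / n - y_true)**2
#   variance(x) = MSE(x) - bias(x)**2
for name, y_sum, y_sum_sq in [("linear", y_lin_sum, y_lin_sum_sq),
                              ("polynomial", y_poly_sum, y_poly_sum_sq)]:
    mse = y_sum_sq / n
    bias_sq = (y_sum / n - y_true)**2
    variance = mse - bias_sq
    print("{:>10s}: mean MSE {:.3f}, mean bias^2 {:.3f}, mean variance {:.3f}".format(
        name, mse.mean(), bias_sq.mean(), variance.mean()))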