def fit():
    # Draw five noisy sample points, then fit a degree-4 polynomial
    # regression and an ordinary linear regression to the same data.
    x, y = sample(5)
    poly = polyreg.PolynomialRegression(4)
    poly.fit(x, y)

    lin = linearreg.LinearRegression()
    lin.fit(x, y)
    return poly, lin
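The sample helper (and the ground-truth function f used in the later bias-variance example) is not part of this excerpt. A purely hypothetical stand-in, assuming inputs drawn uniformly from [0, 5) and additive Gaussian noise; the original f and noise level may differ:

import numpy as np

def f(x):
    # Hypothetical ground-truth function; the original is not shown.
    return x + np.sin(2 * x)

def sample(n):
    # Draw n noisy observations of f at uniformly random inputs in [0, 5).
    x = np.random.uniform(0, 5, n)
    y = f(x) + np.random.normal(scale=1.0, size=n)
    return x, y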
Example No. 2
def fit(self, x, y):
    # Build the design matrix [x, x**2, ..., x**degree] as columns,
    # then delegate the actual fit to the plain linear regression.
    x_pow = []
    xx = x.reshape(len(x), 1)
    for i in range(1, self.degree + 1):
        x_pow.append(xx ** i)
    mat = np.concatenate(x_pow, axis=1)
    linreg = linearreg.LinearRegression()
    linreg.fit(mat, y)
    self.w_ = linreg.w_
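Only fit is shown here; the predict used in the later example (poly.predict(xx)) is not. A hedged sketch of a matching predict, assuming linearreg.LinearRegression stores the intercept in w_[0] so that w_[i] multiplies x**i:

def predict(self, x):
    # Evaluate w_[0] + w_[1]*x + ... + w_[degree]*x**degree elementwise.
    r = np.zeros(len(x))
    for i in range(self.degree + 1):
        r += self.w_[i] * x ** i
    return r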
def train_lr(datasets):
    model = {}
    sse = {}
    mse = {}
    y_pred_dict = {}
    for i in range(1, 7):
        model[i] = {}
        sse[i] = {}
        mse[i] = {}
        y_pred_dict[i] = {}
        for j, dataset in enumerate(datasets):
            # Each dataset is a sequence of (x, y) pairs; unpack it once.
            # (In Python 3, zip(...) is an iterator and cannot be indexed.)
            x_raw, y_train = zip(*dataset)
            if i == 1:
                # Baseline: predict the constant 1 for every point.
                y_predict = [1] * len(y_train)
                sse_train = calculate_sse(y_train, y_predict)
                model[i][j] = None
                sse[i][j] = sse_train
                mse[i][j] = sse_train / len(y_predict)
                y_pred_dict[i][j] = y_predict

            else:
                # Expand each x with gx for model order i, standardize,
                # and fit the linear regression on the expanded features.
                x_train = [gx(i, x) for x in x_raw]
                x_train_std = standardize_data(x_train)
                linearreg = lr.LinearRegression(x_train_std, y_train)
                w = linearreg.getparam()
                y_predict_train = linearreg.predict(x_train_std)
                y_predict = y_predict_train.getT().tolist()[0]
                sse_train = calculate_sse(y_train, y_predict)
                model[i][j] = copy.deepcopy(linearreg)
                sse[i][j] = sse_train
                mse[i][j] = sse_train / len(y_predict)
                y_pred_dict[i][j] = y_predict

    return {
        'model': model,
        'sse': sse,
        'mse': mse,
        'y_predict_dict': y_pred_dict,
    }
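train_lr relies on a calculate_sse helper that is not included in this excerpt. A minimal sketch of what it presumably computes, the sum of squared errors between observed and predicted targets:

def calculate_sse(y_true, y_pred):
    # Sum of squared residuals over all points.
    return sum((t - p) ** 2 for t, p in zip(y_true, y_pred))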
Example No. 4
def fit(self, x, y):
    x_pow = []
    # reshape changes the array to the given shape: reshape(5, 2) turns a
    # length-10 array into 5 rows and 2 columns.
    xx = x.reshape(len(x), 1)
    # xx then looks like this:
    # [[5.48813504]
    #  [7.15189366]
    #  [6.02763376]
    #  [5.44883183]
    #  [4.23654799]
    #  [6.45894113]
    #  [4.37587211]
    #  [8.91773001]
    #  [9.63662761]
    #  [3.83441519]]
    for i in range(1, self.degree + 1):
        # Raising xx to each power is what turns this into polynomial regression.
        x_pow.append(xx**i)
    # Concatenate the column vectors side by side (one column per power).
    mat = np.concatenate(x_pow, axis=1)
    linreg = linearreg.LinearRegression()
    linreg.fit(mat, y)
    self.w_ = linreg.w_
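A small standalone check of the design-matrix construction used in fit above (NumPy only): each column of mat holds x raised to a successive power.

import numpy as np

x = np.array([1.0, 2.0, 3.0])
xx = x.reshape(len(x), 1)
mat = np.concatenate([xx ** i for i in range(1, 4)], axis=1)
print(mat)
# [[ 1.  1.  1.]
#  [ 2.  4.  8.]
#  [ 3.  9. 27.]]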
xmin = 0
xmax = 12
ymin = -1
ymax = 25

# Top row: ordinary least-squares fits; bottom row: ridge regression
# (lambda = 10), each fitted to a progressively larger subset of the data.
fig, axes = plt.subplots(nrows=2, ncols=5)
for i in range(5):
    axes[0, i].set_xlim([xmin, xmax])
    axes[0, i].set_ylim([ymin, ymax])
    axes[1, i].set_xlim([xmin, xmax])
    axes[1, i].set_ylim([ymin, ymax])
    xx = x[:2 + i * 2]
    yy = y[:2 + i * 2]
    axes[0, i].scatter(xx, yy, color="k")
    axes[1, i].scatter(xx, yy, color="k")
    model = linearreg.LinearRegression()
    model.fit(xx, yy)
    xs = [xmin, xmax]
    ys = [model.w_[0] + model.w_[1] * xmin,
          model.w_[0] + model.w_[1] * xmax]
    axes[0, i].plot(xs, ys, color="k")

    model = ridge.RidgeRegression(10.)
    model.fit(xx, yy)

    xs = [xmin, xmax]
    ys = [model.w_[0] + model.w_[1] * xmin,
          model.w_[0] + model.w_[1] * xmax]
    axes[1, i].plot(xs, ys, color="k")

plt.show()
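ridge.RidgeRegression itself is not shown in this excerpt. A minimal sketch consistent with how it is used above (w_[0] as intercept, w_[1] as slope), assuming the standard closed form w = (XᵀX + λI)⁻¹Xᵀy; the original may differ, for example in whether the intercept is penalized:

import numpy as np

class RidgeRegression:
    def __init__(self, lambda_=1.0):
        self.lambda_ = lambda_
        self.w_ = None

    def fit(self, x, y):
        # Prepend a bias column of ones so that w_[0] is the intercept.
        Xtil = np.c_[np.ones(x.shape[0]), x]
        # Closed-form ridge solution: (X^T X + lambda*I) w = X^T y.
        A = np.dot(Xtil.T, Xtil) + self.lambda_ * np.eye(Xtil.shape[1])
        b = np.dot(Xtil.T, y)
        self.w_ = np.linalg.solve(A, b)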
Example No. 6
xx = np.arange(0, 5, 0.01)
np.random.seed(0)
y_poly_sum = np.zeros(len(xx))
y_poly_sum_sq = np.zeros(len(xx))
y_lin_sum = np.zeros(len(xx))
y_lin_sum_sq = np.zeros(len(xx))
y_true = f(xx)
n = 100000
warnings.filterwarnings("ignore")

# Repeatedly resample, refit both models, and accumulate their predictions
# and squared errors against the noiseless ground truth y_true.
for _ in range(n):
    x, y = sample(5)
    poly = polyreg.PolynomialRegression(4)
    poly.fit(x, y)
    lin = linearreg.LinearRegression()
    lin.fit(x, y)
    y_poly = poly.predict(xx)
    y_poly_sum += y_poly
    y_poly_sum_sq += (y_poly - y_true)**2
    y_lin = lin.predict(xx)
    y_lin_sum += y_lin
    y_lin_sum_sq += (y_lin - y_true)**2

fig = plt.figure()
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
ax1.set_title("Linear reg.")
ax2.set_title("Polynomial reg.")
ax1.set_ylim(0, 1)
ax2.set_ylim(0, 1)
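The excerpt ends before anything is drawn on the two axes. A hedged completion, assuming the intended plot is the usual bias-variance decomposition: the accumulated sums give the mean prediction, squared bias is measured against the noiseless y_true, and variance is the mean squared error minus the squared bias.

y_lin_mean = y_lin_sum / n
y_poly_mean = y_poly_sum / n
# Squared bias: squared distance of the average prediction from the truth.
lin_bias2 = (y_lin_mean - y_true) ** 2
poly_bias2 = (y_poly_mean - y_true) ** 2
# Variance: mean squared error minus squared bias (y_true carries no noise).
lin_var = y_lin_sum_sq / n - lin_bias2
poly_var = y_poly_sum_sq / n - poly_bias2

ax1.plot(xx, lin_bias2, label="bias^2")
ax1.plot(xx, lin_var, label="variance")
ax2.plot(xx, poly_bias2, label="bias^2")
ax2.plot(xx, poly_var, label="variance")
ax1.legend()
ax2.legend()
plt.show()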