Example #1
import numpy as np
import matplotlib.pyplot as plt
from prml.features import PolynomialFeatures
from prml.linear import (
    LinearRegressor,
    RidgeRegressor,
    BayesianRegressor
)

feature = PolynomialFeatures(9)
X_train = feature.transform(x_train)
X_test = feature.transform(x_test)

model = RidgeRegressor(alpha=1e-3)
model.fit(X_train, y_train)
y = model.predict(X_test)

plt.scatter(x_train, y_train, facecolor="none", edgecolor="b", s=50, label="training data")
plt.plot(x_test, y_test, c="g", label=r"$\sin(2\pi x)$")
plt.plot(x_test, y, c="r", label="fitting")
plt.ylim(-1.5, 1.5)
plt.legend()
plt.annotate("M=9", xy=(-0.15, 1))
plt.show()
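
Example #1 assumes x_train, y_train, x_test, and y_test already exist. A minimal sketch of that setup, assuming the sinusoidal toy data used in the later examples (the sample size 10 and noise level 0.25 are assumptions):

def sinusoidal(x):
    return np.sin(2 * np.pi * x)


def create_toy_data(func, sample_size, std):
    # evenly spaced points in [0, 1], shuffled, with Gaussian noise added
    x = np.linspace(0, 1, sample_size)
    np.random.shuffle(x)
    return x, func(x) + np.random.normal(scale=std, size=x.shape)


x_train, y_train = create_toy_data(sinusoidal, 10, 0.25)  # assumed values
x_test = np.linspace(0, 1, 100)
y_test = sinusoidal(x_test)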

Example #2
class TestPolynomialFeatures(unittest.TestCase):
    def test_transform(self):
        # default degree 2: [x1, x2] -> [1, x1, x2, x1^2, x1*x2, x2^2]
        features = PolynomialFeatures()
        x = np.array([[0., 1.], [2., 3.]])
        X = np.array([[1., 0., 1., 0., 0., 1.], [1., 2., 3., 4., 6., 9.]])
        self.assertTrue(np.allclose(features.transform(x), X))
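
As a cross-check, scikit-learn's PolynomialFeatures (a different library, shown here only for comparison) produces the same degree-2 expansion in the same column order:

import numpy as np
from sklearn.preprocessing import PolynomialFeatures as SkPolynomialFeatures

x = np.array([[0., 1.], [2., 3.]])
print(SkPolynomialFeatures(degree=2).fit_transform(x))
# [[1. 0. 1. 0. 0. 1.]
#  [1. 2. 3. 4. 6. 9.]]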
Example #3
def create_toy_data(func, sample_size, std, domain=[0, 1]):
    x = np.linspace(domain[0], domain[1], sample_size)
    np.random.shuffle(x)
    t = func(x) + np.random.normal(scale=std, size=x.shape)
    return x, t


def cubic(x):
    return x * (x - 5) * (x + 5)


x_train, y_train = create_toy_data(cubic, 10, 10., [-5, 5])
x = np.linspace(-5, 5, 100)
y = cubic(x)

feature = PolynomialFeatures(degree=3)
X_train = feature.transform(x_train)
X = feature.transform(x)

vlr = VariationalLinearRegressor(beta=0.01)  # presumably imported from prml.linear (import not shown)
vlr.fit(X_train, y_train)
y_mean, y_std = vlr.predict(X, return_std=True)
plt.scatter(x_train, y_train, s=100, facecolor="none", edgecolor="b")
plt.plot(x, y, c="g", label="$\sin(2\pi x)$")
plt.plot(x, y_mean, c="r", label="prediction")
plt.fill_between(x, y_mean - y_std, y_mean + y_std, alpha=0.2, color="pink")
plt.legend()
plt.show()
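
Because predict was called with return_std=True, the shaded band is the one-standard-deviation predictive interval. A quick sketch (reusing y, y_mean, and y_std from above) of how much of the true cubic falls inside it:

# fraction of ground-truth points within +/- one predictive std
inside = np.abs(y - y_mean) <= y_std
print("coverage within one std: {:.2f}".format(inside.mean()))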


Example #4
def func(x):
    return np.sin(2 * np.pi * x)  # ground truth implied by the plot labels below


x_test = np.linspace(0, 1, 100)
y_test = func(x_test)

plt.scatter(x_train,
            y_train,
            facecolor="none",
            edgecolor="b",
            s=50,
            label="training data")
plt.plot(x_test, y_test, c="g", label=r"$\sin(2\pi x)$")
plt.legend()
plt.show()

for i, degree in enumerate([0, 1, 3, 9]):
    plt.subplot(2, 2, i + 1)
    feature = PolynomialFeatures(degree)
    X_train = feature.transform(x_train)
    X_test = feature.transform(x_test)

    model = LinearRegressor()
    model.fit(X_train, y_train)
    y = model.predict(X_test)

    plt.scatter(x_train,
                y_train,
                facecolor="none",
                edgecolor="b",
                s=50,
                label="training data")
    plt.plot(x_test, y_test, c="g", label=r"$\sin(2\pi x)$")
    plt.plot(x_test, y, c="r", label="fitting")
    plt.annotate("M={}".format(degree), xy=(-0.15, 1))
plt.show()
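
To put numbers on what the four panels show, a short sketch (reusing x_train, y_train, x_test, and y_test from above; the rmse helper is hypothetical, not part of prml) comparing test error across degrees:

def rmse(a, b):  # hypothetical helper, not from prml
    return np.sqrt(np.mean(np.square(a - b)))


for degree in [0, 1, 3, 9]:
    feature = PolynomialFeatures(degree)
    model = LinearRegressor()
    model.fit(feature.transform(x_train), y_train)
    y = model.predict(feature.transform(x_test))
    print("M={}: test RMSE = {:.3f}".format(degree, rmse(y, y_test)))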
Example #5
import matplotlib.pyplot as plt
import numpy as np

from prml.features import (GaussianFeatures, PolynomialFeatures,
                           SigmoidalFeatures)
from prml.linear import (BayesianRegressor, EmpiricalBayesRegressor,
                         LinearRegressor, RidgeRegressor)

np.random.seed(1234)


def create_toy_data(func, sample_size, std, domain=[0, 1]):
    x = np.linspace(domain[0], domain[1], sample_size)  # evenly spaced over the domain, 0 to 1 by default
    np.random.shuffle(x)
    t = func(x) + np.random.normal(scale=std, size=x.shape)
    return x, t


# linear basis function models
x = np.linspace(-1, 1, 100)
X_polynomial = PolynomialFeatures(11).transform(x[:, None])
X_gaussian = GaussianFeatures(np.linspace(-1, 1, 11), 0.1).transform(x)
X_sigmoidal = SigmoidalFeatures(np.linspace(-1, 1, 11), 10).transform(x)

plt.figure(figsize=(20, 5))
for i, X in enumerate([X_polynomial, X_gaussian, X_sigmoidal]):
    plt.subplot(1, 3, i + 1)
    for j in range(12):
        plt.plot(x, X[:, j])  # left to right: polynomial, Gaussian, and sigmoidal basis functions, 12 curves per panel
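
For reference, a hand-rolled version of the Gaussian basis in the middle panel, assuming GaussianFeatures evaluates exp(-(x - mu)^2 / (2 v)) at each mean mu with variance v (an assumption based on the call above; 12 curves for 11 means also suggests a leading bias column):

def gaussian_basis(x, means, var):
    # one column per mean: exp(-(x - mu)^2 / (2 * var))
    return np.exp(-0.5 * np.square(x[:, None] - means) / var)


Phi = gaussian_basis(x, np.linspace(-1, 1, 11), 0.1)  # X_gaussian without the assumed bias column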


# maximum likelihood and least squares
def sinusoidal(x):
    return np.sin(2 * np.pi * x)
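
The listing is cut off here; a minimal sketch of the maximum-likelihood (least-squares) fit this comment introduces, reusing create_toy_data, PolynomialFeatures, and LinearRegressor from above (sample size, noise level, and degree are assumptions):

x_train, y_train = create_toy_data(sinusoidal, 10, 0.25)
X_train = PolynomialFeatures(degree=3).transform(x_train)
model = LinearRegressor()  # the maximum-likelihood solution coincides with least squares
model.fit(X_train, y_train)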