def GradientDescentRegular(x, y, w_init):
    """Fit polynomial coefficients by gradient descent with L2 regularization.

    Minimizes sum((X w - y)**2) / (2n) where X is the Vandermonde design
    matrix (X[i, j] = x[i]**j), applying an L2 penalty `lamda * w` inside
    the gradient step.

    Args:
        x: 1-D array of sample inputs.
        y: 1-D array of sample targets (same length as x).
        w_init: 1-D array of initial coefficients; its size fixes the
            polynomial degree (degree = w_init.size - 1).

    Returns:
        1-D numpy array of fitted coefficients, lowest power first.
    """
    lamda = 0.001  # L2 regularization coefficient
    step = 0.05    # learning rate
    w = np.mat(w_init.copy()).T  # column vector of coefficients
    Y = np.mat(y).T
    # Vandermonde design matrix X[i, j] = x[i]**j, built in one vectorized
    # call instead of the original O(n*d) double Python loop.
    X = np.mat(np.vander(np.asarray(x, dtype=float), w_init.size, increasing=True))

    loss_function = 1.0  # initial loss sentinel so the loop is entered
    iteratornNum = 0     # iteration counter
    while loss_function > 0.001 and iteratornNum < 200000:
        # Stop when the loss drops below 0.001 or after 200000 iterations.
        iteratornNum += 1
        # Gradient of the (unscaled) squared error plus the L2 penalty term;
        # the original likewise omits the 1/n factor from the gradient.
        w -= step * (np.dot(X.T, (np.dot(X, w) - Y)) + lamda * w)
        # Loss = sum of squared residuals / (2n).  X @ w is exactly the
        # fitted polynomial evaluated at each x[i] (by construction of X),
        # so this replaces the per-point Python evaluation loop.
        residual = np.asarray(np.dot(X, w) - Y).ravel()
        loss_function = float(np.dot(residual, residual)) / (2 * x.size)
    # Flatten the column matrix back to a plain 1-D array for the caller.
    W = np.asarray(w).ravel().copy()
    print("loss_function:", loss_function)
    print("The iteratorNum times is:", iteratornNum)
    return W
# Example 2
# Compare plain least squares with its L2-regularized variant on the
# noisy sin(2*pi*x) samples, plotting both fits over the data.
w_init = np.random.randn(power)  # random starting coefficient vector
print("x", x)
print("y", y)
print("w_init:", w_init)

plot.title("Least Square")
# Ground-truth curve the samples were drawn from.
plot.plot(x_points, np.sin(2 * np.pi * x_points), color="red", label='$sin(2*pi*x)$')

# Scatter the noisy training samples.
for idx in range(size):
    plot.scatter(x[idx], y[idx], color="green", linewidths=0.01)

# Unregularized least-squares fit.
w_LS1 = LS.leastSqure(x, y, w_init)
print("w_LS1:", w_LS1)
plot.plot(x_points, poly.polynomial(x_points, w_LS1), color="green", label='$LS$')

# Least-squares fit with an L2 penalty.
w_LS2 = LS.leastSqureRegular(x, y, w_init)
print("w_LS2:", w_LS2)
plot.plot(x_points, poly.polynomial(x_points, w_LS2), color="blue", label='$LS+Regular$')

plot.legend()
plot.show()
# Example 3
# Compare the conjugate-gradient fit with its L2-regularized variant,
# plotting both polynomials over the noisy sin(2*pi*x) samples.
w_init = np.random.randn(power)  # random starting coefficient vector
print("x", x)
print("y", y)
print("w_init:", w_init)

plot.title("Conjungate Gradient")
# Ground-truth curve the samples were drawn from.
plot.plot(x_points, np.sin(2 * np.pi * x_points), color="red", label='$sin(2*pi*x)$')

# Scatter the noisy training samples.
for idx in range(size):
    plot.scatter(x[idx], y[idx], color="green", linewidths=0.01)

# Plain conjugate-gradient fit.
w_FR1 = CG.ConjungateGradient(x, y, w_init)
print("w_FR1:", w_FR1)
plot.plot(x_points, poly.polynomial(x_points, w_FR1), color="green", label='$CG$')

# Conjugate gradient with an L2 penalty.
w_FR2 = CG.ConjungateGradientRegular(x, y, w_init)
print("w_FR2:", w_FR2)
plot.plot(x_points, poly.polynomial(x_points, w_FR2), color="blue", label='$CG+Regular$')

plot.legend()
plot.show()
# Example 4
# Compare the gradient-descent fit with its L2-regularized variant,
# plotting both polynomials over the noisy sin(2*pi*x) samples.
w_init = np.random.randn(power)  # random starting coefficient vector
print("x", x)
print("y", y)
print("w_init:", w_init)

plot.title("Gradient Descent")
# Ground-truth curve the samples were drawn from.
plot.plot(x_points, np.sin(2 * np.pi * x_points), color="red", label='$sin(2*pi*x)$')

# Scatter the noisy training samples.
for idx in range(size):
    plot.scatter(x[idx], y[idx], color="green", linewidths=0.01)

# Plain gradient-descent fit.
w_GD1 = GD.GradientDescent(x, y, w_init)
print("w_GD1:", w_GD1)
plot.plot(x_points, poly.polynomial(x_points, w_GD1), color="blue", label='$GD$')

# Gradient descent with an L2 penalty.
w_GD2 = GD.GradientDescentRegular(x, y, w_init)
print("w_GD2:", w_GD2)
plot.plot(x_points, poly.polynomial(x_points, w_GD2), color="green", label='$GD+Regular$')

plot.legend()
plot.show()
# Example 5
# Overlay several fitting methods on the noisy sin(2*pi*x) samples.
print("y", y)
print("w_init:", w_init)

plot.title("sin function")
# Ground-truth curve the samples were drawn from.
plot.plot(x_points, np.sin(2 * np.pi * x_points), color="red", label='$sin(2*pi*x)$')

# Scatter the noisy training samples.
for idx in range(size):
    plot.scatter(x[idx], y[idx], color="green", linewidths=0.01)

# Unregularized least-squares fit.
w_LS1 = LS.leastSqure(x, y, w_init)
print("w_LS1:", w_LS1)
plot.plot(x_points, poly.polynomial(x_points, w_LS1), color="green", label='$LS$')

# Least-squares fit with an L2 penalty.
w_LS2 = LS.leastSqureRegular(x, y, w_init)
print("w_LS2:", w_LS2)
plot.plot(x_points, poly.polynomial(x_points, w_LS2), color="blue", label='$LS+Regular$')

# Plain gradient-descent fit.
w_GD1 = GD.GradientDescent(x, y, w_init)
print("w_GD1:", w_GD1)
plot.plot(x_points,
          poly.polynomial(x_points, w_GD1),
          label='$GD$',