Example #1
import numpy as np
import matplotlib.pyplot as plt
import logReg  # project-local logistic-regression module

# m, nf, k, and lambda_ are assumed to be set earlier in the original script
X, y = construct_log_reg_data(m=m)
X = np.column_stack([np.ones((X.shape[0], 1)), X])  # prepend a column of ones for theta0


X = feature_extend(X, n_features=nf)  # expand inputs with polynomial features
# evaluation grid spanning the data range, with the same intercept and feature expansion as X
T = np.array([np.ones(k)] + [np.linspace(min(X[:, 1]), max(X[:, 1]), k)]).T
T = feature_extend(T, n_features=nf)


# fit linear regression via the regularized normal equations, and fit logistic regression
thetaLin = normal_lin_reg(X, y, lambda_=lambda_)
thetaLog, _, _ = logReg.log_reg(X, y, lambda_)


ylin = predict_lin_reg(T, thetaLin)  # linear-regression predictions on the grid
ylog = predict_log_reg(T, thetaLog)  # logistic-regression predictions on the grid

print "Normal eq. lin:", thetaLin
print "Normal eq. log:", thetaLog

plt.close()
plt.plot(X[:, 1], y, "o")
# plt.plot(T[:,1], ylin)
# overlay logistic-regression fits for several regularization strengths
for i in range(4):
    plt.plot(T[:, 1], logReg.predict_log_reg(X, y, T, {"lambda_": 0.001 * i ** 2}))
plt.xlabel("x1")
plt.ylabel("y")
plt.title("Polynomial regression (lambda=%.5f)" % lambda_)
plt.savefig("linlogreg.png")
plt.show()
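
This snippet relies on several project-local helpers (construct_log_reg_data, feature_extend, normal_lin_reg, predict_lin_reg, predict_log_reg) that are defined elsewhere. As a rough, non-authoritative sketch, normal_lin_reg presumably solves the regularized normal equations theta = (X^T X + lambda*I)^(-1) X^T y; the implementation below is an assumption based on the name, not the original code.

import numpy as np

def normal_lin_reg(X, y, lambda_=0.0):
    # assumed behaviour: closed-form ridge solution of the regularized normal equations;
    # the intercept column is regularized here for simplicity, which the original may avoid
    n = X.shape[1]
    A = X.T @ X + lambda_ * np.eye(n)
    return np.linalg.solve(A, X.T @ y)

def predict_lin_reg(T, theta):
    # linear predictions on an already feature-extended grid T
    return T @ theta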
Example #2
def logisticReg(trainD, testD, args):
    # unpack feature matrix and labels from the training data, features from the test data,
    # and return logistic-regression predictions for the test set
    X, y, _ = trainD.to_numpy()
    T, _, _ = testD.to_numpy()
    return logReg.predict_log_reg(X, y, T, args)
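
For context, a minimal usage sketch. The Dataset wrapper, the toy data, and the lambda_ value are illustrative assumptions; the only things taken from the example are that to_numpy() returns a (features, labels, metadata) triple and that logReg.predict_log_reg(X, y, T, args) is available.

import numpy as np

class Dataset:
    # hypothetical wrapper matching the to_numpy() unpacking used in logisticReg
    def __init__(self, X, y, meta=None):
        self.X, self.y, self.meta = X, y, meta

    def to_numpy(self):
        return self.X, self.y, self.meta

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 2))
y = (X[:, 0] + X[:, 1] > 0).astype(float)  # toy labels, illustrative only
train, test = Dataset(X[:80], y[:80]), Dataset(X[80:], y[80:])

preds = logisticReg(train, test, {"lambda_": 0.01})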