# Example 1
# Ridge regression test
import regression
from numpy import *
import matplotlib.pyplot as plt
reload(regression)
abX, abY = regression.loadDataSet('abalone.txt')
ridgeWeights = regression.ridgeTest(abX, abY)
# Plot the ridge trace (coefficients vs. log(λ))
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(ridgeWeights)
plt.show()
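# For reference, a minimal sketch of what ridgeTest computes, assuming the
# Machine Learning in Action-style implementation: standardize X, center y,
# then solve w = (X^T X + lam*I)^-1 X^T y over a log-spaced range of lambdas.
# Function names below are illustrative, not necessarily those in regression.py.
import numpy as np

def ridge_regres_sketch(xMat, yMat, lam=0.2):
    xTx = xMat.T * xMat
    denom = xTx + np.eye(xMat.shape[1]) * lam
    if np.linalg.det(denom) == 0.0:
        print('matrix is singular, cannot invert')
        return None
    return denom.I * (xMat.T * yMat)

def ridge_test_sketch(xArr, yArr, numLam=30):
    xMat = np.mat(xArr); yMat = np.mat(yArr).T
    yMat = yMat - np.mean(yMat, 0)                       # center the target
    xMat = (xMat - np.mean(xMat, 0)) / np.var(xMat, 0)   # standardize the features
    wMat = np.zeros((numLam, xMat.shape[1]))             # one weight row per lambda
    for i in range(numLam):
        ws = ridge_regres_sketch(xMat, yMat, np.exp(i - 10))
        wMat[i, :] = ws.T
    return wMat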

# Lasso test (via forward stagewise regression)
reload(regression)
xArr, yArr = regression.loadDataSet('abalone.txt')
regression.stageWise(xArr, yArr, 0.01, 200)
regression.stageWise(xArr, yArr, 0.001, 5000)
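# A minimal sketch of forward stagewise regression as called above, assuming the
# Machine Learning in Action-style stageWise(xArr, yArr, eps, numIt): on every
# iteration each weight is nudged by +/- eps and the single move that most
# reduces the residual sum of squares is kept. Names here are illustrative.
import numpy as np

def stage_wise_sketch(xArr, yArr, eps=0.01, numIt=100):
    xMat = np.mat(xArr); yMat = np.mat(yArr).T
    yMat = yMat - np.mean(yMat, 0)                       # center the target
    xMat = (xMat - np.mean(xMat, 0)) / np.var(xMat, 0)   # standardize the features
    m, n = xMat.shape
    returnMat = np.zeros((numIt, n))                     # weight history, one row per iteration
    ws = np.zeros((n, 1)); wsMax = ws.copy()
    for i in range(numIt):
        lowestError = np.inf
        for j in range(n):
            for sign in (-1, 1):
                wsTest = ws.copy()
                wsTest[j] += eps * sign
                rssE = ((yMat.A - (xMat * wsTest).A) ** 2).sum()
                if rssE < lowestError:
                    lowestError, wsMax = rssE, wsTest
        ws = wsMax.copy()
        returnMat[i, :] = ws.T
    return returnMat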

# Compare with ordinary least squares
xMat = mat(xArr)
yMat = mat(yArr).T
xMat = regression.regularize(xMat)
yM = mean(yMat, 0)
yMat = yMat - yM
weights = regression.standRegres(xMat, yMat.T)
weights.T  # the returned weights should be roughly the same
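# Ordinary least squares via the normal equation w = (X^T X)^-1 X^T y is what a
# standRegres-style function computes; a sketch is given below for reference.
# The actual regression.py may differ in details.
import numpy as np

def stand_regres_sketch(xArr, yArr):
    xMat = np.mat(xArr); yMat = np.mat(yArr).T
    xTx = xMat.T * xMat
    if np.linalg.det(xTx) == 0.0:
        print('matrix is singular, cannot invert')
        return None
    return xTx.I * (xMat.T * yMat)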

# Fetch the Google (LEGO) data
reload(regression)
lgX = []
lgY = []
regression.setDataCollect(lgX, lgY)
# back to the abalone data for a ridge trace
dm, ls = regression.loadDataSet('abalone.txt')
ridgeWeights = regression.ridgeTest(dm, ls)
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(ridgeWeights)
plt.show()

# Forward stagewise regression
import regression
from numpy import *
dm, ls = regression.loadDataSet('abalone.txt')
wMat = regression.stageWise(dm, ls, 0.01, 200)
wMat = regression.stageWise(dm, ls, 0.001, 5000)
xMat = mat(dm)
yMat = mat(ls).T
xMat = (xMat - mean(xMat, 0)) / var(xMat, 0)
yMat = yMat - mean(yMat, 0)
regression.standRegres(xMat, yMat.T).T
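# The manual standardization above (subtract each column's mean, divide by its
# variance, and center y without rescaling) is what a regularize-style helper
# like the one called earlier wraps. A sketch, assuming that behavior:
import numpy as np

def regularize_sketch(xMat):
    inMat = xMat.copy()
    inMeans = np.mean(inMat, 0)      # per-feature mean
    inVar = np.var(inMat, 0)         # per-feature variance (note: variance, not std dev)
    return (inMat - inMeans) / inVar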

import regression
lgX, lgY = [], []
regression.setDataCollect(lgX, lgY)

import regression
from numpy import *
lgx, lgy = regression.loadLEGOData('out.txt')
# ws = regression.standRegres(lgx,lgy)
# lgx = mat(lgx)
# lgx[0] * ws
ws = regression.crossValidation(lgx, lgy, 10)
regression.ridgeTest(lgx, lgy)
#regression.stageWise(xArr, yArr, 0.01, 200)
#regression.stageWise(xArr, yArr, 0.001, 5000)
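# crossValidation is used above to choose a ridge lambda. A condensed,
# self-contained sketch of the idea: repeated 90/10 holdout splits, ridge weights
# fitted on the training split for a log-spaced range of lambdas, each lambda
# scored on the held-out split, and the lambda with the lowest average error
# kept. Names are illustrative; regression.crossValidation may differ in details.
import random
import numpy as np

def cross_validation_sketch(xArr, yArr, numVal=10, numLam=30):
    m = len(yArr)
    errorMat = np.zeros((numVal, numLam))
    for i in range(numVal):
        idx = list(range(m)); random.shuffle(idx)
        cut = int(m * 0.9)
        trainX = np.mat([xArr[k] for k in idx[:cut]]); trainY = np.mat([yArr[k] for k in idx[:cut]]).T
        testX = np.mat([xArr[k] for k in idx[cut:]]); testY = np.array([yArr[k] for k in idx[cut:]])
        xMean, xVar, yMean = np.mean(trainX, 0), np.var(trainX, 0), np.mean(trainY)
        trainXs = (trainX - xMean) / xVar                # standardize with training stats
        testXs = (testX - xMean) / xVar
        trainYc = trainY - yMean                         # center the training target
        for j in range(numLam):
            lam = np.exp(j - 10)                         # lambdas on a log scale
            denom = trainXs.T * trainXs + np.eye(trainXs.shape[1]) * lam
            ws = denom.I * (trainXs.T * trainYc)         # ridge weights for this lambda
            yEst = testXs * ws + yMean                   # undo the centering of y
            errorMat[i, j] = ((yEst.A.flatten() - testY) ** 2).sum()
    return int(np.argmin(np.mean(errorMat, 0)))          # index of the best lambda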

xArr, yArr = regression.loadDataSet('abalone.txt')
xMat, yMat = mat(xArr), mat(yArr).T
xMat = regression.regularize(xMat)
yM = mean(yMat, 0)
yMat = yMat - yM
weights = regression.standRegres(xMat, yMat.T)
print weights.T

weights = regression.stageWise(xArr, yArr, 0.005, 1000)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(weights)
plt.show()

'''
lgX, lgY = [], []
regression.setDataCollect(lgX, lgY)
print shape(lgX)
lgX1 = mat(ones((58, 5)))
lgX1[:, 1:5] = mat(lgX)
print lgX1[0]
ws = regression.standRegres(lgX1, lgY)
print ws
print lgX1[0] * ws
print lgX1[-1] * ws
print lgX1[43] * ws
regression.crossValidation(lgX, lgY, 10)
print regression.ridgeTest(lgX, lgY)
'''
# ax = fig.add_subplot(111)
# ax.plot(xSort[:,1], yHat[srtInd])
# ax.scatter(xMat[:,1].flatten().A[0], mat(yArr).T.flatten().A[0], s=2, c='red')
# plt.show()

#################### abalone age #############################

abX, abY = regression.loadDataSet('abalone.txt')
# yHat01 = regression.lwlrTest(abX[0:99],abX[0:99],abY[0:99],0.1)
# yHat02 = regression.lwlrTest(abX[0:99],abX[0:99],abY[0:99],1)
# yHat03 = regression.lwlrTest(abX[0:99],abX[0:99],abY[0:99],10)

# print regression.rssError(abY[0:99], yHat01.T)
# print regression.rssError(abY[0:99], yHat02.T)
# print regression.rssError(abY[0:99], yHat03.T)
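# The commented-out lwlrTest / rssError calls above compare locally weighted
# linear regression at several Gaussian kernel widths k. A minimal sketch of
# both, assuming the usual formulation (each test point gets its own weighted
# least-squares fit, with weights decaying with distance). Names are illustrative.
import numpy as np

def lwlr_sketch(testPoint, xMat, yMat, k=1.0):
    m = xMat.shape[0]
    weights = np.mat(np.eye(m))
    for j in range(m):                                   # Gaussian kernel weights
        diffMat = testPoint - xMat[j, :]
        weights[j, j] = np.exp((diffMat * diffMat.T).item() / (-2.0 * k ** 2))
    xTx = xMat.T * (weights * xMat)
    ws = xTx.I * (xMat.T * (weights * yMat))             # weighted normal equation
    return (testPoint * ws).item()

def lwlr_test_sketch(testArr, xArr, yArr, k=1.0):
    xMat = np.mat(xArr); yMat = np.mat(yArr).T
    testMat = np.mat(testArr)
    return np.array([lwlr_sketch(testMat[i], xMat, yMat, k)
                     for i in range(testMat.shape[0])])

def rss_error_sketch(yArr, yHatArr):                     # sum of squared residuals
    return ((np.array(yArr) - np.array(yHatArr)) ** 2).sum()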

#print regression.ridgeRegres(abX, abY, 1)


# ridgeWeights = regression.ridgeTest(abX, abY)
# print ridgeWeights

# fig = plt.figure()
# ax = fig.add_subplot(111)
# ax.plot(ridgeWeights)
# plt.show()

###################### lego #######################################
lgX = []; lgY = []
regression.setDataCollect(lgX, lgY)
print lgX, lgY
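# setDataCollect is defined in regression.py; in the Machine Learning in Action
# version it scrapes LEGO set prices through the (now defunct) Google Shopping
# API, appending [year, piece count, new/used flag, original price] to lgX and
# the observed selling price to lgY. A stand-in that reads the same fields from
# a local tab-separated file is sketched below; the file name and column layout
# are assumptions, not part of the original code.
def set_data_collect_sketch(retX, retY, fname='legoPrices.txt'):
    with open(fname) as fr:
        for line in fr:
            # assumed columns: year  pieces  newFlag  origPrice  sellingPrice
            yr, numPce, newFlag, origPrc, sellPrc = map(float, line.strip().split('\t'))
            retX.append([yr, numPce, newFlag, origPrc])
            retY.append(sellPrc)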
# Example 5
# -*- coding: utf-8 -*-
import regression
from numpy import *
import matplotlib.pyplot as plt
lgX = []
lgY = []
print regression.setDataCollect(lgX, lgY)
xArr, yArr = regression.loadDataSet('abalone.txt')
#regression.stageWise(xArr, yArr, 0.01, 200)
#regression.stageWise(xArr, yArr, 0.001, 5000)

xMat, yMat = mat(xArr), mat(yArr).T
xMat = regression.regularize(xMat)
yM = mean(yMat, 0)
yMat = yMat - yM
weights = regression.standRegres(xMat, yMat.T)
print weights.T

weights = regression.stageWise(xArr, yArr, 0.005, 1000)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(weights)
plt.show()
'''
lgX, lgY = [], []
regression.setDataCollect(lgX, lgY)
print shape(lgX)
lgX1 = mat(ones((58, 5)))
lgX1[:, 1:5] = mat(lgX)
print lgX1[0]
ws = regression.standRegres(lgX1, lgY)
print ws
print lgX1[0] * ws
print lgX1[-1] * ws
print lgX1[43] * ws
regression.crossValidation(lgX, lgY, 10)
print regression.ridgeTest(lgX, lgY)
'''