Example #1
import operator
from numpy import *          # mat, ones, shape, exp are used below
import Untils
import matplotlib.pyplot as plt

# BP neural network

# Data set: column 1: intercept (all 1s); column 2: x coordinate; column 3: y coordinate
dataMat,classLabels = Untils.loadDataSet("student.txt")
dataMat = mat(dataMat)
classMat = mat(classLabels)

# Normalize the data
dataMat = Untils.normalize(dataMat)

# Plot a scatter diagram of the data set
Untils.drawClassScatter(dataMat,classLabels,False)

# m: number of rows, n: number of columns
m,n = shape(dataMat)
labelMat = classMat.transpose()
# learning rate (step size)
alpha = 0.001
# number of iterations
maxCycles = 500
# parameters of the linear separating line y=a*x+b: b: weights[0]; a: weights[1]/weights[2]
weights = ones((n,1))
# compute the regression coefficients (weights)
for k in range(maxCycles):
    # sigmoid of the linear combination; h is a column vector of predicted probabilities
    h = 1.0/(1+exp(-dataMat*weights))
    # standard gradient-ascent update (the source snippet breaks off after computing h;
    # these two lines are the usual completion of this loop)
    error = labelMat - h
    weights = weights + alpha * dataMat.transpose() * error
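Example #1 leans on Untils.normalize, whose body is not shown anywhere in this listing. Below is a minimal sketch of what such a helper typically does, assuming min-max scaling of each coordinate column into [0, 1] while leaving the intercept column untouched; the function name and exact behaviour are assumptions for illustration, not the repo's actual code.

import numpy as np

def normalize_minmax(dataMat):
    # hypothetical sketch: scale every column except the intercept
    # column 0 into the [0, 1] range via min-max normalization
    dataMat = np.asarray(dataMat, dtype=float).copy()
    for j in range(1, dataMat.shape[1]):
        col = dataMat[:, j]
        span = col.max() - col.min()
        if span > 0:                      # avoid division by zero on constant columns
            dataMat[:, j] = (col - col.min()) / span
    return np.asmatrix(dataMat)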
Example #2
import operator
from numpy import *          # mat, ones, shape, transpose, linspace, log2 are used below
import Untils
import BackPropgation
import matplotlib.pyplot as plt

# Data set
dataSet,classLabels = BackPropgation.loadDataSet("testSet2.txt") # the first column is initialized as an all-ones vector; alternative file: studentTest.txt
dataSet = BackPropgation.normalize(mat(dataSet))

# Plot the data points
# Rebuild the dataMat matrix from dataSet
dataMat = mat(ones((shape(dataSet)[0],shape(dataSet)[1])))
dataMat[:,1] = mat(dataSet)[:,0]
dataMat[:,2] = mat(dataSet)[:,1]

# Scatter plot of the data set
Untils.drawClassScatter(dataMat,transpose(classLabels),False)

# Classify the data with the BP neural network
errRec,WEX,wex = BackPropgation.bpNet(dataSet,classLabels)

# Compute and plot the classification boundary
x,z = BackPropgation.BPClassfier(-3.0,3.0,WEX,wex)

Untils.classfyContour(x,x,z)

# Plot the error curve
X = linspace(0,2000,2000)
Y = log2(errRec + 1.0e-6)    # epsilon moved inside the log to guard against log2(0)
Untils.TrendLine(X,Y)
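BackPropgation.BPClassfier itself is not shown in this listing. Judging from the weight matrices manipulated in Example #3 (wex maps input plus bias to the hidden layer; WEX maps hidden activations plus bias to the output), a grid evaluator along the following lines would produce the x and z consumed by classfyContour. This is a sketch under those assumptions, not the library's actual code; logsig and bp_classify_grid are illustrative names.

import numpy as np

def logsig(x):
    return 1.0 / (1.0 + np.exp(-x))

def bp_classify_grid(xmin, xmax, WEX, wex, steps=50):
    # hypothetical sketch: evaluate the trained two-layer net on a regular
    # grid so that a contour of the output traces the decision boundary
    x = np.linspace(xmin, xmax, steps)
    z = np.zeros((steps, steps))
    for i in range(steps):
        for j in range(steps):
            samp = np.array([[x[i]], [x[j]], [1.0]])   # input coordinates + bias
            tau = logsig(wex @ samp)                   # hidden-layer activations
            tauex = np.vstack([tau, [[1.0]]])          # append bias unit
            z[i, j] = logsig(WEX @ tauex)[0, 0]        # network output in (0, 1)
    return x, z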
Example #3
    # Fragment of the BP training loop: backpropagate the error, then apply
    # a momentum-smoothed weight update.
    DELTA = multiply(err,BackPropgation.dlogsig(HM,out))      # output-layer delta
    wDelta = W.T*DELTA
    delta = multiply(wDelta,BackPropgation.dlogsig(hp,tau))   # hidden-layer delta
    dWEX = DELTA*tauex.T
    dwex = delta*SampIn.T
    # update the weights: plain gradient step on the first iteration,
    # momentum-blended step afterwards
    if i == 0:
        WEX = WEX + eta * dWEX
        wex = wex + eta * dwex
    else:
        WEX = WEX + (1 - mc)*eta*dWEX + mc * dWEXOld
        wex = wex + (1 - mc)*eta*dwex + mc * dwexOld

    dWEXOld = dWEX
    dwexOld = dwex
    W  = WEX[:,0:nHidden]

# Rebuild the dataMat matrix from dataSet
dataMat = mat(ones((shape(dataSet)[0],shape(dataSet)[1])))
dataMat[:,1] = mat(dataSet)[:,0]
dataMat[:,2] = mat(dataSet)[:,1]

# Plot the data points
Untils.drawClassScatter(dataMat,transpose(expected))

# Plot the classification boundary


# Plot the error curve
X = linspace(0,1000,1000)
Untils.TrendLine(X,errRec)
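The branch in the update above implements gradient descent with momentum: the new step blends the fresh gradient with the previous gradient's direction, which damps oscillation across iterations. A self-contained sketch of the same rule, with illustrative names rather than the repo's:

import numpy as np

def momentum_step(w, grad, grad_old, eta=0.6, mc=0.8, first=False):
    # hypothetical sketch of the update rule used in Example #3:
    # a plain gradient step on the first iteration, then a blend of
    # (1 - mc) * eta * current gradient and mc * previous gradient
    if first:
        return w + eta * grad
    return w + (1 - mc) * eta * grad + mc * grad_old

Note that, as in the source, the momentum term reuses the raw previous gradient (dWEXOld, dwexOld) rather than the previous weight increment.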