Пример #1
0
# -*- coding: UTF-8 -*-
# Filename : 03BPTest.py

from numpy import *
import operator
import BackPropgation
import matplotlib.pyplot as plt

# Load the data set (the first column is an all-ones bias vector after
# initialization).  Alternative data file: studentTest.txt
dataSet, classLabels = BackPropgation.loadDataSet("testSet2.txt")
dataSet = mat(dataSet)     # numpy matrix, shape (m, n)
m, n = shape(dataSet)
SampIn = dataSet.T         # samples as columns: shape (n, m)
hi_wb = ones((m, 1))       # all-ones weight vector, shape (m, 1)
hi_input = SampIn * hi_wb  # matrix product -> shape (n, 1)
# print() with a single argument behaves identically on Python 2 and 3;
# the original `print hi_input` statement is a syntax error under Python 3.
print(hi_input)
Пример #2
0
# -*- coding: UTF-8 -*-
# Filename : 03BPTest.py

from numpy import *
import operator
import Untils          # BUG FIX: Untils is used below but was never imported
import BackPropgation
import matplotlib.pyplot as plt

# Load and normalize the data set (the first column is an all-ones bias
# vector after initialization).  Alternative data file: studentTest.txt
dataSet, classLabels = BackPropgation.loadDataSet("testSet2.txt")
dataSet = BackPropgation.normalize(mat(dataSet))

# Rebuild dataSet as dataMat: a leading all-ones (bias) column followed by
# the first two feature columns of the normalized data.
dataMat = mat(ones((shape(dataSet)[0], shape(dataSet)[1])))
dataMat[:, 1] = mat(dataSet)[:, 0]
dataMat[:, 2] = mat(dataSet)[:, 1]

# Scatter plot of the samples, colored by class label
Untils.drawClassScatter(dataMat, transpose(classLabels), False)

# Classify the data with the BP neural network
errRec, WEX, wex = BackPropgation.bpNet(dataSet, classLabels)

# Evaluate the trained network over [-3, 3] to get the decision surface
x, z = BackPropgation.BPClassfier(-3.0, 3.0, WEX, wex)

# Draw the classification contour
Untils.classfyContour(x, x, z)

# Plot the error curve
Пример #3
0
# -*- coding: UTF-8 -*-
# Filename : 03BPTest.py

from numpy import *
import operator
import BackPropgation
import matplotlib.pyplot as plt

# Load the data set (the first column is an all-ones bias vector after
# initialization).  Alternative data file: studentTest.txt
dataSet, classLabels = BackPropgation.loadDataSet("testSet2.txt")
dataSet = mat(dataSet)     # numpy matrix, shape (m, n)
m, n = shape(dataSet)
SampIn = dataSet.T         # samples as columns: shape (n, m)
hi_wb = ones((m, 1))       # all-ones weight vector, shape (m, 1)
hi_input = SampIn * hi_wb  # matrix product -> shape (n, 1)
# print() with a single argument behaves identically on Python 2 and 3;
# the original `print hi_input` statement is a syntax error under Python 3.
print(hi_input)
Пример #4
0
# -*- coding: GBK -*-
# Filename :gradDecent.py

from numpy import *
import operator
import Untils
import BackPropgation
import matplotlib.pyplot as plt

# BP neural network (gradient-descent training script)

# Data set columns: 1 = intercept (all ones), 2 = x coordinate, 3 = y coordinate
dataMat, classLabels = BackPropgation.loadDataSet()
m, n = shape(dataMat)
SampIn = mat(BackPropgation.normalize(mat(dataMat)).transpose())
expected = mat(classLabels)

# Training hyper-parameters
eb = 0.01        # error tolerance
eta = 0.6        # learning rate
mc = 0.8         # momentum factor
maxiter = 1000   # maximum number of iterations

# Network construction

# Network dimensions
nSampNum = m   # number of samples
nSampDim = 2   # sample dimensionality
nHidden = 3    # hidden-layer neurons
nOut = 1       # output-layer neurons
# -*- coding: GBK -*-
# Filename :gradDecent.py

from numpy import *
import operator
import Untils
import BackPropgation
import matplotlib.pyplot as plt

# BP neural network (gradient-descent training script)

# Data set columns: 1 = intercept (all ones), 2 = x coordinate, 3 = y coordinate
dataMat, classLabels = BackPropgation.loadDataSet()
m, n = shape(dataMat)
SampIn = mat(BackPropgation.normalize(mat(dataMat)).transpose())
expected = mat(classLabels)

# Training hyper-parameters
eb = 0.01        # error tolerance
eta = 0.6        # learning rate
mc = 0.8         # momentum factor
maxiter = 1000   # maximum number of iterations

# Network construction

# Network dimensions
nSampNum = m   # number of samples
nSampDim = 2   # sample dimensionality
nHidden = 3    # hidden-layer neurons
nOut = 1       # output-layer neurons
Пример #6
0
# BUG FIX: the original used `- 1/2`, which is integer division (== 0) under
# Python 2, so weights landed in [0, 2) instead of the intended [-1, 1).
# Using 0.5 gives the centered initialization on both Python 2 and 3.
b = 2 * (random.rand(nHidden, 1) - 0.5)         # hidden bias in [-1, 1)
wex = mat(Untils.mergMatrix(mat(w), mat(b)))    # hidden weights merged with bias

# Output-layer parameters
W = 2 * (random.rand(nOut, nHidden) - 0.5)      # output weights in [-1, 1)
B = 2 * (random.rand(nOut, 1) - 0.5)            # output bias in [-1, 1)
WEX = mat(Untils.mergMatrix(mat(W), mat(B)))

dWEXOld = 0; dwexOld = 0   # previous weight deltas, used by the momentum term
# Training loop
iteration = 0
errRec = []
for i in range(maxiter):
    # Forward propagation of the working signal
    hp = wex * SampIn
    tau = BackPropgation.logsig(hp)
    tauex = Untils.mergMatrix(tau.T, ones((nSampNum, 1))).T

    HM = WEX * tauex
    out = BackPropgation.logsig(HM)
    err = expected - out
    sse = BackPropgation.sumsqr(err)
    errRec.append(sse)
    # Convergence check
    iteration = iteration + 1
    if sse <= eb:
        # %-formatting prints identically on Python 2 and 3 (the original
        # `print "iteration:",i` is a syntax error under Python 3)
        print("iteration: %s" % i)
        break

    # Backward propagation of the error signal
    # DELTA and delta are the local gradients
Пример #7
0
# BUG FIX: the original used `- 1/2`, which is integer division (== 0) under
# Python 2, so weights landed in [0, 2) instead of the intended [-1, 1).
# Using 0.5 gives the centered initialization on both Python 2 and 3.
b = 2 * (random.rand(nHidden, 1) - 0.5)         # hidden bias in [-1, 1)
wex = mat(Untils.mergMatrix(mat(w), mat(b)))    # hidden weights merged with bias

# Output-layer parameters
W = 2 * (random.rand(nOut, nHidden) - 0.5)      # output weights in [-1, 1)
B = 2 * (random.rand(nOut, 1) - 0.5)            # output bias in [-1, 1)
WEX = mat(Untils.mergMatrix(mat(W), mat(B)))

dWEXOld = 0; dwexOld = 0   # previous weight deltas, used by the momentum term
# Training loop
iteration = 0
errRec = []
for i in range(maxiter):
    # Forward propagation of the working signal
    hp = wex * SampIn
    tau = BackPropgation.logsig(hp)
    tauex = Untils.mergMatrix(tau.T, ones((nSampNum, 1))).T

    HM = WEX * tauex
    out = BackPropgation.logsig(HM)
    err = expected - out
    sse = BackPropgation.sumsqr(err)
    errRec.append(sse)
    # Convergence check
    iteration = iteration + 1
    if sse <= eb:
        # %-formatting prints identically on Python 2 and 3 (the original
        # `print "iteration:",i` is a syntax error under Python 3)
        print("iteration: %s" % i)
        break

    # Backward propagation of the error signal
    # DELTA and delta are the local gradients
Пример #8
0
# Data set: two features plus a constant bias column of 1s
dataSet = [[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]
classLabels = [0, 1, 1, 0]   # XOR labels
expected = mat(classLabels)

# Rebuild dataSet as dataMat: a leading all-ones (bias) column followed by
# the first two feature columns.
dataMat = mat(ones((shape(dataSet)[0], shape(dataSet)[1])))
dataMat[:, 1] = mat(dataSet)[:, 0]
dataMat[:, 2] = mat(dataSet)[:, 1]

# Scatter plot of the samples, colored by class label
Untils.drawClassScatter(dataMat, transpose(expected), False)

# Classify the data with the BP neural network
errRec, WEX, wex = BackPropgation.bpNet(dataSet, classLabels)

# print() form so the script is valid under Python 3 as well
print(errRec, WEX, wex)

# Build a 30x30 evaluation grid for the decision surface
x = linspace(-0.2, 1.2, 30)
xx = mat(ones((30, 30)))
xx[:, 0:30] = x          # broadcast: every row of xx is the linspace vector
yy = xx.T                # every column of yy is the linspace vector
z = ones((len(xx), len(yy)))
for i in range(len(xx)):
    for j in range(len(yy)):
        xi = []
        tauex = []
        tautemp = []
        # BUG FIX: the original wrapped this call in mat(...); list.append
        # returns None, so mat(None) raises. Just append the augmented point.
        xi.append([xx[i, j], yy[i, j], 1])
Пример #9
0
# -*- coding: GBK -*-
# Filename : 03BPTest.py

from numpy import *
import operator
import Untils
import BackPropgation
import matplotlib.pyplot as plt

# Load the data set and normalize it (the first column is an all-ones bias
# vector after initialization; studentTest.txt is an alternative file).
dataSet, classLabels = BackPropgation.loadDataSet("testSet2.txt")
dataSet = BackPropgation.normalize(mat(dataSet))

# Rebuild the data as dataMat: an all-ones (bias) first column, then the
# first two feature columns of the normalized data.
rows, cols = shape(dataSet)
dataMat = mat(ones((rows, cols)))
dataMat[:, 1] = mat(dataSet)[:, 0]
dataMat[:, 2] = mat(dataSet)[:, 1]

# Scatter plot of the samples, colored by class label.
Untils.drawClassScatter(dataMat, transpose(classLabels), False)

# Classify the data with the BP neural network.
errRec, WEX, wex = BackPropgation.bpNet(dataSet, classLabels)

# Evaluate the trained network over [-3, 3] and draw the decision contour.
x, z = BackPropgation.BPClassfier(-3.0, 3.0, WEX, wex)
Untils.classfyContour(x, x, z)

# Plot the error curve
Пример #10
0
# Data set: two features plus a constant bias column of 1s
dataSet = [[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]
classLabels = [0, 1, 1, 0]   # XOR labels
expected = mat(classLabels)

# Rebuild dataSet as dataMat: a leading all-ones (bias) column followed by
# the first two feature columns.
dataMat = mat(ones((shape(dataSet)[0], shape(dataSet)[1])))
dataMat[:, 1] = mat(dataSet)[:, 0]
dataMat[:, 2] = mat(dataSet)[:, 1]

# Scatter plot of the samples, colored by class label
Untils.drawClassScatter(dataMat, transpose(expected), False)

# Classify the data with the BP neural network
errRec, WEX, wex = BackPropgation.bpNet(dataSet, classLabels)

# print() form so the script is valid under Python 3 as well
print(errRec, WEX, wex)

# Build a 30x30 evaluation grid for the decision surface
x = linspace(-0.2, 1.2, 30)
xx = mat(ones((30, 30)))
xx[:, 0:30] = x          # broadcast: every row of xx is the linspace vector
yy = xx.T                # every column of yy is the linspace vector
z = ones((len(xx), len(yy)))
for i in range(len(xx)):
    for j in range(len(yy)):
        xi = []
        tauex = []
        tautemp = []
        # BUG FIX: the original wrapped this call in mat(...); list.append
        # returns None, so mat(None) raises. Append the augmented point, then
        # build the matrix from xi where it is actually used.
        xi.append([xx[i, j], yy[i, j], 1])
        hp = wex * (mat(xi).T)
        tau = BackPropgation.logistic(hp)