示例#1
0
def bpNet(dataSet, classLabels):
    SampIn = mat(dataSet).T
    expected = mat(classLabels)
    m, n = shape(dataSet)
    eb = 0.01
    eta = 0.05
    mc = 0.3
    maxiter = 2000
    errlist = []

    nSampleNum = m
    nSampleDim = n - 1
    nHidden = 4
    nOut = 1

    hi_w = 2.0 * (random.rand(nHidden, nSampleDim) - 0.5)
    hi_b = 2.0 * (random.rand(nHidden, 1) - 0.5)
    hi_wb = mat(Untils.mergMatrix(mat(hi_w), mat(hi_b)))

    out_w = 2.0 * (random.rand(nOut, nHidden) - 0.5)
    out_b = 2.0 * (random.rand(nOut, 1) - 0.5)
    out_wb = mat(Untils.mergMatrix(mat(out_w), mat(out_b)))

    dout_wbOld = 0.0
    dhi_wbOld = 0.0

    for i in xrange(maxiter):
        hi_input = hi_wb * SampIn
        hi_output = logistic(hi_input)
        hi2out = Untils.mergeMatrix(hi_output.T, ones((nSampleNum, 1))).T

        out_input = out_wb * hi2out
        out_output = logistic(out_input)

        err = expected - out_output
        sse = errorfunc(err)
        errlist.append(sse)

        if sse <= eb:
            print "iteration:", i + 1
            break
        DELTA = multiply(err, dlogit(out_input, out_output))
        wDelta = out_wb[:, :-1].T * DELTA

        delta = multiply(wDelta, dlogit(hi_input, hi_output))
        dout_wb = DELTA * hi2out.T

        dhi_wb = delta * SampIn.T

        if i == 0:
            out_wb = out_wb + eta * dout_wb
            hi_wb = hi_wb + eta * dhi_wb
        else:
            out_wb = out_wb + (1.0 - mc) * eta * dout_wb + mc * dout_wbOld
            hi_wb = hi_wb + (1.0 - mc) * eta * dhi_wb + mc * dhi_wbOld
            dout_wbOld = dout_wb
            dhi_wbOld = dhi_wb
        return errlist, out_wb, hi_wb
示例#2
0
# -*- coding:utf-8 -*-
# Filename : testBoltzmann01.py

import operator
import copy
import Untils
import Boltzmann
from numpy import *
import matplotlib.pyplot as plt 

# Load the city coordinate data set.
dataSet = Untils.loadDataSet("cities.txt")
cityPosition = mat(dataSet)
m,n = shape(cityPosition)
pn = m
# Convert the city coordinate matrix into an adjacency matrix
# (matrix of pairwise inter-city distances).
dist = Boltzmann.distM(cityPosition,cityPosition.transpose())

# Initialization
MAX_ITER = 2000 # 1000-2000
MAX_M = m;
Lambda = 0.97;
T0 = 1000; # 100-1000
# Build an initial feasible solution (a random tour).
x0 = arange(m)
random.shuffle(x0)
# 
T = T0;
iteration = 0;
x = x0;                   # path variable
xx = x0.tolist();         # each path
di = []
示例#3
0
def bpNet(dataSet, classLabels):
    # 数据集矩阵化
    SampIn = mat(dataSet).T
    expected = mat(classLabels)
    m, n = shape(dataSet)
    # 网络参数
    eb = 0.01  # 误差容限
    eta = 0.05  # 学习率
    mc = 0.3  # 动量因子
    maxiter = 2000  # 最大迭代次数
    errlist = []  # 误差列表

    # 构造网络
    # 初始化网络
    nSampNum = m
    # 样本数量
    nSampDim = n - 1
    # 样本维度
    nHidden = 4
    # 隐含层神经元
    nOut = 1
    # 输出层

    # 隐含层参数
    hi_w = 2.0 * (random.rand(nHidden, nSampDim) - 0.5)
    hi_b = 2.0 * (random.rand(nHidden, 1) - 0.5)
    hi_wb = mat(Untils.mergMatrix(mat(hi_w), mat(hi_b)))

    # 输出层参数
    out_w = 2.0 * (random.rand(nOut, nHidden) - 0.5)
    out_b = 2.0 * (random.rand(nOut, 1) - 0.5)
    out_wb = mat(Untils.mergMatrix(mat(out_w), mat(out_b)))
    # 默认旧权值
    dout_wbOld = 0.0
    dhi_wbOld = 0.0

    for i in xrange(maxiter):
        #1. 工作信号正向传播

        #1.1 输入层到隐含层
        hi_input = hi_wb * SampIn  #  hi_wb 4,n SampIn n,m
        hi_output = logistic(hi_input)
        print "hi_output.T.shape", hi_output.T.shape
        hi2out = Untils.mergMatrix(hi_output.T, ones((nSampNum, 1))).T

        #1.2 隐含层到输出层
        out_input = out_wb * hi2out
        out_output = logistic(out_input)

        #2. 误差计算
        err = expected - out_output
        sse = errorfunc(err)
        errlist.append(sse)
        #2.1 判断是否收敛
        if sse <= eb:
            print "iteration:", i + 1
            break

        #3.误差信号反向传播
        #3.1 DELTA为输出层到隐含层梯度
        DELTA = multiply(err, dlogit(out_input, out_output))
        wDelta = out_wb[:, :-1].T * DELTA
        print "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
        print "err.shape", err.shape
        print "dlogit(out_input,out_output).shape", dlogit(
            out_input, out_output).shape
        print "out_wb[:,:-1].T.shape", out_wb[:, :-1].T.shape

        #3.2 delta为隐含层到输入层梯度
        delta = multiply(wDelta, dlogit(hi_input, hi_output))
        dout_wb = DELTA * hi2out.T

        print "DELTA.shape", DELTA.shape
        print "hi2out.T.shape", hi2out.T.shape
        #3.3 输入层的权值更新
        dhi_wb = delta * SampIn.T

        #3.4 更新输出层和隐含层权值
        if i == 0:
            out_wb = out_wb + eta * dout_wb
            hi_wb = hi_wb + eta * dhi_wb
        else:
            out_wb = out_wb + (1.0 - mc) * eta * dout_wb + mc * dout_wbOld
            hi_wb = hi_wb + (1.0 - mc) * eta * dhi_wb + mc * dhi_wbOld
        dout_wbOld = dout_wb
        dhi_wbOld = dhi_wb
    return errlist, out_wb, hi_wb
def bpNet(dataSet,classLabels):
    """Train a 2-layer BP network with momentum; variant that refreshes W
    from WEX at the end of each iteration.

    Returns (errRec, WEX, wex): error record, output-layer augmented
    weights, hidden-layer augmented weights.
    """
    # Matrix-ize the data set
    SampIn = mat(dataSet).T
    expected = mat(classLabels)
    [m,n] = shape(dataSet) 
    # Network parameters
    eb = 0.01                   # error tolerance
    eta = 0.05                   # learning rate
    mc = 0.2                    # momentum factor
    maxiter = 2000              # maximum iterations
    errRec = []                 # error record
    # Build the network
    
    # Initialize the network
    nSampNum = m;  # number of samples
    nSampDim = n-1;  # sample dimension
    nHidden = 4;   # hidden-layer neurons
    nOut = 1;      # output layer

    # Input-layer parameters
    
    # Hidden-layer parameters
    # net_Hidden * 3: one row per hidden-layer node
    w = 2.0*(random.rand(nHidden,nSampDim)-1.0/2.0)  
    b = 2.0*(random.rand(nHidden,1)-1.0/2.0) 
    wex = mat(Untils.mergMatrix(mat(w),mat(b)))
    
    # Output-layer parameters
    W = 2.0*(random.rand(nOut,nHidden)-1.0/2.0) 
    B = 2.0*(random.rand(nOut,1)-1.0/2.0) 
    WEX = mat(Untils.mergMatrix(mat(W),mat(B)))
    
    # Previous weight deltas for the momentum term
    dWEXOld = 0.0 ; dwexOld = 0.0 
    # Training
    iteration = 0.0;  
    for i in range(maxiter):   
        # 1. Forward propagation of the working signal
        hp = wex*SampIn
        tau = logistic(hp)
        tauex  = Untils.mergMatrix(tau.T, ones((nSampNum,1))).T
    
        HM = WEX*tauex
        out = logistic(HM)    
        err = expected - out 
        sse = sumsqr(err) 
        errRec.append(sse); 
        # Check for convergence
        iteration = iteration + 1    
        if sse <= eb:
            print "iteration:",i    
            break;
         
        # 2. Backpropagation of the error signal
        # DELTA and delta are the local gradients
        DELTA = multiply(err,dlogit(HM,out))
        wDelta = W.T*DELTA
        delta = multiply(wDelta,dlogit(hp,tau))
        dWEX = DELTA*tauex.T 
        dwex = delta*SampIn.T        
        
        # 3. Update the weights
        if i == 0:  
            WEX = WEX + eta * dWEX
            wex = wex + eta * dwex
        else :    
            WEX = WEX + (1.0 - mc)*eta*dWEX + mc * dWEXOld
            wex = wex + (1.0 - mc)*eta*dwex + mc * dwexOld
     
        dWEXOld = dWEX
        dwexOld = dwex 
        # Keep W (bias column dropped) in sync with the updated WEX so the
        # next iteration's backprop uses the fresh output-layer weights.
        W  = WEX[:,0:nHidden]
    return errRec,WEX,wex 
示例#5
0
# -*- coding: utf-8 -*-
# Filename : testKohonen.py

import numpy as np 
import operator
import Untils
import Kohonen
from numpy import *
import matplotlib.pyplot as plt 

# Load the coordinate data file
dataSet = Untils.loadDataSet("dataset.txt");
dataMat = mat(dataSet)
dm,dn = shape(dataMat)
# Normalize the data
normDataset = Kohonen.mapMinMax(dataMat)
# Parameters
# Learning rate
rate1max=0.8  #0.8
rate1min=0.05;
# Learning radius
r1max=3;
r1min=0.8 #0.8

## Network construction
Inum=2;
M=2;
N=2;
K=M*N;          # total number of Kohonen nodes
 
# Ordering of the Kohonen-layer nodes
mc = 0.8                    # momentum factor
maxiter = 1000              # maximum iterations

# Build the network

# Initialize the network
# NOTE(review): `m` is not defined in this snippet -- presumably the row
# count `dm` computed above; verify against the original script.
nSampNum = m;  # number of samples
nSampDim = 2;  # sample dimension
nHidden = 3;   # hidden-layer neurons
nOut = 1;      # output layer

# Hidden-layer parameters
# net_Hidden * 3: one row per hidden-layer node
w = 2*(random.rand(nHidden,nSampDim)-1/2)  
b = 2*(random.rand(nHidden,1)-1/2) 
wex = mat(Untils.mergMatrix(mat(w),mat(b)))

# Output-layer parameters
W = 2*(random.rand(nOut,nHidden)-1/2) 
B = 2*(random.rand(nOut,1)-1/2) 
WEX = mat(Untils.mergMatrix(mat(W),mat(B)))

dWEXOld = [] ; dwexOld = [] # initialize weight-delta intermediates
# Training
iteration = 0;  
# Initialize the error record
errRec = [];

for i in range(maxiter):   
    # Forward propagation of the working signal
    # NOTE(review): `SampIn` is not defined in this snippet; the loop body
    # appears to be truncated here.
    hp = wex*SampIn
示例#7
0
文件: 03BPTest.py 项目: wenbo/MLBook
import BackPropgation
import matplotlib.pyplot as plt 

# Data set (XOR truth table with a bias column)
dataSet = [[0,0,1],[0,1,1],[1,0,1],[1,1,1]]
classLabels = [0,1,1,0]
# NOTE(review): `mat`, `ones`, `shape`, `transpose`, `linspace` and `Untils`
# are used below but not imported in this snippet.
expected = mat(classLabels)

# Plot the data points
# Rebuild the dataSet matrix
dataMat = mat(ones((shape(dataSet)[0],shape(dataSet)[1])))
dataMat[:,1] = mat(dataSet)[:,0]
dataMat[:,2] = mat(dataSet)[:,1]	

# Scatter plot of the data set
Untils.drawClassScatter(dataMat,transpose(expected),False)

# Classify the data with the BP neural network
errRec,WEX,wex = BackPropgation.bpNet(dataSet,classLabels)

print errRec,WEX,wex

# Compute and draw the decision boundary over a 30x30 grid
x = linspace(-0.2,1.2,30)
xx = mat(ones((30,30)))
xx[:,0:30] = x 
yy = xx.T
z = ones((len(xx),len(yy))) ;
for i in range(len(xx)):
   for j in range(len(yy)):
       xi = []; tauex=[] ; tautemp=[]
示例#8
0
# -*- coding: utf-8 -*-
# Filename : dataSet.py

import numpy as np
import operator
import Untils
import Kohonen
from numpy import *
import matplotlib.pyplot as plt

# Load the coordinate data file
dataSet = Untils.loadDataSet("dataset.txt")
dataMat = mat(dataSet)
# print dataMat
normDataset = Kohonen.mapMinMax(dataMat)
# print normDataset

# Generate a random int (upper bound exclusive)
# print random.randint(0,30)

# Index of the minimum value in a vector
xx = mat([1, 9])
w1 = mat([[1, 2, 3, 4], [5, 6, 7, 8]])
minIndx = Kohonen.distM(xx, w1).argmin()

# Distance computation
jdpx = mat([[0, 0], [0, 1], [1, 0], [1, 1]])
d1 = ceil(minIndx / 4)
d2 = mod(minIndx, 4)
mydist = Kohonen.distM(mat([d1, d2]), jdpx.transpose())
# print mydist
示例#9
0
# Build the network

# Initialize the network
nSampNum = 4;  # number of samples
nSampDim = 2;  # sample dimension
nHidden = 3;   # hidden-layer neurons
nOut = 1;      # output layer

# Input-layer parameters

# Hidden-layer parameters
# net_Hidden * 3: one row per hidden-layer node
w = 2*(random.rand(nHidden,nSampDim)-1/2)  
b = 2*(random.rand(nHidden,1)-1/2) 
wex = mat(Untils.mergMatrix(mat(w),mat(b)))

# Output-layer parameters
W = 2*(random.rand(nOut,nHidden)-1/2) 
B = 2*(random.rand(nOut,1)-1/2) 
WEX = mat(Untils.mergMatrix(mat(W),mat(B)))

dWEXOld = 0 ; dwexOld = 0 
# Training
iteration = 0;  
errRec = [];
# NOTE(review): `maxiter` and `SampIn` are not defined in this snippet;
# the loop body appears to be truncated here.
for i in range(maxiter):   
    # Forward propagation of the working signal
    hp = wex*SampIn
    tau = BackPropgation.logsig(hp)
    tauex  = Untils.mergMatrix(tau.T, ones((nSampNum,1))).T
示例#10
0
文件: 03BP_XOR.py 项目: wenbo/MLBook
# Build the network

# Initialize the network
nSampNum = 4;  # number of samples
nSampDim = 2;  # sample dimension
nHidden = 3;   # hidden-layer neurons
nOut = 1;      # output layer

# Input-layer parameters

# Hidden-layer parameters
# net_Hidden * 3: one row per hidden-layer node
w = 2*(random.rand(nHidden,nSampDim)-1/2)  
b = 2*(random.rand(nHidden,1)-1/2) 
wex = mat(Untils.mergMatrix(mat(w),mat(b)))

# Output-layer parameters
W = 2*(random.rand(nOut,nHidden)-1/2) 
B = 2*(random.rand(nOut,1)-1/2) 
WEX = mat(Untils.mergMatrix(mat(W),mat(B)))

dWEXOld = 0 ; dwexOld = 0 
# Training
iteration = 0;  
errRec = [];
# NOTE(review): `maxiter` and `SampIn` are not defined in this snippet;
# the loop body appears to be truncated here.
for i in range(maxiter):   
    # Forward propagation of the working signal
    hp = wex*SampIn
    tau = BackPropgation.logsig(hp)
    tauex  = Untils.mergMatrix(tau.T, ones((nSampNum,1))).T
# -*- coding:utf-8 -*-
# Filename : testBoltzmann01.py

import operator
import copy
import Untils
import Boltzmann
from numpy import *
import matplotlib.pyplot as plt

# Load the city coordinate data set.
dataSet = Untils.loadDataSet("cities.txt")
cityPosition = mat(dataSet)
m, n = shape(cityPosition)
pn = m
# Convert the city coordinate matrix into an adjacency matrix
# (matrix of pairwise inter-city distances).
dist = Boltzmann.distM(cityPosition, cityPosition.transpose())

# Initialization
MAX_ITER = 2000  # 1000-2000
MAX_M = m
Lambda = 0.97
T0 = 1000
# 100-1000
# Build an initial feasible solution (a random tour).
x0 = arange(m)
random.shuffle(x0)
#
T = T0
iteration = 0
x = x0
# path variable
示例#12
0
# -*- coding: GBK -*-
# Filename : 01dataSet.py

import numpy as np 
import operator
import Untils
import BackPropgation
from numpy import *
import matplotlib.pyplot as plt 

dataMat,classLabels = Untils.loadDataSet("student.txt")

# Plot: 2-D scatter, no class labels
# Untils.drawScatter(dataMat)

# Plot: 2-D scatter with class labels, suitable for a training set
# Untils.drawClassScatter(mat(dataMat),classLabels)

# Merge two multi-dimensional matrices and return the merged matrix
# The input arguments are order-sensitive
# [m,n]=shape(dataMat)
# classMat = transpose(mat(classLabels))
# matMerge = Untils.mergMatrix(mat(dataMat),classMat)

# Element-wise multiplication
# a = mat([1,1,1]) ;b = mat([2,2,2])
# print multiply(a,b)

# Test BackPropgation.dlogsig(hp,tau)
# A = mat([0,1,2]);
# print "A*(1-A)",multiply(A,(1-A))
示例#13
0
文件: 04BPTest.py 项目: wenbo/MLBook
import operator
import Untils
import BackPropgation
import matplotlib.pyplot as plt 

# Data set
dataSet,classLabels = BackPropgation.loadDataSet("testSet2.txt") # column 1 is all ones at init; alt: studentTest.txt
dataSet = BackPropgation.normalize(mat(dataSet))

# Plot the data points
# Rebuild the dataSet matrix
dataMat = mat(ones((shape(dataSet)[0],shape(dataSet)[1])))
dataMat[:,1] = mat(dataSet)[:,0]
dataMat[:,2] = mat(dataSet)[:,1]	

# Scatter plot of the data set
Untils.drawClassScatter(dataMat,transpose(classLabels),False)

# Classify the data with the BP neural network
errRec,WEX,wex = BackPropgation.bpNet(dataSet,classLabels)

# Compute and draw the decision boundary
x,z = BackPropgation.BPClassfier(-3.0,3.0,WEX,wex)

Untils.classfyContour(x,x,z)

# Plot the error curve
X = linspace(0,2000,2000)
Y = log2(errRec)+1.0e-6
Untils.TrendLine(X,Y)
# -*- coding: GBK -*-
# Filename :gradDecent.py

from numpy import *
import operator
import Untils
import matplotlib.pyplot as plt 

# BP neural network

# Data set: col 1: intercept 1; col 2: x coordinate; col 3: y coordinate
dataMat,classLabels = Untils.loadDataSet("student.txt")
dataMat = mat(dataMat)
classMat= mat(classLabels)

# Normalize the data
dataMat = Untils.normalize(dataMat)

# Scatter plot of the data set coordinates
Untils.drawClassScatter(dataMat,classLabels,False)
		
# m: number of rows, n: number of columns
m,n = shape(dataMat)
labelMat = classMat.transpose()
# Step size
alpha = 0.001
# Number of iterations
maxCycles = 500
# Linear separator y=a*x+b: b:weights[0]; a:weights[1]/weights[2]
weights = ones((n,1))
# Compute the regression coefficients `weights`
示例#15
0
# -*- coding:utf-8 -*-
# Filename : testBoltzmann01.py

import operator
import copy
import Untils
import Boltzmann
from numpy import *
import matplotlib.pyplot as plt

# Load the city coordinates and run the Boltzmann-machine TSP solver.
dataSet = Untils.loadDataSet("dataSet25.txt")
cityPosition = mat(dataSet)
m, n = shape(cityPosition)
bestx, di = Boltzmann.boltzmann(cityPosition, MAX_ITER=1000, T0=100)

# City map and path before optimization
Untils.drawScatter(cityPosition, flag=False)
Untils.drawPath(list(range(m)), cityPosition)

# City map and path after optimization
Untils.drawScatter(cityPosition, flag=False)
Untils.drawPath(bestx, cityPosition, color='b')

# Plot the error trend line
x0 = list(range(len(di)))
Untils.TrendLine(x0, di)
示例#16
0
# -*- coding: GBK -*-
# Filename :gradDecent.py

from numpy import *
import operator
import Untils
import matplotlib.pyplot as plt 

# BP neural network

# Data set: col 1: intercept 1; col 2: x coordinate; col 3: y coordinate
dataMat,classLabels = Untils.loadDataSet("student.txt")
dataMat = mat(dataMat)
classMat= mat(classLabels)

# Normalize the data
dataMat = Untils.normalize(dataMat)

# Scatter plot of the data set coordinates
Untils.drawClassScatter(dataMat,classLabels,False)
		
# m: number of rows, n: number of columns
m,n = shape(dataMat)
labelMat = classMat.transpose()
# Step size
alpha = 0.001
# Number of iterations
maxCycles = 500
# Linear separator y=a*x+b: b:weights[0]; a:weights[1]/weights[2]
weights = ones((n,1))
# Compute the regression coefficients `weights`
示例#17
0
import operator
import Untils
import BackPropgation
import matplotlib.pyplot as plt 

# Data set
dataSet,classLabels = BackPropgation.loadDataSet("testSet2.txt") # column 1 is all ones at init; alt: studentTest.txt
dataSet = BackPropgation.normalize(mat(dataSet))

# Plot the data points
# Rebuild the dataSet matrix
dataMat = mat(ones((shape(dataSet)[0],shape(dataSet)[1])))
dataMat[:,1] = mat(dataSet)[:,0]
dataMat[:,2] = mat(dataSet)[:,1]	

# Scatter plot of the data set
Untils.drawClassScatter(dataMat,transpose(classLabels),False)

# Classify the data with the BP neural network
errRec,WEX,wex = BackPropgation.bpNet(dataSet,classLabels)

# Compute and draw the decision boundary
x,z = BackPropgation.BPClassfier(-3.0,3.0,WEX,wex)

Untils.classfyContour(x,x,z)

# Plot the error curve
X = linspace(0,2000,2000)
Y = log2(errRec)+1.0e-6
Untils.TrendLine(X,Y)
示例#18
0
文件: 04BP_Dual.py 项目: wenbo/MLBook
mc = 0.8                    # momentum factor
maxiter = 1000              # maximum iterations

# Build the network

# Initialize the network
# NOTE(review): `m` is not defined in this snippet; verify against the
# original script.
nSampNum = m;  # number of samples
nSampDim = 2;  # sample dimension
nHidden = 3;   # hidden-layer neurons
nOut = 1;      # output layer

# Hidden-layer parameters
# net_Hidden * 3: one row per hidden-layer node
w = 2*(random.rand(nHidden,nSampDim)-1/2)  
b = 2*(random.rand(nHidden,1)-1/2) 
wex = mat(Untils.mergMatrix(mat(w),mat(b)))

# Output-layer parameters
W = 2*(random.rand(nOut,nHidden)-1/2) 
B = 2*(random.rand(nOut,1)-1/2) 
WEX = mat(Untils.mergMatrix(mat(W),mat(B)))

dWEXOld = [] ; dwexOld = [] # initialize weight-delta intermediates
# Training
iteration = 0;  
# Initialize the error record
errRec = [];

for i in range(maxiter):   
    # Forward propagation of the working signal
    # NOTE(review): `SampIn` is not defined in this snippet; the loop body
    # appears to be truncated here.
    hp = wex*SampIn
示例#19
0
文件: 01dataSet.py 项目: wenbo/MLBook
# -*- coding: GBK -*-
# Filename : 01dataSet.py

import numpy as np 
import operator
import Untils
import BackPropgation
from numpy import *
import matplotlib.pyplot as plt 

dataMat,classLabels = Untils.loadDataSet("student.txt")

# Plot: 2-D scatter, no class labels
# Untils.drawScatter(dataMat)

# Plot: 2-D scatter with class labels, suitable for a training set
# Untils.drawClassScatter(mat(dataMat),classLabels)

# Merge two multi-dimensional matrices and return the merged matrix
# The input arguments are order-sensitive
# [m,n]=shape(dataMat)
# classMat = transpose(mat(classLabels))
# matMerge = Untils.mergMatrix(mat(dataMat),classMat)

# Element-wise multiplication
# a = mat([1,1,1]) ;b = mat([2,2,2])
# print multiply(a,b)

# Test BackPropgation.dlogsig(hp,tau)
# A = mat([0,1,2]);
# print "A*(1-A)",multiply(A,(1-A))