Example #1
# imports this snippet needs (PyBrain); generateClassificationData and
# generateGridData are helpers defined alongside PyBrain's example scripts,
# and trndata is assumed to be an existing ClassificationDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer
from pybrain.utilities import percentError

# neural networks work better if classes are encoded using
# one output neuron per class
trndata._convertToOneOfMany( bounds=[0,1] )

# same for the independent test data set
tstdata = generateClassificationData(100)
tstdata._convertToOneOfMany( bounds=[0,1] )

# build a feed-forward network with 5 hidden units, plus
# a corresponding trainer
fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)

# generate a grid of data points for visualization
griddata, X, Y = generateGridData([-3.,6.,0.2],[-3.,6.,0.2])

# repeat 20 times
for i in range(20):
    # train the network for 1 epoch
    trainer.trainEpochs( 1 )

    # evaluate the result on the training and test data
    trnresult = percentError( trainer.testOnClassData(),
                              trndata['class'] )
    tstresult = percentError( trainer.testOnClassData(
           dataset=tstdata ), tstdata['class'] )

    # print the result
    print("epoch: %4d" % trainer.totalepochs, \
          "  train error: %5.2f%%" % trnresult, \
Example #2
# assumed imports (PyBrain's SVM support wraps libsvm; `p` is
# inferred to be pylab from the plotting calls below)
from pybrain.structure.modules.svmunit import SVMUnit
from pybrain.supervised.trainers.svmtrainer import SVMTrainer
import pylab as p

svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
print "sigma: %7g,  C: %7g,  train error: %5.2f%%,  test error: %5.2f%%" % (
    2.0**log2g, 2.0**log2C, trnresult, tstresult)

# generate a grid dataset
griddat, X, Y = generateGridData(x=[-4, 8, 0.1], y=[-2, 3, 0.1])

# pass the grid through the SVM, but this time get the raw distance
# from the boundary, not the class
Z = svm.activateOnDataset(griddat, values=True)

# the output format is a bit weird... make it into a decent array
Z = p.array([list(z.values())[0] for z in Z]).reshape(X.shape)

# make a 2d plot of the training data with a decision-value contour overlay
fig = p.figure()
plotData(trndata)
p.contourf(X, Y, Z)
p.show()
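
Both examples above rely on a generateGridData helper. A hypothetical sketch of what it could look like, inferred only from how its return values (a dataset plus the meshgrid arrays X and Y) are used:

import numpy as np
from pybrain.datasets import SupervisedDataSet

def make_grid_data(x, y):
    # x and y are (min, max, step) ranges, matching the calls above
    X, Y = np.meshgrid(np.arange(*x), np.arange(*y))
    griddata = SupervisedDataSet(2, 1)
    for xi, yi in zip(X.ravel(), Y.ravel()):
        griddata.addSample((xi, yi), (0,))  # dummy target, never read
    return griddata, X, Y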
Example #3
# Plot output
#----------
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.pyplot as plt
import numpy as np

fig = plt.figure()
# fig.gca(projection='3d') was removed in matplotlib 3.6; use add_subplot.
# The meshgrid itself comes from generateGridData below.
ax = fig.add_subplot(projection='3d')

# get the network's activation over a grid of inputs
griddata, X, Y = generateGridData([-2.,2.5,0.2],[-2.,2.5,0.2])
out = net.activateOnDataset(griddata)
out = out.reshape(X.shape)  # assumes the network has a single output

surf = ax.plot_surface(X, Y, out, rstride=1, cstride=1, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)
ax.set_zlim(-1.01, 1.01)

ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))

fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
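
The plot assumes an already-trained single-output network net. A minimal sketch of how such a net could be set up with PyBrain (layer sizes and the toy target surface are arbitrary placeholders):

import numpy as np
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure import TanhLayer
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)
for x, y in np.random.uniform(-2, 2.5, size=(200, 2)):
    ds.addSample((x, y), (np.sin(x) * np.cos(y),))  # toy target surface
net = buildNetwork(2, 10, 1, hiddenclass=TanhLayer)
BackpropTrainer(net, ds).trainEpochs(30)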