Example 1
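This snippet assumes that trndata and tstdata are already-built ClassificationDataSet objects with 2-D inputs, that PyBrain's LIBSVM-backed SVMUnit and SVMTrainer are importable, and that generateGridData and plotData are the plotting helpers shipped with the PyBrain examples. A minimal, hypothetical setup might look like the sketch below (the makeData helper and its Gaussian-blob parameters are made up for illustration):

import pylab as p
from numpy.random import multivariate_normal
from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.structure.modules.svmunit import SVMUnit          # requires the LIBSVM bindings
from pybrain.supervised.trainers.svmtrainer import SVMTrainer

def makeData(n=50):
    # two Gaussian blobs, one per class (illustrative only)
    ds = ClassificationDataSet(2, 1, nb_classes=2)
    for _ in range(n):
        ds.addSample(multivariate_normal([0., 0.], [[1., 0.], [0., 1.]]), [0])
        ds.addSample(multivariate_normal([4., 1.], [[1., 0.], [0., 1.]]), [1])
    return ds

trndata, tstdata = makeData(), makeData(25)
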
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.   # log2 of the soft-margin parameter C (slack penalty)
log2g = 1.1  # log2 of the RBF kernel width parameter
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, the SVM could be trained with a design-of-experiments grid search:
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
print "sigma: %7g,  C: %7g,  train error: %5.2f%%,  test error: %5.2f%%" % (
    2.0**log2g, 2.0**log2C, trnresult, tstresult)

# generate a grid dataset
griddat, X, Y = generateGridData(x=[-4, 8, 0.1], y=[-2, 3, 0.1])

# pass the grid through the SVM, but this time get the raw distance
# from the boundary, not the class
Z = svm.activateOnDataset(griddat, values=True)

# each grid point comes back as a dict of decision values;
# flatten it into an array with the same shape as the grid
Z = p.array([list(z.values())[0] for z in Z]).reshape(X.shape)

# make a 2D plot of the training data with a decision-value contour overlay
fig = p.figure()
plotData(trndata)
p.contourf(X, Y, Z)
p.show()
Example 2
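The indented lines below are the body of a training loop; they assume a feed-forward network fnn, a BackpropTrainer trainer, the datasets trndata/tstdata, and a plotting grid griddata, X, Y (the fragment closely mirrors PyBrain's example_fnn.py). A hypothetical sketch of that setup, with made-up layer sizes and grid ranges:

from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer
from pylab import figure, clf, ioff, ion, draw, show, contourf

# trndata / tstdata: ClassificationDataSet objects (e.g. built as in Example 1),
# converted so that each class gets its own output unit
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()

fnn = buildNetwork(trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(fnn, dataset=trndata, momentum=0.1, weightdecay=0.01)

# generateGridData is the same helper from the PyBrain examples used in Example 1
griddata, X, Y = generateGridData(x=[-3., 6., 0.2], y=[-3., 6., 0.2])

for i in range(20):
    trainer.trainEpochs(1)
    # the snippet below continues inside this loop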
    trnresult = percentError(trainer.testOnClassData(), trndata['class'])
    tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])

    # print the result
    print "epoch: %4d" % trainer.totalepochs, \
          "  train error: %5.2f%%" % trnresult, \
          "  test error: %5.2f%%" % tstresult

    # run our grid data through the FNN, get the most likely class
    # and shape it into an array
    out = fnn.activateOnDataset(griddata)
    out = out.argmax(axis=1)
    out = out.reshape(X.shape)

    # plot the test data and the underlying grid as a filled contour
    figure(1)
    ioff()  # interactive graphics off
    clf()
    # plot the datapoints
    plotData(tstdata)
    # overlay a contour plot of the functional margin
    if out.max() != out.min():
        CS = contourf(X, Y, out)
    ion()   # interactive graphics on
    draw()  # update the plot

# show the plot until user kills it
ioff()
show()

Example 3
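As in Example 2, the indented lines come from inside a training loop; they assume a ClassificationDataSet ds (already converted with _convertToOneOfMany), a network net, and a BackpropTrainer trainer. A self-contained sketch of that setup, using a made-up four-point toy dataset:

import sys
from pylab import figure, clf, ioff, ion, draw, show, contourf
from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer

# toy XOR-style data, lying inside the plotting grid used below
ds = ClassificationDataSet(2, 1, nb_classes=2)
for x, y, c in [(0., 0., 0), (2., 2., 0), (0., 2., 1), (2., 0., 1)]:
    ds.addSample((x, y), (c,))
ds._convertToOneOfMany()

net = buildNetwork(ds.indim, 5, ds.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(net, dataset=ds)

for epoch in range(50):
    trainer.trainEpochs(1)
    # the snippet below continues inside this loop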
    trnresult = percentError(trainer.testOnClassData(), ds['class'])
    # print("Epochs:", trainer.totalepochs)
    #print("Percent error on training data:", trnresult)
    sys.stdout.write(str(trnresult) + "%\n")  # same as print
    sys.stdout.flush()

# Plot results (based on example_fnn.py from PyBrain examples)
griddata, X, Y = generateGridData([-2., 2.5, 0.2], [-2., 2.5, 0.2])
out = net.activateOnDataset(griddata)
out = out.argmax(axis=1)
out = out.reshape(X.shape)

figure(1)
ioff()  # interactive graphics off
clf()
plotData(ds)
if out.max() != out.min():
    CS = contourf(X, Y, out)
ion()  # interactive graphics on
draw()  # update the plot

ioff()
show()

# errorVals = trainer.trainUntilConvergence()  # stores the errors as it trains
print("Done training.")

# for inpt, target in ds:
#     # inpt is a 2-D input sample, target the corresponding class target
#     print(inpt, target, net.activate(inpt))
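
For reference, trainUntilConvergence (commented out above) trains against an internal validation split and returns the per-epoch training- and validation-error lists; a sketch of how those error values could be inspected (the maxEpochs value and the plot are illustrative, not part of the original script):

# train until the validation error stops improving; returns two lists of per-epoch errors
trnErrors, valErrors = trainer.trainUntilConvergence(maxEpochs=100, validationProportion=0.25)

from pylab import plot, legend, show
plot(trnErrors, label='training error')      # error on the training split per epoch
plot(valErrors, label='validation error')    # error on the held-out split per epoch
legend()
show()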