Example #1
#!/usr/bin/env python
# Example script for feed-forward network usage in PyBrain.
__author__ = "Martin Felder"
__version__ = '$Id$'

from pylab import figure, ioff, clf, contourf, ion, draw, show
from pybrain.utilities           import percentError
from pybrain.tools.shortcuts     import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules   import SoftmaxLayer

from datasets import generateGridData, generateClassificationData, plotData

# generate the training data set
trndata = generateClassificationData(250)

# neural networks work better if classes are encoded using
# one output neuron per class
trndata._convertToOneOfMany( bounds=[0,1] )

# same for the independent test data set
tstdata = generateClassificationData(100)
tstdata._convertToOneOfMany( bounds=[0,1] )

# build a feed-forward network with 5 hidden units, plus
# a corresponding trainer
fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)

# generate a grid of data points for visualization
griddata, X, Y = generateGridData([-3.,6.,0.2],[-3.,6.,0.2])
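Example #1 stops right after building the visualization grid. As a hedged sketch of how it might continue, assuming the standard PyBrain trainer API (BackpropTrainer.trainEpochs, Trainer.testOnClassData, Module.activateOnDataset) and that generateGridData returns a dataset the network can be activated on along with 2-D meshgrid arrays X and Y:

# --- sketch of a possible continuation (not part of the original snippet) ---
# train in small increments, report the classification error on the training
# and test sets, and redraw the decision regions over the grid after each step
for i in range(20):
    trainer.trainEpochs(1)

    trnresult = percentError(trainer.testOnClassData(), trndata['class'])
    tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
    print("epoch %2d  train error: %5.2f%%  test error: %5.2f%%" % (i + 1, trnresult, tstresult))

    # activate the network on the grid and plot the winning class as filled contours
    out = fnn.activateOnDataset(griddata).argmax(axis=1).reshape(X.shape)
    figure(1)
    ioff()   # turn interactive plotting off while redrawing
    clf()
    contourf(X, Y, out)
    ion()    # turn interactive plotting back on
    draw()

show()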
Example #2
import logging
from os.path import join

from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError

from pybrain.structure.modules.svmunit import SVMUnit
from pybrain.supervised.trainers.svmtrainer import SVMTrainer

# import some local stuff
from datasets import generateClassificationData, plotData, generateGridData

logging.basicConfig(level=logging.INFO,
                    filename=join('.', 'testrun.log'),
                    format='%(asctime)s %(levelname)s %(message)s')
logging.getLogger('').addHandler(logging.StreamHandler())

# generate the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
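The snippet ends after computing the training-set error. A natural continuation (a sketch, not part of the original) evaluates the test set the same way and reports both figures through the logger configured above:

# --- sketch of a possible continuation (not part of the original snippet) ---
# the test error is computed exactly like the training error above
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
logging.info("log2C: %g  log2g: %g  train error: %5.2f%%  test error: %5.2f%%"
             % (log2C, log2g, trnresult, tstresult))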
Example #3
import logging
from os.path import join

from pybrain.utilities import percentError

from pybrain.structure.modules.svmunit import SVMUnit
from pybrain.supervised.trainers.svmtrainer import SVMTrainer

# import some local stuff
from datasets import generateClassificationData, plotData, generateGridData

logging.basicConfig(
    level=logging.INFO, filename=join(".", "testrun.log"), format="%(asctime)s %(levelname)s %(message)s"
)
logging.getLogger("").addHandler(logging.StreamHandler())


# generate the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)

for inpt, target in trndata:
    print(inpt, target)

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.0  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )
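This variant stops right after training. A minimal evaluation sketch (not part of the original), reusing the percentError / activateOnDataset pattern shown at the end of Example #2:

# --- sketch of a possible evaluation step (not part of the original snippet) ---
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
logging.info("train error: %5.2f%%  test error: %5.2f%%" % (trnresult, tstresult))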