Example #1
    # tail of a plotting helper: plot ctsts against the test inputs, label the axes, and save the figure
    if tstsplot and ctstsplot:
        pylab.plot(tsts['input'], ctsts, c='g')

    pylab.xlabel('x')
    pylab.ylabel('y')
    pylab.title('Neuron Number:' + str(nneuron))
    pylab.grid(True)
    plotname = os.path.join(plotdir, ('jpq2layers_plot' + str(iter)))
    pylab.savefig(plotname)


# modules used in this example
import os
import math

import numpy
import pylab
import matplotlib.pyplot as plt

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.tools.validation import ModuleValidator, Validator

# set up the network parameters and validators
nneuron = 5
mom = 0.98
netname = "LSL-" + str(nneuron) + "-" + str(mom)
mv = ModuleValidator()
v = Validator()

#create the test DataSet
x = numpy.arange(0.0, 1.0 + 0.01, 0.01)
s = 0.5 + 0.4 * numpy.sin(2 * numpy.pi * x)
tsts = SupervisedDataSet(1, 1)
tsts.setField('input', x.reshape(len(x), 1))
tsts.setField('target', s.reshape(len(s), 1))
#read the train DataSet from file
trndata = SupervisedDataSet.loadFromFile(os.path.join(os.getcwd(), 'trndata'))

myneuralnet = os.path.join(os.getcwd(), 'myneuralnet.xml')
if os.path.isfile(myneuralnet):
    n = NetworkReader.readFrom(myneuralnet, name=netname)
    # continue training the loaded network on the training data
    treinadorSupervisionado = BackpropTrainer(n, trndata)

    numeroDeAcessos = 10          # number of training rounds
    numeroDeEpocasPorAcesso = 50  # epochs per training round

    fig1 = plt.figure()
    ax1 = fig1.add_subplot(111)
    ax1.axis([0, 2 * math.pi, -1.5, 1.5])
    fig1.hold()

    fig2 = plt.figure()
    ax2 = fig2.add_subplot(111)
    ax2.axis([-50, numeroDeAcessos * numeroDeEpocasPorAcesso + 50, 0.00001, 4])
    ax2.set_yscale('log')
    fig2.hold()
    meansq = ModuleValidator()
    erro2 = meansq.MSE(treinadorSupervisionado.module, trndata)  # MSE before further training
    print(erro2)
    ax2.plot([0], [erro2], 'bo')

    tempoPausa = 1
    for i in range(numeroDeAcessos):
        treinadorSupervisionado.trainEpochs(numeroDeEpocasPorAcesso)
        meansq = ModuleValidator()
        erro2 = meansq.MSE(treinadorSupervisionado.module, trndata)  # MSE after this round
        print(erro2)
        ax1.plot(trndata['input'],
                 trndata['target'],
                 'bo',
                 markersize=7,
                 markeredgewidth=0)
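
Example #1 only loads a network when myneuralnet.xml already exists on disk. Below is a minimal sketch of how such a file could be written in the first place with PyBrain's NetworkWriter; the 1-input/1-output architecture with nneuron hidden units and the use of netname as the network's name are assumptions based on the code above, not part of the original example.

# minimal sketch (assumed architecture): create and save the network that Example #1 expects to find
from pybrain.tools.shortcuts import buildNetwork
from pybrain.tools.customxml.networkwriter import NetworkWriter

net = buildNetwork(1, nneuron, 1)            # 1 input, nneuron hidden units, 1 output (assumed)
net.name = netname                           # matches the name passed to NetworkReader.readFrom above
NetworkWriter.writeToFile(net, myneuralnet)  # creates myneuralnet.xml in the working directory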
Example #3
from pybrain.datasets import ClassificationDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.validation import CrossValidator, ModuleValidator

# toy two-class dataset: 2 input features, 1 target column, 2 classes
alldata = ClassificationDataSet(2, 1, nb_classes=2)

alldata.addSample([-1, -1], [0])
alldata.addSample([-1, -1], [0])
alldata.addSample([-1, -1], [0])
alldata.addSample([-1, -1], [0])

alldata.addSample([1, 1], [1])
alldata.addSample([1, 1], [1])
alldata.addSample([1, 1], [1])
alldata.addSample([1, 1], [1])
alldata.addSample([1, 1], [1])

tstdata, trndata = alldata.splitWithProportion(0.25)
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()

# We can also examine the dataset
print "Number of training patterns: ", len(trndata)
print "Input and output dimensions: ", trndata.indim, trndata.outdim
print "First sample (input, target, class):"
print trndata['input'][0], trndata['target'][0], trndata['class'][0]

fnn     = buildNetwork( trndata.indim, 5, trndata.outdim, recurrent=False )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01 )

# I am not sure about this, I don't think my production code is implemented like this
modval = ModuleValidator()
trainer.trainEpochs(20)
trainer.trainOnDataset(dataset=trndata)
cv = CrossValidator( trainer, trndata, n_folds=5, valfunc=modval.MSE )
print "MSE %f" %( cv.validate() )
Example #4
import pylab, numpy
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure import TanhLayer
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.validation import CrossValidator, ModuleValidator

results = pylab.loadtxt('credit.txt')
target = results[:, -1]
data = numpy.delete(results, -1, 1)

#print "data", tuple(data[0])
#print "target", (target[0],)

#net = buildNetwork(14, 10, 1)
net = buildNetwork(14, 10, 1, hiddenclass=TanhLayer)
#print net.activate([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14])

ds = SupervisedDataSet(14, 1)

for i in range(len(data)):
    ds.addSample(tuple(data[i]), (target[i], ))

trainer = BackpropTrainer(net, ds)
evaluation = ModuleValidator()
validator = CrossValidator(trainer=trainer,
                           dataset=trainer.ds,
                           n_folds=5,
                           valfunc=evaluation.MSE)
print(validator.validate())
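
The cross-validated score above is computed fold by fold with ModuleValidator.MSE; the same classmethod can also be called directly to check how well the net fits the full dataset after a fixed amount of training. A minimal sketch (the epoch count is arbitrary):

# minimal sketch: train for a fixed number of epochs, then report the MSE over the whole dataset
trainer.trainEpochs(10)              # 10 epochs chosen only for illustration
print(ModuleValidator.MSE(net, ds))  # mean squared error of the trained net on all samples in ds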