def initializeNetwork(self):
        can1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can1.png'), self.encodingDict["can"])
        can2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can2.png'), self.encodingDict["can"])
        can3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can3.png'), self.encodingDict["can"])
        stain1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain1.png'), self.encodingDict["stain"])
        stain2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain2.png'), self.encodingDict["stain"])
        stain3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain3.png'), self.encodingDict["stain"])
        dirt1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt1.png'), self.encodingDict["dirt"])
        dirt2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt2.png'), self.encodingDict["dirt"])
        dirt3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt3.png'), self.encodingDict["dirt"])

        self.trainData.append(can1)
        self.trainData.append(can2)
        self.trainData.append(can3)
        self.trainData.append(stain1)
        self.trainData.append(stain2)
        self.trainData.append(stain3)
        self.trainData.append(dirt1)
        self.trainData.append(dirt2)
        self.trainData.append(dirt3)

        for x in self.trainData:
            x.prepareTrainData()

        self.net = buildNetwork(4, 3, 3, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
        ds = SupervisedDataSet(4, 3)

        for x in self.trainData:
            ds.addSample((x.contours/100.0, x.color[0]/1000.0, x.color[1]/1000.0, x.color[2]/1000.0), x.output)

        trainer = BackpropTrainer(self.net, momentum=0.1, verbose=True, weightdecay=0.01)
        trainer.trainOnDataset(ds, 1000)
        trainer.testOnData(verbose=True)
        print "\nSiec nauczona\n"
示例#2
0
def generate_and_test_nn():
    """Build and train a recurrent LSTM softmax classifier.

    Returns the (network, dataset) pair after 1000 training epochs and a
    verbose evaluation pass.
    """
    dataset = load_training_set()
    network = buildNetwork(dataset.indim, 13, dataset.outdim,
                           hiddenclass=LSTMLayer, outclass=SoftmaxLayer,
                           outputbias=False, recurrent=True)
    trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99, verbose=True)
    trainer.trainOnDataset(dataset, 1000)
    trainer.testOnData(verbose=True)
    return (network, dataset)
示例#3
0
def testOldTraining(hidden=15, n=None):
    """Train a network on the XOR dataset with plain backprop.

    A network may be supplied via `n`; otherwise a feed-forward one with
    `hidden` hidden units is built.
    """
    dataset = XORDataSet()
    network = n if n is not None else buildNetwork(
        dataset.indim, hidden, dataset.outdim, recurrent=False)
    trainer = BackpropTrainer(network, learningrate=0.01, momentum=0., verbose=False)
    trainer.trainOnDataset(dataset, 250)
    trainer.testOnData(verbose=True)
示例#4
0
def testOldTraining(hidden=15, n=None):
    """Backprop-train a (possibly supplied) network on XOR for 250 epochs."""
    data = XORDataSet()
    if n is None:
        # No network given: build a simple feed-forward one.
        n = buildNetwork(data.indim, hidden, data.outdim, recurrent=False)
    backprop = BackpropTrainer(n, learningrate=0.01, momentum=0., verbose=False)
    backprop.trainOnDataset(data, 250)
    backprop.testOnData(verbose=True)
示例#5
0
def testTraining():
    d = PrimesDataSet()
    d._convertToOneOfMany()
    n = buildNetwork(d.indim, 8, d.outdim, recurrent=True)
    t = BackpropTrainer(n, learningrate = 0.01, momentum = 0.99, verbose = True)
    t.trainOnDataset(d, 1000)
    t.testOnData(verbose=True)
    for i in range(15):
        print "Guess: %s || Real: %s" % (str(n.activate(i)), str(i in d.generatePrimes(10)))
    print d
示例#6
0
def testTraining():
    """Train a recurrent network on website features and report test error.

    Fix: the original ended with `import pdb; pdb.set_trace()`, a leftover
    debugger breakpoint that halts every run; it has been removed.
    """
    ds = WebsiteFeaturesDataSet()
    net = buildNetwork(ds.indim, 4, ds.outdim, recurrent=True)
    trainer = BackpropTrainer(net,
                              learningrate=0.001,
                              momentum=0.99,
                              verbose=True)
    trainer.trainOnDataset(ds, 1000)
    trainer.testOnData(verbose=True)
示例#7
0
def testTraining():
    print "Reading data"
    d = XORDataSet()
    traind,testd = d.splitWithProportion(0.8)
    print "Building network"
    n = buildNetwork(traind.indim, 4, traind.outdim, recurrent=True)
    print "Training"
    t = BackpropTrainer(n, learningrate = 0.01, momentum = 0.99, verbose = True)
    t.trainOnDataset(traind,100)
    testd = XORDataSet(begin=60000,end=80000)
    print t.module.params
    t.testOnData(testd,verbose= True)
示例#8
0
def generate_and_test_nn():
    """Create, train and evaluate an LSTM classifier; return (net, data)."""
    data = load_training_set()
    net = buildNetwork(data.indim, 13, data.outdim, hiddenclass=LSTMLayer,
                       outclass=SoftmaxLayer, outputbias=False, recurrent=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99, verbose=True)
    trainer.trainOnDataset(data, 1000)
    trainer.testOnData(verbose=True)
    return (net, data)
示例#9
0
    def execute(self):
        """Train a freshly built network, record its test error and return it."""
        net = self.networkFactoryMethod()
        trainer = BackpropTrainer(net, learningrate=self.learningrate,
                                  momentum=self.momentum)
        trainer.trainOnDataset(self.datasetForTraining, self.epochs)
        error = trainer.testOnData(self.datasetForTest)
        # Keep a history of errors across runs for later aggregation.
        self.collectedErrors.append(error)
        return error
def treinamento_Portas(list_Entrada_Saida, NumCamadasOcultas, taxa_aprendizado,
                       epochs):
    """Train a sigmoid MLP on (input, output) pairs for a logic gate.

    list_Entrada_Saida -- iterable of (input_vector, output_vector) pairs
    NumCamadasOcultas  -- number of hidden-layer neurons
    taxa_aprendizado   -- backprop learning rate
    epochs             -- number of training passes
    """
    # Dimensions come from the samples; the last pair wins, matching the
    # original behavior (all pairs are expected to share the same shape).
    d_in = 0
    d_out = 0
    for d in list_Entrada_Saida:
        d_in = len(d[0])
        d_out = len(d[1])

    # Add the samples.
    dataset = SupervisedDataSet(d_in, d_out)
    for entrada, saida in list_Entrada_Saida:
        dataset.addSample(entrada, saida)

    # Build the network.
    network = buildNetwork(
        dataset.indim,
        NumCamadasOcultas,
        dataset.outdim,
        bias=True,
        hiddenclass=SigmoidLayer,
        outclass=SigmoidLayer,
    )

    # Backpropagation trainer.
    trainer = BackpropTrainer(network, dataset, learningrate=taxa_aprendizado)

    # Train for the requested number of epochs.
    for _ in range(epochs):
        trainer.train()

    # Evaluate on the same samples. The original used a bare `except: pass`,
    # which also swallows KeyboardInterrupt/SystemExit; narrow it while
    # keeping the deliberate best-effort behavior of the verbose report.
    test_data = SupervisedDataSet(d_in, d_out)
    for entrada, saida in list_Entrada_Saida:
        test_data.addSample(entrada, saida)

    try:
        trainer.testOnData(test_data, verbose=True)
    except Exception:
        pass
    def __init__(self, stock_to_predict, days_of_prediction = 10, days_of_training = 450):
        """Download stock history and train a recurrent predictor on it.

        stock_to_predict    -- ticker handed to the StockDownloader
        days_of_prediction  -- horizon to predict (and hold-out window)
        days_of_training    -- history length used for training
        """
        self.number_of_days_before = 8
        self.days_of_prediction = days_of_prediction

        self.downloader = StockDownloader()

        training_data = self.downloader.download_stock(
            stock_to_predict, days_of_training, days_of_prediction)
        self.stock_prediction_data = self.downloader.download_stock(
            stock_to_predict, days_of_prediction)

        # First price of the prediction window is the reference point.
        self.starting_price = self.stock_prediction_data[0]

        self.dataset = StockSupervisedDataSet(self.number_of_days_before,
                                              training_data)
        self.network = buildNetwork(self.dataset.indim, 10, self.dataset.outdim,
                                    recurrent=True)
        trainer = BackpropTrainer(self.network, learningrate=0.00005,
                                  momentum=0., verbose=True)
        trainer.trainOnDataset(self.dataset, 200)
        trainer.testOnData(verbose=True)

        self.starting_prices = self.dataset['input'][-1]
示例#12
0
    def __init__(self):
        """Build and train the three-class image network.

        Classes 'cat', 'dust' and 'water' are one-hot encoded and each is
        trained from three bundled sample images.
        """
        self.code = {'cat': [1, 0, 0], 'dust': [0, 1, 0], 'water': [0, 0, 1]}

        pack = 'media.images_train'
        train_data = [
            (Neuron(load(file_path(pack, 'cat1.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'cat2.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'cat3.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'dust1.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'dust2.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'dust3.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'water1.png'))), self.code['water']),
            (Neuron(load(file_path(pack, 'water2.png'))), self.code['water']),
            (Neuron(load(file_path(pack, 'water3.png'))), self.code['water']),
        ]

        for neuron, _ in train_data:
            neuron.prepare()

        # 4 features in, 3 hidden, 3 softmax outputs (one per class).
        self.net = buildNetwork(4, 3, 3, hiddenclass=TanhLayer,
                                outclass=SoftmaxLayer)
        data = SupervisedDataSet(4, 3)

        for neuron, target in train_data:
            # Scale the raw features into a small numeric range.
            features = (neuron.contours / 100.0,
                        neuron.color[0] / 1000.0,
                        neuron.color[1] / 1000.0,
                        neuron.color[2] / 1000.0)
            data.addSample(features, target)

        trainer = BackpropTrainer(self.net, momentum=0.1, verbose=True,
                                  weightdecay=0.01)
        trainer.trainOnDataset(data, 1000)  # 1000 iterations
        trainer.testOnData(verbose=True)
示例#13
0
    def train(self, train_data_set, test_data_set, epoch=100):
        """Run `epoch` backprop passes with a progress bar; return test error."""
        trainer = BackpropTrainer(self.network, train_data_set)

        bar = ProgressBar(epoch)
        for step in range(epoch):
            bar.update(step + 1)
            # Brief pause so the progress bar is actually visible.
            time.sleep(0.01)
            trainer.train()

        return trainer.testOnData(test_data_set, verbose=True)
示例#14
0
def train():
    """Train a 64-input/2-output classifier from train_tower.csv and save it.

    Each CSV row holds 64 feature columns followed by a class column:
    class '0' maps to target [1, 0], anything else to [0, 1]. The trained
    network is written to tower.xml.
    """
    dataset = SupervisedDataSet(64, 2)
    # `with` guarantees the file is closed even if parsing raises; the
    # original opened/closed by hand and leaked the handle on error.
    with open('train_tower.csv', 'r') as f:
        for row in csv.reader(f):
            if row[64] == '0':
                dataset.addSample(row[0:64], [1, 0])
            else:
                dataset.addSample(row[0:64], [0, 1])

    network = buildNetwork(64, 19, 2)
    trainer = BackpropTrainer(network, dataset)
    for _ in range(100):
        trainer.train()
    trainer.testOnData(dataset, verbose=True)

    NetworkWriter.writeToFile(network, "tower.xml")
示例#15
0
    def __init__(self):
        """Train the cat/dust/water classifier from the bundled images."""
        # One-hot target vector per class name.
        self.code = {'cat': [1, 0, 0], 'dust': [0, 1, 0], 'water': [0, 0, 1]}

        pack = 'media.images_train'
        train_data = [
            (Neuron(load(file_path(pack, 'cat1.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'cat2.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'cat3.png'))), self.code['cat']),
            (Neuron(load(file_path(pack, 'dust1.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'dust2.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'dust3.png'))), self.code['dust']),
            (Neuron(load(file_path(pack, 'water1.png'))), self.code['water']),
            (Neuron(load(file_path(pack, 'water2.png'))), self.code['water']),
            (Neuron(load(file_path(pack, 'water3.png'))), self.code['water']),
        ]

        for sample, _ in train_data:
            sample.prepare()

        self.net = buildNetwork(
            4, 3, 3, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
        data = SupervisedDataSet(4, 3)

        for sample, target in train_data:
            # Normalised features: contour count plus three colour channels.
            data.addSample(
                (sample.contours / 100.0,
                 sample.color[0] / 1000.0,
                 sample.color[1] / 1000.0,
                 sample.color[2] / 1000.0),
                target)

        trainer = BackpropTrainer(
            self.net, momentum=0.1, verbose=True, weightdecay=0.01)
        trainer.trainOnDataset(data, 1000)  # 1000 iterations
        trainer.testOnData(verbose=True)
示例#16
0
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

# Dataset dimensions: 2 inputs, 1 target (the XOR truth table).
dataset = SupervisedDataSet(2, 1)

dataset.addSample([1, 1], [0])
dataset.addSample([1, 0], [1])
dataset.addSample([0, 1], [1])
dataset.addSample([0, 0], [0])

network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)
trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.99)

# Alternatives: loop trainer.train() 1000 times, or use
# trainer.trainUntilConvergence to train until convergence.
trainer.trainEpochs(1000)

# Evaluate on the same four patterns.
test_data = SupervisedDataSet(2, 1)
for pattern, expected in (([1, 1], [0]), ([1, 0], [1]),
                          ([0, 1], [1]), ([0, 0], [0])):
    test_data.addSample(pattern, expected)
trainer.testOnData(test_data, verbose=True)
示例#17
0
# Hold out the same index ranges from the targets as from the inputs.
saidas_teste = np.concatenate((saidas[35:50], saidas[85:100], saidas[135:]))
print(len(entradas_teste))
print(len(saidas_teste))
print('--------------------------')

from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

# Training set: 4 features per sample, 1 target value.
treinamento = SupervisedDataSet(4, 1)
for i, entrada in enumerate(entradas_treino):
    treinamento.addSample(entrada, saidas_treino[i])
print(len(treinamento))
print(treinamento.indim)
print(treinamento.outdim)
print('--------------------------')

# Build the network: 4 inputs -> 2 hidden -> 1 output, with bias units.
rede = buildNetwork(treinamento.indim, 2, treinamento.outdim, bias=True)
trainer = BackpropTrainer(rede, treinamento, learningrate=0.01, momentum=0.7)

# Train for 1000 epochs.
for epoca in range(1000):
    trainer.train()

# Evaluate on the held-out samples.
teste = SupervisedDataSet(4, 1)
for i, entrada in enumerate(entradas_teste):
    teste.addSample(entrada, saidas_teste[i])
trainer.testOnData(teste, verbose=True)
示例#18
0
import csv
from numpy import *
from pybrain.datasets import SequentialDataSet,UnsupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

# Sequence buffer and hidden-layer size. NEURONIOS stays a float literal on
# purpose: the learning-rate expression below relies on float division
# under Python 2 (1/(NEURONIOS/100) would be 0 with an int constant).
sequencia = []
NEURONIOS = 10e4

if __name__ == "__main__":

    sequencias = SequentialDataSet(1, 1)

    # Build the sequence 0..99 and add each (value, next value) pair.
    for x in range(0, 100):
        sequencia.append(x)

    for i, v in enumerate(sequencia):
        if i + 1 < len(sequencia):
            sequencias.addSample(v, sequencia[i + 1])

    print(sequencias)

    # Fix: layer sizes must be integers — the original passed the float
    # 100000.0 straight to buildNetwork. Cast at the call site only, so the
    # learning rate (1/1000.0 = 0.001) is unchanged.
    rn = buildNetwork(sequencias.indim, int(NEURONIOS), sequencias.outdim, recurrent=True)
    sorteio = BackpropTrainer(rn, sequencias, learningrate=1/(NEURONIOS/100))
    while 1:
        try:
            print(sorteio.train())
        except KeyboardInterrupt:
            # Ctrl-C stops training and prints a final verbose evaluation.
            sorteio.testOnData(verbose=True)
            break
示例#19
0
def testTraining():
    """Train a small recurrent net on the sequential XOR dataset."""
    data = SequentialXORDataSet()
    net = buildNetwork(data.indim, 4, data.outdim, recurrent=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99, verbose=True)
    trainer.trainOnDataset(data, 1000)
    trainer.testOnData(verbose=True)
示例#20
0
# Extract the test image's features and normalise them.
objeto = WilsonPDI.wocr("../imagens_teste/um.jpg")
atributos_teste = np.matrix([objeto.atributos()]) / maxima_entrada

# 4-bit binary encoding for each digit 0-9.
saidas = [
    [0, 0, 0, 0],
    [0, 0, 0, 1],
    [0, 0, 1, 0],
    [0, 0, 1, 1],
    [0, 1, 0, 0],
    [0, 1, 0, 1],
    [0, 1, 1, 0],
    [0, 1, 1, 1],
    [1, 0, 0, 0],
    [1, 0, 0, 1],
]

caracteres = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
# Test each candidate encoding; the first one whose error is low enough
# is taken as the recognised digit.
for contador, saida_desejavel in enumerate(saidas):
    teste = SupervisedDataSet(22, 4)
    teste.addSample(atributos_teste, saida_desejavel)
    erro = treinamento.testOnData(teste, True)
    if erro <= 0.059:
        print(caracteres[contador])
        break
示例#21
0
parametros_entrada = SupervisedDataSet(20, 1)

# One sample per training row; saidas is a 1xN matrix of target values.
i = 0
for entrada in atributos:
    parametros_entrada.addSample(entrada, [saidas[0, i]])
    i = i + 1

rede_neural = buildNetwork(20, 15, 1, bias=True)
rede_neural.randomize()
treinamento = BackpropTrainer(rede_neural, parametros_entrada, momentum=0.99)
treinamento.trainEpochs(1000)

obj = wocr("../imagens_teste/um.jpg")

atributos_teste = obj.atributos()
atributos_teste = np.matrix(atributos_teste) / maximo_entrada

caracteres = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
# Desired outputs: 0/45, 5/45, ..., 45/45 — one normalised value per digit.
valores_desejaveis = []
for i in range(0, 50, 5):
    valores_desejaveis.append(float(i))
valores_desejaveis = np.matrix(valores_desejaveis)
valores_desejaveis = valores_desejaveis / 45
# Fix: range(10), not range(0, 9) — the original never tested index 9,
# so the digit '9' could never be recognised.
for indice in range(10):
    simular = SupervisedDataSet(20, 1)
    simular.addSample(atributos_teste, valores_desejaveis[0, indice])
    erro = treinamento.testOnData(simular, True)
    if (erro <= 0.005):
        print(caracteres[indice])
        break
示例#22
0
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

# Simulate an AND logic gate with a small neural network.

# The dataset takes 2 inputs and produces 1 output per sample.
parametros = SupervisedDataSet(2, 1)

# AND truth table.
for par, alvo in (([0, 0], [0]), ([0, 1], [0]), ([1, 0], [0]), ([1, 1], [1])):
    parametros.addSample(par, alvo)

# Build the network: 2 inputs, 10 hidden neurons, 1 output,
# with bias units on hidden and output layers.
rede_neural = buildNetwork(2, 10, 1, bias=True, outputbias=True)

# Train with backpropagation for 1000 epochs.
treinamento = BackpropTrainer(rede_neural, parametros, momentum=0.5)
treinamento.trainEpochs(1000)

# Evaluate a single pattern verbosely.
parametros_teste = SupervisedDataSet(2, 1)
parametros_teste.addSample([0, 0], [0])
treinamento.testOnData(parametros_teste, True)
    # third feature is unemployment
    une.append(data[2])
    
    fund.append(data[3])

    indata =  tuple(data[:features])
    outdata = tuple(data[features:])
    ds.addSample(indata,outdata)

# Network: one input per feature, TWO hidden layers of `hidden` units each
# (the original comment claimed a single hidden layer, but two are built),
# and one output neuron.
n = buildNetwork(ds.indim, hidden, hidden, ds.outdim)
t = BackpropTrainer(n, learningrate=0.01, momentum=0.8, verbose=True)
t.trainOnDataset(ds, steps)
t.testOnData(verbose=True)

# Plot the fit.
import matplotlib.pyplot as plt

# A single prediction could be requested as:
# n.activate([GDP, CPI, unemployment]), e.g. n.activate([.02, .02, -.002]).

x = []
y = []
for i in range(len(time)):
    x.append(.25 * time[i] + 1954.5)   # quarter index -> calendar year
    y.append(n.activate([gdp[i], cpi[i], une[i]]))
示例#24
0
print('tempo de treinamento', fim - inicio)
# Other training options:
#   trainer.trainEpochs(1000)
#   trainer.trainUntilConvergence()  # train until convergence

# Evaluate the network, first on the same patterns it was trained on.
# verbose=True makes each prediction print next to its expected value.
test_data = SupervisedDataSet(2, 1)
for padrao, alvo in (([1, 1], [0]), ([1, 0], [1]),
                     ([0, 1], [1]), ([0, 0], [0])):
    test_data.addSample(padrao, alvo)
trainer.testOnData(test_data, verbose=True)

# Now with deliberately wrong targets, to see how errors are reported:
# "correct" is the target supplied here, "out" the network's answer.
print('testando segunda lista de parametros')
test_data2 = SupervisedDataSet(2, 1)
test_data2.addSample([0, 1], [0])  # network should say 1 -> counted as error
test_data2.addSample([0, 1], [1])  # matches
test_data2.addSample([0, 0], [1])  # network should say 0 -> error
test_data2.addSample([1, 1], [0])  # matches
resultado = trainer.testOnData(test_data2, verbose=True)

'''
Testing on data2:
('out:    ', '[1     ]')
('correct:', '[0     ]')
error:  0.50000000
    ((0.3, 1.0), 1.0),
    ((1.0, 0.6), 0.0),
    ((0.7, 0.6), 0.0),
    ((0.7, 0.1), 1.0),
)

# Held-out evaluation patterns: ((x1, x2), expected).
testData = (
    ((0.8, 0.0), 1.0),
    ((0.9, 0.7), 0.0),
    ((0.1, 0.1), 0.0),
    ((0.2, 0.8), 1.0),
    ((0.6, 0.6), 0.0),
    ((0.6, 1.0), 0.0),
    ((1.0, 0.3), 1.0),
    ((0.1, 0.1), 0.0),
)

# Wrap training and test pairs in supervised datasets.
datasetForTraining = SupervisedDataSet(ENTRY_DIMENSION, RESULT_DIMENSION)
for entry, expected in trainingData:
    datasetForTraining.addSample(entry, [expected])

datasetForTest = SupervisedDataSet(ENTRY_DIMENSION, RESULT_DIMENSION)
for entry, expected in testData:
    datasetForTest.addSample(entry, [expected])

HIDDEN_LAYER_DIMENSION = 4
network = buildNetwork(ENTRY_DIMENSION, HIDDEN_LAYER_DIMENSION,
                       RESULT_DIMENSION, recurrent=True)
trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99, verbose=True)
# A single training pass, then a verbose evaluation on the held-out set.
trainer.trainOnDataset(datasetForTraining, 1)
trainer.testOnData(datasetForTest, verbose=True)
示例#26
0
# Regression net: input_dim inputs, two tanh hidden layers of 40 units,
# a single linear output. sortModules() finalises the topology.
net = buildNetwork(input_dim, 40, 40, 1, hiddenclass=TanhLayer, outclass=LinearLayer, bias=True)
net.sortModules()

# ------------------------------
# Train Neural Network
# ------------------------------

plt.ion()
plt.show()
for i in range(numEpoch):
    # A fresh trainer per epoch implements a manual learning-rate decay
    # schedule: lr / (1 + i/lr_reg).
    t = BackpropTrainer(net, dataset=d_train, learningrate=lr/(1+(i*1.0)/lr_reg), lrdecay=1, momentum=0.2)
    t.train()

    if i % 10 == 0:
        # Every 10 epochs: report train/validation error to stdout and to
        # the log file `f` (Python 2 `print >>` syntax).
        train_error = t.testOnData(dataset=d_train)
        val_error = t.testOnData(dataset=d_val)
        print "Epoch", i+1, "training/val error: ", train_error, "/", val_error
        print >> f, "Epoch", i+1, "training/val error: ", train_error, "/", val_error

        # save trained net (pickled together with the data and index splits)
        with open('net/'+directory+filename+'_iter='+str(i)+'.pickle', 'w') as f1:
            pickle.dump([x, y, idx_train, idx_val, net], f1)

        # Predict every sample for the plot below.
        y_pred = np.zeros(y.shape[0])

        # for k in np.concatenate((idx_train, idx_val), axis=0):
        for k in range(x.shape[0]):
            y_pred[k] = net.activate(x[k, :])

        plt.clf()
示例#27
0
# Recurrent LSTM network with a sigmoid output layer for sequence data.
rnn = buildNetwork(trndata.indim, hidden, trndata.outdim, hiddenclass=LSTMLayer,
                   outclass=SigmoidLayer, recurrent=True)

# Plain backprop training (RPropMinusTrainer would be an alternative).
trainer = BackpropTrainer(rnn, dataset=trndata, verbose=True)

# Carry out 1000 single-pass training iterations.
for iteration in xrange(1000):
    trainer.train()

# Evaluate on the held-out sequences.
trainer.testOnData(tstdata, verbose=True)
示例#28
0
        # Split each row into input features and target columns.
        trainIn = []
        for x in row[:numberOfInputs]:
            trainIn.append(x)

        trainOut = []
        for x in row[numberOfInputs:]:
            trainOut.append(x)

        # Append the (input, target) pair as a linked sample.
        d.appendLinked(trainIn, trainOut)

    # build a neural network with the second parameter being the number of hidden layers
    n = buildNetwork(d.indim, 3, d.outdim, recurrent=True)

    # configure the trainer
    t = BackpropTrainer(n, learningrate=0.01, momentum=0.99, verbose=True)

    # split the data randomly into 75% training - 25% testing
    train, test = d.splitWithProportion(0.75)
    print "{} - {}".format(len(train), len(test))

    # train the data with n number of epochs
    t.trainOnDataset(train, 10)

    # test the data with the remaining data
    t.testOnData(test, verbose=True)

    # try the same test but with a different method
    # (a fresh tanh feed-forward net trained until convergence on all data)
    net = buildNetwork(d.indim, 3, d.outdim, bias=True, hiddenclass=TanhLayer)
    trainer = BackpropTrainer(net, d)
    trainer.trainUntilConvergence(verbose=True)
示例#29
0
# Read comma-separated lines until EOF; the first two fields are the
# inputs, the remaining fields the target.
while line:
    read = tuple(line.strip().split(','))
    # Creating Dataset from Data File
    dataset.addSample(read[:2], read[2:])
    line = dataFile.readline()

# Training phase based on backpropagation.
trainer = BackpropTrainer(net, dataset, verbose=True)

# Redirect stdout into the results file so testOnData's verbose report
# lands there (Python 2: `file` builtin).
import sys
orig_stdout = sys.stdout
final = file('results/trainingresults.txt', 'w+')
sys.stdout = final
print("Neural Net Test before Training Session:")
# Initial test of the network before training.
print(trainer.testOnData(dataset=dataset, verbose=True), )

# Restore the console for the training phase; remember the file handle so
# output can be redirected back afterwards.
temp_std_out = sys.stdout
sys.stdout = orig_stdout

##########
trained = False
# Keep training until the error is below the acceptable threshold.
# NOTE(review): the original comment claimed 0.0000001, but the threshold
# actually used is 0.0001 — confirm which was intended.
acceptableError = 0.0001
while not trained:
    error = trainer.train()
    if error < acceptableError:
        trained = True
##########

# Redirect stdout back to the results file.
sys.stdout = temp_std_out
示例#30
0
print 'starting training'
# Alternatives that were tried:
# trainer = RPropMinusTrainer(n, dataset=ds)
# trainer = BackpropTrainer(n, dataset=ds)
# trainer.trainUntilConvergence()
# trainer.train()

trainer = BackpropTrainer(net, ds)

train_errors = [] # save errors for plotting later
EPOCHS_PER_CYCLE = 10
CYCLES = 50
EPOCHS = EPOCHS_PER_CYCLE * CYCLES
for i in xrange(CYCLES):
    trainer.trainEpochs(EPOCHS_PER_CYCLE)
    # testOnData() with no argument evaluates on the training set itself.
    train_errors.append(trainer.testOnData())
    epoch = (i+1) * EPOCHS_PER_CYCLE
#    print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
    # NOTE(review): this file is Python 2 (print statements, xrange), so the
    # parenthesised print calls below emit tuples, e.g. "(10, 500, 0.12)",
    # and print() emits "()" — presumably a py2/py3 mix-up; confirm intent.
    print(epoch, EPOCHS, train_errors[-1])
#     #stdout.flush()
print()
print("final error =", train_errors[-1])


# Plot the errors (note that in this simple toy example, we are testing and training on the same dataset, which is of course not what you'd do for a real project!):
# plt.plot(range(0, EPOCHS, EPOCHS_PER_CYCLE), train_errors)
# plt.xlabel('epoch')
# plt.ylabel('error')
# plt.show()

print 'post training, writing NN to file'