Example #1
def training(d):
    """
    Builds a network and trains it.
    """
    n = buildNetwork(d.indim, 4, d.outdim, recurrent=True)
    t = BackpropTrainer(n, d, learningrate=0.01, momentum=0.99, verbose=True)
    for epoch in range(0, 1000):
        t.train()
    return t
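These training(d) helpers assume the standard PyBrain imports and a dataset d that exposes indim/outdim. A minimal self-contained sketch, with an illustrative XOR dataset that is not part of the original example:

from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer

# XOR truth table: 2 inputs, 1 target per sample
d = SupervisedDataSet(2, 1)
for inp, tgt in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    d.addSample(inp, tgt)

n = buildNetwork(d.indim, 4, d.outdim, recurrent=True)
t = BackpropTrainer(n, d, learningrate=0.01, momentum=0.99, verbose=True)
for epoch in range(1000):
    t.train()                  # one pass over the dataset; returns the average error
print(n.activate((1, 0)))      # query the trained network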
Example #2
def training(d):
    """
    Builds a network and trains it.
    """
    n = buildNetwork(d.indim, 4, d.outdim, recurrent=True)
    t = BackpropTrainer(n, d, learningrate=0.01, momentum=0.99, verbose=True)
    for epoch in range(0, 500):
        t.train()
    return t
Example #3
    def train_network(self, network, dataset):

        trainer = BackpropTrainer(network,
                                  dataset,
                                  learningrate=0.01,
                                  momentum=0.99,
                                  verbose=True)
        for epoch in range(0, 1000):
            trainer.train()

        return network
Example #4
    def train(self, train_data_set, test_data_set, epoch=100):
        trainer = BackpropTrainer(self.network, train_data_set)

        progress_bar = ProgressBar(epoch)

        for i in range(epoch):
            progress_bar.update(i+1)
            time.sleep(0.01)
            trainer.train()

        return trainer.testOnData(test_data_set, verbose=True)
Example #5
def train_network(d, iterations):
    print("Training")
    n = buildNetwork(d.indim, 4, d.outdim, bias=True)
    t = BackpropTrainer(
        n,
        d,
        learningrate=0.01,
        momentum=0.99,
        verbose=False)
    for epoch in range(iterations):
        t.train()
    return n
Example #6
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size+1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)#, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break


    exportCatDogANN(n)
    return n
Example #7
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #8
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
Example #9
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Example #10
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
Example #11
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
Example #12
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #13
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size+1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)#, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Example #14
def train():
    f = open('train.csv', 'r')

    csv_reader = csv.reader(f)

    dataset = SupervisedDataSet(64, 1)
    for d in csv_reader:
        # cast the CSV strings to floats before adding them to the dataset
        dataset.addSample([float(x) for x in d[:64]], float(d[64]))

    network = buildNetwork(64, 19, 1)
    trainer = BackpropTrainer(network, dataset)
    for i in range(100):
        trainer.train()

    NetworkWriter.writeToFile(network, "model.xml")

    f.close()
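The model.xml written above can be reloaded later with PyBrain's NetworkReader (the same reader Example #19 uses); a minimal hedged sketch:

from pybrain.tools.customxml import NetworkReader

# reload the serialized network and query it with a dummy 64-value input
network = NetworkReader.readFrom("model.xml")
print(network.activate([0.0] * 64))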
Example #15
class PredictorTrainer:
    def __init__(self, euro_predictor):
        self.euro_predictor = euro_predictor
        self.trainer = BackpropTrainer(euro_predictor.net, euro_predictor.ds)
        self.errors = []

    def train(self, error):
        self.trainer.train()
        e = self.trainer.train()
        errors = []
        while e > error:
            e = self.trainer.train()
            errors.append(e)
            print(e)
        self.errors = errors
        return errors

    def determined_train(self, iterations):
        self.trainer.train()
        self.trainer.train()
        errors = []
        for i in range(iterations):
            e = self.trainer.train()
            errors.append(e)
            print(e)
        self.errors = errors
        return errors

    def plot_errors(self):
        xs = [i for i in range(len(self.errors))]
        ys = self.errors

        plt.plot(xs, ys)
        plt.show()
Example #16
def treinamento_Portas(list_Entrada_Saida, NumCamadasOcultas, taxa_aprendizado,
                       epochs):
    # add the input/output samples
    d_in = 0
    d_out = 0
    for d in list_Entrada_Saida:
        d_in = len(d[0])
        d_out = len(d[1])

    dataset = SupervisedDataSet(d_in, d_out)
    for l in list_Entrada_Saida:
        entrada = l[0]
        saida = l[1]
        dataset.addSample(entrada, saida)

    # build the network

    network = buildNetwork(
        dataset.indim,
        NumCamadasOcultas,
        dataset.outdim,
        bias=True,
        hiddenclass=SigmoidLayer,
        outclass=SigmoidLayer,
    )

    # use backpropagation
    trainer = BackpropTrainer(network, dataset, learningrate=taxa_aprendizado)

    # train the network
    for epocas in range(epochs):
        trainer.train()

    # test the network
    test_data = SupervisedDataSet(d_in, d_out)
    for l in list_Entrada_Saida:
        entrada = l[0]
        saida = l[1]
        test_data.addSample(entrada, saida)

    try:
        trainer.testOnData(test_data, verbose=True)
    except Exception:
        pass
Example #17
    def train(self, data, iterations=NETWORK_ITERATIONS):
        for item in data:
            self.dataset.addSample(item[0], item[1])
        trainer = BackpropTrainer(self.network, self.dataset, learningrate=NETWORK_LEARNING_RATE,
                                  momentum=NETWORK_MOMENTUM)
        error = 0
        for i in range(iterations):
            error = trainer.train()
            print(i + 1, error)
        return error
Example #18
def result(request, form):
    dataset = SupervisedDataSet(2, 1)
    dados = form.cleaned_data

    # Add the XOR truth table
    dataset.addSample([0, 0], [0])
    dataset.addSample([0, 1], [1])
    dataset.addSample([1, 0], [1])
    dataset.addSample([1, 1], [0])

    bias = dados['bias'] is not None

    # input and output dimensions; the second argument is the size of the hidden layer
    network = buildNetwork(dataset.indim, int(dados['num_camadas']), dataset.outdim, bias=bias)
    trainer = BackpropTrainer(network, dataset, learningrate=float(dados['learningrate']), momentum=float(dados['momentum']))

    pesos_iniciais = network.params

    network._setParameters(np.random.uniform(dados['peso_start'], dados['peso_end'], network.params.shape[0]))

    error = 1.00000000

    epocasPercorridas = 0

    errors = []
    it = []
    while epocasPercorridas < dados['epochs'] and error > dados['erro_max']:
        error = trainer.train()
        epocasPercorridas += 1
        errors.append(error)
        it.append(epocasPercorridas)
    graph = [[idx, e] for idx, e in enumerate(errors)]

    context = {'form': form.cleaned_data,
               'error': error,
               'graph': json.dumps(graph),
               'epocas': epocasPercorridas,
               'pesos_iniciais': pesos_iniciais,
               'pesos_finais': network.params,
               'result00': network.activate([0, 0])[0],
               'result01': network.activate([0, 1])[0],
               'result10': network.activate([1, 0])[0],
               'result11': network.activate([1, 1])[0]}

    return render(request, 'result.html', context)
Example #19
    def train(self, dataSet):
        """
        Builds a network and trains it.
        """
        if os.stat(self.predictor_path).st_size != 0:
            self.network = NetworkReader.readFrom(self.predictor_path)
        else:
            self.network = buildNetwork(dataSet.indim, 4, dataSet.outdim, recurrent=True)

        t = None

        if len(dataSet) > 0:
            t = BackpropTrainer(self.network, dataSet, learningrate=self.learningrate, momentum=self.momentum, verbose=False)
            for epoch in range(0, self.epochs):
                t.train()

        NetworkWriter.writeToFile(self.network, self.predictor_path)

        return t
Example #20
def train():
    f = open('train_tower.csv', 'r')

    csvreader = csv.reader(f)

    dataset = SupervisedDataSet(64, 2)
    for d in csvreader:
        # cast the CSV strings to floats before adding them to the dataset
        features = [float(x) for x in d[:64]]
        if d[64] == '0':
            dataset.addSample(features, [1, 0])
        else:
            dataset.addSample(features, [0, 1])

    network = buildNetwork(64, 19, 2)
    trainer = BackpropTrainer(network, dataset)
    for i in range(100):
        trainer.train()
    trainer.testOnData(dataset, verbose=True)

    NetworkWriter.writeToFile(network, "tower.xml")

    f.close()
Example #21
def _train(X, Y, filename, epochs=50):
    global nn
    nn = buildNetwork(INPUT_SIZE, HIDDEN_LAYERS, OUTPUT_LAYER, bias=True, outclass=SoftmaxLayer)
    ds = ClassificationDataSet(INPUT_SIZE, OUTPUT_LAYER)
    for x, y in zip(X, Y):
        ds.addSample(x, y)
    trainer = BackpropTrainer(nn, ds)
    for i in range(epochs):
        error = trainer.train()
        print("Epoch: %d, Error: %7.4f" % (i + 1, error))
    # trainer.trainUntilConvergence(verbose=True, maxEpochs=epochs, continueEpochs=10)
    if filename:
        NetworkWriter.writeToFile(nn, 'data/' + filename + '.nn')
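The commented-out trainUntilConvergence call above is PyBrain's built-in alternative to a fixed epoch loop: it holds out part of the dataset for validation and stops once the validation error stops improving. A hedged sketch of that variant (the 0.25 split is the library default, not something this example sets):

# returns per-epoch (training errors, validation errors)
train_errs, val_errs = trainer.trainUntilConvergence(
    maxEpochs=epochs, continueEpochs=10, validationProportion=0.25, verbose=True)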
Example #22
def trainedLSTMNN():
    """
    n = RecurrentNetwork()

    inp = LinearLayer(100, name = 'input')
    hid = LSTMLayer(30, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid))
    n.addConnection(FullConnection(hid, out))

    n.addRecurrentConnection(FullConnection(hid, hid))
    n.sortModules()
    """
    n = buildNetwork(100,
                     50,
                     1,
                     hiddenclass=LSTMLayer,
                     outputbias=False,
                     recurrent=True)

    print "Network created"
    d = load1OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.1:
            break
        count = count + 1
        # if (count == 60):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #23
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(
        FullConnection(inp,
                       hid,
                       inSliceFrom=100,
                       inSliceTo=5150,
                       outSliceFrom=1,
                       outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.01:
            break
        count = count + 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #24
    def training(self, d):
        """
        Builds a network, trains it, and returns it.
        """

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(4)  # 4 inputs
        hiddenLayer = SigmoidLayer(3)  # 3 sigmoid neurons on the hidden layer
        outLayer = LinearLayer(2)  # 2 neurons on the output layer


        "add layers to NN"
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        "create connections"
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        "add connections"
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        "some unknown but necessary function :)"
        self.net.sortModules()

        print self.net

        "generate big sized training set"
        trainingSet = SupervisedDataSet(4,2)

        trainArr = self.generate_training_set()
        for ri in range(2000):
            sample_in = (trainArr[0][ri][0], trainArr[0][ri][1], trainArr[0][ri][2], trainArr[0][ri][3])
            sample_target = (trainArr[1][ri][0], trainArr[1][ri][1])
            trainingSet.addSample(sample_in, sample_target)

        "create backpropogation trainer"
        t = BackpropTrainer(self.net,d,learningrate=0.00001, momentum=0.99)
        while True:
            globErr = t.train()
            print "global error:", globErr
            if globErr < 0.0001:
                break

        return self.net
Example #25
def trainedLSTMNN():
    """
    n = RecurrentNetwork()

    inp = LinearLayer(100, name = 'input')
    hid = LSTMLayer(30, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid))
    n.addConnection(FullConnection(hid, out))

    n.addRecurrentConnection(FullConnection(hid, hid))
    n.sortModules()
    """
    n = buildNetwork(100, 50, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    print "Network created"
    d = load1OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count," error = ", globErr
        if globErr < 0.1:
            break
        count = count + 1
        # if (count == 60):
        #     break

    # for i in range(100):
    #     print t.train()


    exportANN(n)

    return n
Example #26
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=100, inSliceTo=5150, outSliceFrom=1, outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count," error = ", globErr
        if globErr < 0.01:
            break
        count = count + 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()


    exportANN(n)

    return n
Example #27
    def learn(self, learning_rate, momentum, epochs, verbose=False, verbose_modulus=5):
        """Learns NN.

        :param learning_rate: NN learning rate
        :param momentum: NN momentum
        :param epochs: NN number of epochs
        :param verbose: if True, prints info about verification every verbose_modulus epochs
        :param verbose_modulus: rate to print info
        :return: PyBrain's NN class
        """
        if verbose:
            print("Training neural network...")
        trainer = BackpropTrainer(self.network, self.learn_data, learningrate=learning_rate, momentum=momentum)
        self.x = range(1, epochs + 1)
        for epoch in range(1, epochs + 1):
            to_print = verbose and epoch % verbose_modulus == 0
            if to_print:
                print("\tEpoch:", epoch, "/" + str(epochs))
            err = trainer.train()
            self.err.append(err)
        return self.network
Example #28
orig_stdout = sys.stdout
final = open('results/trainingresults.txt', 'w+')
sys.stdout = final
print("Neural Net Test before Training Session:")
# Initial test of the network before training
print(trainer.testOnData(dataset=dataset, verbose=True))

temp_std_out = sys.stdout
sys.stdout = orig_stdout

##########
trained = False
# Keep training until the network error drops below the acceptable threshold
acceptableError = 0.0001
while not trained:
    error = trainer.train()
    if error < acceptableError:
        trained = True
##########

sys.stdout = temp_std_out
print("\nNeural Net Test after Training Session:")
# Final test of the network after training
print(trainer.testOnData(dataset=dataset, verbose=True))
sys.stdout = orig_stdout
final.close()
"""
Code that prints the contents of the neural network

for mod in net.modules:
  print "Module:", mod.name
Example #29
class Suite(PyExperimentSuite):
    def reset(self, params, repetition):
        print(params)

        self.nDimInput = 3
        self.inputEncoder = PassThroughEncoder()

        if params['output_encoding'] == None:
            self.outputEncoder = PassThroughEncoder()
            self.nDimOutput = 1
        elif params['output_encoding'] == 'likelihood':
            self.outputEncoder = ScalarBucketEncoder()
            self.nDimOutput = self.outputEncoder.encoder.n

        if (params['dataset'] == 'nyc_taxi'
                or params['dataset'] == 'nyc_taxi_perturb_baseline'):
            self.dataset = NYCTaxiDataset(params['dataset'])
        else:
            raise Exception("Dataset not found")

        self.testCounter = 0
        self.resets = []
        self.iteration = 0

        # initialize LSTM network
        random.seed(6)
        if params['output_encoding'] == None:
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outputbias=True,
                                    recurrent=True)
        elif params['output_encoding'] == 'likelihood':
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outclass=SigmoidLayer,
                                    recurrent=True)

        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           self.nDimInput, self.nDimOutput),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        (self.networkInput, self.targetPrediction, self.trueData) = \
          self.dataset.generateSequence(
          prediction_nstep=params['prediction_nstep'],
          output_encoding=params['output_encoding'],
          noise=params['noise'])

    def window(self, data, params):
        start = max(0, self.iteration - params['learning_window'])
        return data[start:self.iteration]

    def train(self, params, verbose=False):

        if params['reset_every_training']:
            if verbose:
                print('create lstm network')

            random.seed(6)
            if params['output_encoding'] == None:
                self.net = buildNetwork(self.nDimInput,
                                        params['num_cells'],
                                        self.nDimOutput,
                                        hiddenclass=LSTMLayer,
                                        bias=True,
                                        outputbias=True,
                                        recurrent=True)
            elif params['output_encoding'] == 'likelihood':
                self.net = buildNetwork(self.nDimInput,
                                        params['num_cells'],
                                        self.nDimOutput,
                                        hiddenclass=LSTMLayer,
                                        bias=True,
                                        outclass=SigmoidLayer,
                                        recurrent=True)

        self.net.reset()

        ds = SequentialDataSet(self.nDimInput, self.nDimOutput)
        networkInput = self.window(self.networkInput, params)
        targetPrediction = self.window(self.targetPrediction, params)

        # prepare a training data-set using the history
        for i in range(len(networkInput)):
            ds.addSample(self.inputEncoder.encode(networkInput[i]),
                         self.outputEncoder.encode(targetPrediction[i]))

        if params['num_epochs'] > 1:
            trainer = RPropMinusTrainer(self.net, dataset=ds, verbose=verbose)

            if verbose:
                print(" train LSTM on ", len(ds), " records for ", params['num_epochs'], " epochs ")

            if len(networkInput) > 1:
                trainer.trainEpochs(params['num_epochs'])

        else:
            self.trainer.setData(ds)
            self.trainer.train()

        # run through the training dataset to get the lstm network state right
        self.net.reset()
        for i in range(len(networkInput)):
            self.net.activate(ds.getSample(i)[0])

    def iterate(self, params, repetition, iteration, verbose=True):
        self.iteration = iteration

        if self.iteration >= len(self.networkInput):
            return None

        train = False
        if iteration > params['compute_after']:
            if iteration == params['train_at_iteration']:
                train = True

            if params['train_every_month']:
                train = (
                    self.dataset.sequence['time'][iteration].is_month_start
                    and self.dataset.sequence['time'][iteration].hour == 0
                    and self.dataset.sequence['time'][iteration].minute == 0)

            if params['train_every_week']:
                train = (
                    self.dataset.sequence['time'][iteration].dayofweek == 0
                    and self.dataset.sequence['time'][iteration].hour == 0
                    and self.dataset.sequence['time'][iteration].minute == 0)

            if params['online_training']:
                train = True
        if verbose:
            print()
            print("iteration: ", iteration, " time: ", self.dataset.sequence['time'][iteration])

        if train:
            if verbose:
                print(" train at", iteration, " time: ", self.dataset.sequence['time'][iteration])
            self.train(params, verbose)

        if train:
            # reset test counter after training
            self.testCounter = params['test_for']

        if self.testCounter == 0:
            return None
        else:
            self.testCounter -= 1

        symbol = self.networkInput[iteration]
        output = self.net.activate(self.inputEncoder.encode(symbol))

        if params['output_encoding'] == None:
            predictions = self.dataset.reconstructSequence(output[0])
        elif params['output_encoding'] == 'likelihood':
            predictions = list(output / sum(output))
        else:
            predictions = None

        if verbose:
            print(" test at :", iteration, end='')

        if iteration == params['perturb_after']:
            if verbose:
                print(" perturb data and introduce new patterns")

            (newNetworkInput, newTargetPrediction, newTrueData) = \
              self.dataset.generateSequence(perturbed=True,
                                            prediction_nstep=params['prediction_nstep'],
                                            output_encoding=params['output_encoding'],
                                            noise=params['noise'])

            self.networkInput[iteration + 1:] = newNetworkInput[iteration + 1:]
            self.targetPrediction[iteration +
                                  1:] = newTargetPrediction[iteration + 1:]
            self.trueData[iteration + 1:] = newTrueData[iteration + 1:]

        return {
            "current": self.networkInput[iteration],
            "reset": None,
            "train": train,
            "predictions": predictions,
            "truth": self.trueData[iteration]
        }
Example #30
# ------------------------------
# Define Neural Network
# ------------------------------

net = buildNetwork(input_dim, 40, 40, 1, hiddenclass=TanhLayer, outclass=LinearLayer, bias=True)
net.sortModules()

# ------------------------------
# Train Neural Network
# ------------------------------

plt.ion()
plt.show()
for i in range(numEpoch):
    t = BackpropTrainer(net, dataset=d_train, learningrate=lr/(1+(i*1.0)/lr_reg), lrdecay=1, momentum=0.2)
    t.train()

    if i % 10 == 0:
        train_error = t.testOnData(dataset=d_train)
        val_error = t.testOnData(dataset=d_val)
        print "Epoch", i+1, "training/val error: ", train_error, "/", val_error
        print >> f, "Epoch", i+1, "training/val error: ", train_error, "/", val_error

        # save trained net (binary mode for pickle)
        with open('net/' + directory + filename + '_iter=' + str(i) + '.pickle', 'wb') as f1:
            pickle.dump([x, y, idx_train, idx_val, net], f1)

        y_pred = np.zeros(y.shape[0])

        # for k in np.concatenate((idx_train, idx_val), axis=0):
        for k in range(x.shape[0]):
Example #31
    # Open a saved artificial-neural-network training dump;
    # if none exists, create a new one
    try:
        tmp = open("./cerebro.dump", "rb")
        with tmp as dump:
            print("[%] Loading an existing Artificial Neural Network...")
            cerebro = pickle.load(dump)
    except (IOError, pickle.PickleError) as e:
        # Start the ANN
        print("[!] No Artificial Neural Network found!")
        print("[+] A new Artificial Neural Network has been started!")
        cerebro = buildNetwork(concursos.indim, NEURONIOS, concursos.outdim, recurrent=True, bias=True)

    # Prepare the ANN for learning
    print("[+] Preparing the Artificial Neural Network for use!")
    sorteio = BackpropTrainer(cerebro, concursos, learningrate=APRENDIZADO, momentum=0.99)


    # Register the task of saving the ANN on exit.
    atexit.register(handler_finaliza, cerebro)

    # Run the learning loop
    print("[%] Learning from the results of previous draws...")
    while 1:
        try:
            tempoDecorrido = time.process_time()
            print('[%s sec] Learning error: %s%%            ' % (tempoDecorrido, sorteio.train()), end="\r")
        except KeyboardInterrupt:
            sorteio.testOnData(verbose=True)
            break
Example #32
class Suite(PyExperimentSuite):

  def reset(self, params, repetition):
    print(params)

    self.nDimInput = 3
    self.inputEncoder = PassThroughEncoder()

    if params['output_encoding'] == None:
      self.outputEncoder = PassThroughEncoder()
      self.nDimOutput = 1
    elif params['output_encoding'] == 'likelihood':
      self.outputEncoder = ScalarBucketEncoder()
      self.nDimOutput = self.outputEncoder.encoder.n

    if (params['dataset'] == 'nyc_taxi' or
            params['dataset'] == 'nyc_taxi_perturb_baseline'):
      self.dataset = NYCTaxiDataset(params['dataset'])
    else:
      raise Exception("Dataset not found")

    self.testCounter = 0
    self.resets = []
    self.iteration = 0

    # initialize LSTM network
    random.seed(6)
    if params['output_encoding'] == None:
      self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                         hiddenclass=LSTMLayer, bias=True, outputbias=True, recurrent=True)
    elif params['output_encoding'] == 'likelihood':
      self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                         hiddenclass=LSTMLayer, bias=True, outclass=SigmoidLayer, recurrent=True)

    self.trainer = BackpropTrainer(self.net,
                          dataset=SequentialDataSet(self.nDimInput, self.nDimOutput),
                          learningrate=0.01,
                          momentum=0,
                          verbose=params['verbosity'] > 0)

    (self.networkInput, self.targetPrediction, self.trueData) = \
      self.dataset.generateSequence(
      prediction_nstep=params['prediction_nstep'],
      output_encoding=params['output_encoding'],
      noise=params['noise'])


  def window(self, data, params):
    start = max(0, self.iteration - params['learning_window'])
    return data[start:self.iteration]


  def train(self, params, verbose=False):

    if params['reset_every_training']:
      if verbose:
        print('create lstm network')

      random.seed(6)
      if params['output_encoding'] == None:
        self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                           hiddenclass=LSTMLayer, bias=True, outputbias=True, recurrent=True)
      elif params['output_encoding'] == 'likelihood':
        self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                           hiddenclass=LSTMLayer, bias=True, outclass=SigmoidLayer, recurrent=True)

    self.net.reset()

    ds = SequentialDataSet(self.nDimInput, self.nDimOutput)
    networkInput = self.window(self.networkInput, params)
    targetPrediction = self.window(self.targetPrediction, params)

    # prepare a training data-set using the history
    for i in range(len(networkInput)):
      ds.addSample(self.inputEncoder.encode(networkInput[i]),
                   self.outputEncoder.encode(targetPrediction[i]))

    if params['num_epochs'] > 1:
      trainer = RPropMinusTrainer(self.net, dataset=ds, verbose=verbose)

      if verbose:
        print(" train LSTM on ", len(ds), " records for ", params['num_epochs'], " epochs ")

      if len(networkInput) > 1:
        trainer.trainEpochs(params['num_epochs'])

    else:
      self.trainer.setData(ds)
      self.trainer.train()

    # run through the training dataset to get the lstm network state right
    self.net.reset()
    for i in range(len(networkInput)):
      self.net.activate(ds.getSample(i)[0])


  def iterate(self, params, repetition, iteration, verbose=True):
    self.iteration = iteration

    if self.iteration >= len(self.networkInput):
      return None

    train = False
    if iteration > params['compute_after']:
      if iteration == params['train_at_iteration']:
        train = True

      if params['train_every_month']:
        train = (self.dataset.sequence['time'][iteration].is_month_start and
                  self.dataset.sequence['time'][iteration].hour == 0 and
                  self.dataset.sequence['time'][iteration].minute == 0)

      if params['train_every_week']:
        train = (self.dataset.sequence['time'][iteration].dayofweek==0 and
                  self.dataset.sequence['time'][iteration].hour == 0 and
                  self.dataset.sequence['time'][iteration].minute == 0)

      if params['online_training']:
        train = True
    if verbose:
      print()
      print("iteration: ", iteration, " time: ", self.dataset.sequence['time'][iteration])

    if train:
      if verbose:
        print(" train at", iteration, " time: ", self.dataset.sequence['time'][iteration])
      self.train(params, verbose)

    if train:
      # reset test counter after training
      self.testCounter = params['test_for']

    if self.testCounter == 0:
      return None
    else:
      self.testCounter -= 1

    symbol = self.networkInput[iteration]
    output = self.net.activate(self.inputEncoder.encode(symbol))

    if params['output_encoding'] == None:
      predictions = self.dataset.reconstructSequence(output[0])
    elif params['output_encoding'] == 'likelihood':
      predictions = list(output/sum(output))
    else:
      predictions = None

    if verbose:
      print(" test at :", iteration, end='')

    if iteration == params['perturb_after']:
      if verbose:
        print(" perturb data and introduce new patterns")

      (newNetworkInput, newTargetPrediction, newTrueData) = \
        self.dataset.generateSequence(perturbed=True,
                                      prediction_nstep=params['prediction_nstep'],
                                      output_encoding=params['output_encoding'],
                                      noise=params['noise'])

      self.networkInput[iteration+1:] = newNetworkInput[iteration+1:]
      self.targetPrediction[iteration+1:] = newTargetPrediction[iteration+1:]
      self.trueData[iteration+1:] = newTrueData[iteration+1:]

    return {"current": self.networkInput[iteration],
            "reset": None,
            "train": train,
            "predictions": predictions,
            "truth": self.trueData[iteration]}
Example #33
class Suite(PyExperimentSuite):
  def reset(self, params, repetition):
    random.seed(params['seed'])

    if params['encoding'] == 'basic':
      self.encoder = BasicEncoder(params['encoding_num'])
    elif params['encoding'] == 'distributed':
      self.encoder = DistributedEncoder(params['encoding_num'],
                                        maxValue=params['encoding_max'],
                                        minValue=params['encoding_min'],
                                        classifyWithRandom=params[
                                          'classify_with_random'])
    else:
      raise Exception("Encoder not found")

    if params['dataset'] == 'simple':
      self.dataset = SimpleDataset()
    elif params['dataset'] == 'reber':
      self.dataset = ReberDataset(maxLength=params['max_length'])
    elif params['dataset'] == 'high-order':
      self.dataset = HighOrderDataset(numPredictions=params['num_predictions'],
                                      seed=params['seed'])
    else:
      raise Exception("Dataset not found")

    self.computeCounter = 0

    self.history = []
    self.resets = []
    self.randoms = []

    self.currentSequence = []
    self.targetPrediction = []
    self.replenishSequence(params, iteration=0)

    self.net = buildNetwork(params['encoding_num'], params['num_cells'],
                            params['encoding_num'],
                            hiddenclass=LSTMLayer,
                            bias=True,
                            outputbias=params['output_bias'],
                            recurrent=True)

    self.trainer = BackpropTrainer(self.net,
                          dataset=SequentialDataSet(params['encoding_num'], params['encoding_num']),
                          learningrate=0.01,
                          momentum=0,
                          verbose=params['verbosity'] > 0)


    self.sequenceCounter = 0

  def window(self, data, params):
    start = max(0, len(data) - params['learning_window'])
    return data[start:]


  def train(self, params):
    """
    Train LSTM network on buffered dataset history
    After training, run LSTM on history[:-1] to get the state correct
    :param params:
    :return:
    """
    if params['reset_every_training']:
      n = params['encoding_num']
      self.net = buildNetwork(n, params['num_cells'], n,
                               hiddenclass=LSTMLayer,
                               bias=True,
                               outputbias=params['output_bias'],
                               recurrent=True)
      self.net.reset()

    # prepare training dataset
    ds = SequentialDataSet(params['encoding_num'], params['encoding_num'])
    history = self.window(self.history, params)
    resets = self.window(self.resets, params)

    for i in range(1, len(history)):
      if not resets[i - 1]:
        ds.addSample(self.encoder.encode(history[i - 1]),
                     self.encoder.encode(history[i]))
      if resets[i]:
        ds.newSequence()

    print "Train LSTM network on buffered dataset of length ", len(history)
    if params['num_epochs'] > 1:
      trainer = RPropMinusTrainer(self.net,
                                  dataset=ds,
                                  verbose=params['verbosity'] > 0)

      if len(history) > 1:
        trainer.trainEpochs(params['num_epochs'])

      # run network on buffered dataset after training to get the state right
      self.net.reset()
      for i in range(len(history) - 1):
        symbol = history[i]
        output = self.net.activate(self.encoder.encode(symbol))
        self.encoder.classify(output, num=params['num_predictions'])

        if resets[i]:
          self.net.reset()
    else:
      self.trainer.setData(ds)
      self.trainer.train()

      # run network on buffered dataset after training to get the state right
      self.net.reset()
      for i in range(len(history) - 1):
        symbol = history[i]
        output = self.net.activate(self.encoder.encode(symbol))
        self.encoder.classify(output, num=params['num_predictions'])

        if resets[i]:
          self.net.reset()


  def killCells(self, killCellPercent):
    """
    kill a fraction of LSTM cells from the network
    :param killCellPercent:
    :return:
    """
    if killCellPercent <= 0:
      return

    inputLayer = self.net['in']
    lstmLayer = self.net['hidden0']

    numLSTMCell = lstmLayer.outdim
    numDead = int(round(killCellPercent * numLSTMCell))
    zombiePermutation = numpy.random.permutation(numLSTMCell)
    deadCells = zombiePermutation[0:numDead]

    # remove connections from input layer to dead LSTM cells
    connectionInputToHidden = self.net.connections[inputLayer][0]
    weightInputToHidden = reshape(connectionInputToHidden.params,
                                  (connectionInputToHidden.outdim,
                                   connectionInputToHidden.indim))

    for cell in deadCells:
      for dim in range(4):
        weightInputToHidden[dim * numLSTMCell + cell, :] *= 0

    newParams = reshape(weightInputToHidden,
                        (connectionInputToHidden.paramdim,))
    self.net.connections[inputLayer][0]._setParameters(
      newParams, connectionInputToHidden.owner)

    # remove dead connections within LSTM layer
    connectionHiddenToHidden = self.net.recurrentConns[0]
    weightHiddenToHidden = reshape(connectionHiddenToHidden.params,
                                   (connectionHiddenToHidden.outdim,
                                    connectionHiddenToHidden.indim))

    for cell in deadCells:
      weightHiddenToHidden[:, cell] *= 0

    newParams = reshape(weightHiddenToHidden,
                        (connectionHiddenToHidden.paramdim,))
    self.net.recurrentConns[0]._setParameters(
      newParams, connectionHiddenToHidden.owner)

    # remove connections from dead LSTM cell to output layer
    connectionHiddenToOutput = self.net.connections[lstmLayer][0]
    weightHiddenToOutput = reshape(connectionHiddenToOutput.params,
                                   (connectionHiddenToOutput.outdim,
                                    connectionHiddenToOutput.indim))
    for cell in deadCells:
      weightHiddenToOutput[:, cell] *= 0

    newParams = reshape(weightHiddenToOutput,
                        (connectionHiddenToOutput.paramdim,))
    self.net.connections[lstmLayer][0]._setParameters(
      newParams, connectionHiddenToOutput.owner)


  def replenishSequence(self, params, iteration):
    if iteration > params['perturb_after']:
      sequence, target = self.dataset.generateSequence(params['seed']+iteration,
                                                       perturbed=True)
    else:
      sequence, target = self.dataset.generateSequence(params['seed']+iteration)

    if (iteration > params['inject_noise_after'] and
            iteration < params['stop_inject_noise_after']):
      injectNoiseAt = random.randint(1, 3)
      sequence[injectNoiseAt] = self.encoder.randomSymbol()

    if params['separate_sequences_with'] == 'random':
      sequence.append(self.encoder.randomSymbol(seed=params['seed']+iteration))
      target.append(None)

    if params['verbosity'] > 0:
      print("Add sequence to buffer")
      print("sequence: ", sequence)
      print("target: ", target)

    self.currentSequence += sequence
    self.targetPrediction += target


  def check_prediction(self, topPredictions, targets):
    if targets is None:
      correct = None
    else:
      if isinstance(targets, numbers.Number):
        correct = targets in topPredictions
      else:
        correct = True
        for prediction in topPredictions:
           correct = correct and (prediction in targets)
    return correct


  def iterate(self, params, repetition, iteration):
    currentElement = self.currentSequence.pop(0)
    target = self.targetPrediction.pop(0)

    # update buffered dataset
    self.history.append(currentElement)

    # whether there will be a reset signal after the current record
    resetFlag = (len(self.currentSequence) == 0 and
                 params['separate_sequences_with'] == 'reset')
    self.resets.append(resetFlag)

    # whether there will be a random symbol after the current record
    randomFlag = (len(self.currentSequence) == 1 and
                  params['separate_sequences_with'] == 'random')

    self.randoms.append(randomFlag)

    if len(self.currentSequence) == 0:
      self.replenishSequence(params, iteration)
      self.sequenceCounter += 1

    # kill cells
    killCell = False
    if iteration == params['kill_cell_after']:
      killCell = True
      self.killCells(params['kill_cell_percent'])

    # reset compute counter
    if iteration > 0 and iteration % params['compute_every'] == 0:
      self.computeCounter = params['compute_for']

    if self.computeCounter == 0 or iteration < params['compute_after']:
      computeLSTM = False
    else:
      computeLSTM = True

    if computeLSTM:
      self.computeCounter -= 1

      train = (not params['compute_test_mode'] or
               iteration % params['compute_every'] == 0)

      if train:
        if params['verbosity'] > 0:
          print("Training LSTM at iteration {}".format(iteration))

        self.train(params)

      # run LSTM on the latest data record

      output = self.net.activate(self.encoder.encode(currentElement))
      if params['encoding'] == 'distributed':
        predictions = self.encoder.classify(output, num=params['num_predictions'])
      elif params['encoding'] == 'basic':
        predictions = self.encoder.classify(output, num=params['num_predictions'])

      correct = self.check_prediction(predictions, target)

      if params['verbosity'] > 0:
        print(("iteration: {0} \t"
               "current: {1} \t"
               "predictions: {2} \t"
               "truth: {3} \t"
               "correct: {4} \t").format(
          iteration, currentElement, predictions, target, correct))

      if self.resets[-1]:
        if params['verbosity'] > 0:
          print("Reset LSTM at iteration {}".format(iteration))
        self.net.reset()

      return {"iteration": iteration,
              "current": currentElement,
              "reset": self.resets[-1],
              "random": self.randoms[-1],
              "train": train,
              "predictions": predictions,
              "truth": target,
              "killCell": killCell,
              "sequenceCounter": self.sequenceCounter}
Example #34
import csv
from numpy import *
from pybrain.datasets import SequentialDataSet, UnsupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

sequencia = []
NEURONIOS = int(10e4)  # layer sizes must be integers

if __name__ == "__main__":

    sequencias = SequentialDataSet(1,1)

    for x in range(0,100):
        sequencia.append(x)

    for i,v in enumerate(sequencia):
        if i+1 < len(sequencia):
            sequencias.addSample(v, sequencia[i+1])

    print(sequencias)

    rn = buildNetwork(sequencias.indim, NEURONIOS, sequencias.outdim, recurrent=True)
    sorteio = BackpropTrainer(rn, sequencias, learningrate=1/(NEURONIOS/100))
    while 1:
        try:
            print(sorteio.train())
        except KeyboardInterrupt:
            sorteio.testOnData(verbose=True)
            break
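Once training is interrupted, the trained net can also be queried directly; a one-line hedged sketch continuing the names above:

print(rn.activate([5]))  # predict the element that follows 5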
Example #35
class Janela_NN_train(QtGui.QMainWindow):
    def __init__(self, parent=None):
        super(Janela_NN_train, self).__init__(parent)
        self.page_2 = Ui_page_arch_train()
        self.page_2.setupUi(self)

        self.move(QtGui.QDesktopWidget().availableGeometry().center().x() - self.geometry().width()/2,\
                  QtGui.QDesktopWidget().availableGeometry().center().y() - self.geometry().height()/2)

        self.page_2.pb_back.clicked.connect(self.Cancel)
        self.page_2.pb_next.clicked.connect(self.next)
        self.page_2.pb_cancel.clicked.connect(self.closeEvent)
        self.page_2.pb_train.clicked.connect(self.dataset_manipulation)
        self.page_2.pb_teste.clicked.connect(self.test_data)
        self.page_2.pb_default.clicked.connect(self.default)
        self.page_2.pb_plot.clicked.connect(self.plotting)
        self.page_2.pb_predict.clicked.connect(self.predicting)

    def dataset_manipulation(self):
        self.dataset = SupervisedDataSet(len(lib.entrada[0]),
                                         len(lib.saida[0]))

        ## Number of neurons in Hidden Layer
        nr_neurons = self.page_2.sb_nr_neurons.value()

        ## Number of epochs
        nr_epochs = self.page_2.sb_nr_epochs.value()

        ## Learning rate:
        learn_rate = self.page_2.sb_rate.value()

        ## Momentum:
        momentum = self.page_2.sb_momentum.value()

        ## Adding Train Samples
        for i in range(lib.training):
            self.dataset.addSample(lib.entrada[i], lib.saida[i])
        print('Training: %d' % lib.training)

        ## Build Network
        self.network = buildNetwork(self.dataset.indim,
                                    nr_neurons,
                                    self.dataset.outdim,
                                    bias=True)

        ## Back Propagation Trainer
        self.trainer = BackpropTrainer(self.network, self.dataset, learn_rate,
                                       momentum)

        self.page_2.count_1.setText(str(lib.training))
        self.page_2.count_2.setText(str(lib.validation))
        self.page_2.count_3.setText(str(lib.testing))
        QtGui.QApplication.processEvents()

        self.train_epochs(nr_epochs)

    def train_epochs(self, epochs):
        for epoch in range(epochs):
            self.trainer.train()
        self.page_2.pb_teste.setEnabled(True)

    def test_data(self):
        test_data = SupervisedDataSet(len(lib.entrada[0]), len(lib.saida[0]))
        # NOTE: the original added these samples to self.dataset, leaving
        # test_data empty; adding them to test_data is almost certainly intended
        for i in range(lib.training, (lib.validation + lib.testing)):
            test_data.addSample(lib.entrada[i], lib.saida[i])

        print('Testing: %d' % lib.testing)


##        self.trainer.testOnData(test_data, verbose=True)     Still not operating, investigate

    def predicting(self):
        try:
            self.K = self.page_2.sb_nr_neighbors.value()

            #k-Nearest Neighbors Regression
            self.knn = KNeighborsRegressor(n_neighbors=self.K)

            #Train machine until training range
            self.y_ = self.knn.fit(lib.entrada[:lib.training],
                                   lib.saida[:lib.training]).predict(
                                       lib.entrada[:lib.training])

    ##        self.y_ = self.knn.fit(lib.entrada,
    ##                               lib.saida).predict(lib.entrada)
        except Exception:
            QtGui.QMessageBox.warning(
                self, 'Warning',
                'Cannot predict the files. Please try again.',
                QtGui.QMessageBox.Ok)
            QtGui.QApplication.processEvents()

    def plotting(self):
        plot.Capture(lib.entrada, lib.saida, lib.training, lib.validation,
                     lib.testing, self.y_)

        window = plot.Plot(self)
        window.show()

    def next(self):
        pass

    def Cancel(self):
        self.close()

    def closeEvent(self, event):
        self.deleteLater()

    def default(self):
        self.page_2.sb_nr_epochs.setValue(100)
        self.page_2.sb_rate.setValue(0.01)
        self.page_2.sb_momentum.setValue(0.99)
Example #36
rnn = buildNetwork(trndata.indim, hidden, trndata.outdim, hiddenclass=LSTMLayer, outclass=SigmoidLayer, recurrent=True)

#rnn.randomize()

#trainer = BackpropTrainer(rnn, dataset)

#for _ in range(100):
#    print trainer.train()
# define a training method
#trainer = RPropMinusTrainer( rnn, dataset=trndata, verbose=True )
# instead, you may also try
trainer = BackpropTrainer( rnn, dataset=trndata, verbose=True)

# carry out the training
for i in range(1000):
    #trainer.trainEpochs( 2)
    #trainer.trainOnDataset(trndata)
    #trnresult = 100. * (1.0-testOnSequenceData(rnn, trndata))
    #print trnresult
    #tstresult = 100. * (1.0-testOnSequenceData(rnn, tstdata))
    #print "train error: %5.2f%%" % trnresult, ",  test error: %5.2f%%" % tstresult
    trainer.train()
    #print "train error: %5.2f%%" % trnresult
# just for reference, plot the first 5 timeseries
trainer.testOnData(tstdata, verbose=True)
#plot(trndata['input'][0:50,:],'-o')
#old(True)
#plot(trndata['target'][0:50,:],'-o')
#show()
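The commented-out lines above reference RPropMinusTrainer, PyBrain's resilient-propagation trainer (also used by the Suite classes in Examples #29 and #33). A minimal hedged sketch of that alternative, reusing rnn and trndata from this example:

from pybrain.supervised.trainers import RPropMinusTrainer

# Rprop- adapts a step size per weight, so there is no learningrate/momentum to tune
trainer = RPropMinusTrainer(rnn, dataset=trndata, verbose=True)
trainer.trainEpochs(5)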
Example #37
        vetor[0], vetor[1], vetor[2], vetor[3], vetor[4], vetor[5], vetor[6],
        vetor[7]
    ], [vetor[8]])

network = None
rna = FeedFoward(network, 8, 16, 1)

trainer = BackpropTrainer(rna.network,
                          dataset,
                          verbose=True,
                          learningrate=0.01,
                          momentum=0.99)
start = timeit.default_timer()

for epoch in range(0, 100):  # train for 100 iterations to adjust the weights
    resultTrainer = trainer.train()

stop = timeit.default_timer()
print(resultTrainer)
rna.visualizaPesosSinapticos()
print("tempo de execução", stop - start)

##test_data = SupervisedDataSet(4, 1)
##test_data.addSample([1, 1, 0, 1], [0])
##test_data.addSample([1, 1, 0, 1], [0])
##test_data.addSample([0, 0, 1, 1], [0])
##test_data.addSample([0, 0, 1, 1], [0])

##result = trainer.testOnData(test_data, verbose=True)  # verbose=True prints per-sample messages
##erroMedio = result
##print ("mean error found in the test", erroMedio)
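# Hedged note (not in the original): the commented test set above declares
# 4 inputs, but this network was built with 8, so a matching test set needs
# 8-element input vectors; the values below are placeholders.
#
#     test_data = SupervisedDataSet(8, 1)
#     test_data.addSample([1, 1, 0, 1, 0, 0, 1, 1], [0])
#     print(trainer.testOnData(test_data, verbose=True))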
Example #38
0
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer
import matplotlib.pyplot as plt

dataset = SupervisedDataSet(5, 1)
"""
2,58	200	   21	4	500	 2693,6
3,18	230 	16	3	600	 3492,8
1,65	211 	18	5	900	 2094,5
1,99	158	   19	6	380 1565,3
"""

dataset.addSample([0.000258, 0.2, 0.0021, 0.0004, 0.5], [2.694])
dataset.addSample([0.000318, 0.23, 0.0016, 0.0003, 0.6], [3.493])
dataset.addSample([0.000165, 0.211, 0.0018, 0.0005, 0.9], [2.095])
dataset.addSample([0.000199, 0.158, 0.0019, 0.0006, 0.38], [1.565])

network = buildNetwork(dataset.indim, 5, dataset.outdim, bias=False)

trainer = BackpropTrainer(network, dataset, learningrate=0.001, momentum=0.99)

erros = []
for epoch in range(0, 300):
    erros.append(trainer.train())
    print erros[-1]

plt.xlabel("Epocas")
plt.ylabel("Erros")
plt.plot(erros)
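# Hedged addition (not in the original example): plt.show() is needed to
# actually display the error curve, and the trained net can be queried with
# activate(); the input vector below is a placeholder in the same scaled
# units as the training samples.
plt.show()

print(network.activate([0.0002, 0.2, 0.002, 0.0005, 0.5]))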
Example #39
0
class Suite(PyExperimentSuite):
    def reset(self, params, repetition):
        random.seed(params['seed'])

        if params['encoding'] == 'basic':
            self.encoder = BasicEncoder(params['encoding_num'])
        elif params['encoding'] == 'distributed':
            self.encoder = DistributedEncoder(
                params['encoding_num'],
                maxValue=params['encoding_max'],
                minValue=params['encoding_min'],
                classifyWithRandom=params['classify_with_random'])
        else:
            raise Exception("Encoder not found")

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'], seed=params['seed'])
        else:
            raise Exception("Dataset not found")

        self.computeCounter = 0

        self.history = []
        self.resets = []
        self.randoms = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenishSequence(params, iteration=0)

        self.net = buildNetwork(params['encoding_num'],
                                params['num_cells'],
                                params['encoding_num'],
                                hiddenclass=LSTMLayer,
                                bias=True,
                                outputbias=params['output_bias'],
                                recurrent=True)

        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           params['encoding_num'],
                                           params['encoding_num']),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        self.sequenceCounter = 0

    def window(self, data, params):
        start = max(0, len(data) - params['learning_window'])
        return data[start:]

    def train(self, params):
        """
    Train LSTM network on buffered dataset history
    After training, run LSTM on history[:-1] to get the state correct
    :param params:
    :return:
    """
        if params['reset_every_training']:
            n = params['encoding_num']
            self.net = buildNetwork(n,
                                    params['num_cells'],
                                    n,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outputbias=params['output_bias'],
                                    recurrent=True)
            self.net.reset()

        # prepare training dataset
        ds = SequentialDataSet(params['encoding_num'], params['encoding_num'])
        history = self.window(self.history, params)
        resets = self.window(self.resets, params)

        for i in xrange(1, len(history)):
            if not resets[i - 1]:
                ds.addSample(self.encoder.encode(history[i - 1]),
                             self.encoder.encode(history[i]))
            if resets[i]:
                ds.newSequence()

        print "Train LSTM network on buffered dataset of length ", len(history)
        if params['num_epochs'] > 1:
            trainer = RPropMinusTrainer(self.net,
                                        dataset=ds,
                                        verbose=params['verbosity'] > 0)

            if len(history) > 1:
                trainer.trainEpochs(params['num_epochs'])

            # run network on buffered dataset after training to get the state right
            self.net.reset()
            for i in xrange(len(history) - 1):
                symbol = history[i]
                output = self.net.activate(self.encoder.encode(symbol))
                self.encoder.classify(output, num=params['num_predictions'])

                if resets[i]:
                    self.net.reset()
        else:
            self.trainer.setData(ds)
            self.trainer.train()

            # run network on buffered dataset after training to get the state right
            self.net.reset()
            for i in xrange(len(history) - 1):
                symbol = history[i]
                output = self.net.activate(self.encoder.encode(symbol))
                self.encoder.classify(output, num=params['num_predictions'])

                if resets[i]:
                    self.net.reset()

    def killCells(self, killCellPercent):
        """
    kill a fraction of LSTM cells from the network
    :param killCellPercent:
    :return:
    """
        if killCellPercent <= 0:
            return

        inputLayer = self.net['in']
        lstmLayer = self.net['hidden0']

        numLSTMCell = lstmLayer.outdim
        numDead = int(round(killCellPercent * numLSTMCell))  # int: used as a slice bound below
        zombiePermutation = numpy.random.permutation(numLSTMCell)
        deadCells = zombiePermutation[0:numDead]

        # remove connections from input layer to dead LSTM cells
        connectionInputToHidden = self.net.connections[inputLayer][0]
        weightInputToHidden = reshape(
            connectionInputToHidden.params,
            (connectionInputToHidden.outdim, connectionInputToHidden.indim))

        for cell in deadCells:
            for dim in range(4):
                weightInputToHidden[dim * numLSTMCell + cell, :] *= 0

        newParams = reshape(weightInputToHidden,
                            (connectionInputToHidden.paramdim, ))
        self.net.connections[inputLayer][0]._setParameters(
            newParams, connectionInputToHidden.owner)

        # remove dead connections within LSTM layer
        connectionHiddenToHidden = self.net.recurrentConns[0]
        weightHiddenToHidden = reshape(
            connectionHiddenToHidden.params,
            (connectionHiddenToHidden.outdim, connectionHiddenToHidden.indim))

        for cell in deadCells:
            weightHiddenToHidden[:, cell] *= 0

        newParams = reshape(weightHiddenToHidden,
                            (connectionHiddenToHidden.paramdim, ))
        self.net.recurrentConns[0]._setParameters(
            newParams, connectionHiddenToHidden.owner)

        # remove connections from dead LSTM cell to output layer
        connectionHiddenToOutput = self.net.connections[lstmLayer][0]
        weightHiddenToOutput = reshape(
            connectionHiddenToOutput.params,
            (connectionHiddenToOutput.outdim, connectionHiddenToOutput.indim))
        for cell in deadCells:
            weightHiddenToOutput[:, cell] *= 0

        newParams = reshape(weightHiddenToOutput,
                            (connectionHiddenToOutput.paramdim, ))
        self.net.connections[lstmLayer][0]._setParameters(
            newParams, connectionHiddenToOutput.owner)

    def replenishSequence(self, params, iteration):
        if iteration > params['perturb_after']:
            sequence, target = self.dataset.generateSequence(params['seed'] +
                                                             iteration,
                                                             perturbed=True)
        else:
            sequence, target = self.dataset.generateSequence(params['seed'] +
                                                             iteration)

        if (iteration > params['inject_noise_after']
                and iteration < params['stop_inject_noise_after']):
            injectNoiseAt = random.randint(1, 3)
            sequence[injectNoiseAt] = self.encoder.randomSymbol()

        if params['separate_sequences_with'] == 'random':
            sequence.append(
                self.encoder.randomSymbol(seed=params['seed'] + iteration))
            target.append(None)

        if params['verbosity'] > 0:
            print "Add sequence to buffer"
            print "sequence: ", sequence
            print "target: ", target

        self.currentSequence += sequence
        self.targetPrediction += target

    def check_prediction(self, topPredictions, targets):
        if targets is None:
            correct = None
        else:
            if isinstance(targets, numbers.Number):
                correct = targets in topPredictions
            else:
                correct = True
                for prediction in topPredictions:
                    correct = correct and (prediction in targets)
        return correct

    def iterate(self, params, repetition, iteration):
        currentElement = self.currentSequence.pop(0)
        target = self.targetPrediction.pop(0)

        # update buffered dataset
        self.history.append(currentElement)

        # whether there will be a reset signal after the current record
        resetFlag = (len(self.currentSequence) == 0
                     and params['separate_sequences_with'] == 'reset')
        self.resets.append(resetFlag)

        # whether there will be a random symbol after the current record
        randomFlag = (len(self.currentSequence) == 1
                      and params['separate_sequences_with'] == 'random')

        self.randoms.append(randomFlag)

        if len(self.currentSequence) == 0:
            self.replenishSequence(params, iteration)
            self.sequenceCounter += 1

        # kill cells
        killCell = False
        if iteration == params['kill_cell_after']:
            killCell = True
            self.killCells(params['kill_cell_percent'])

        # reset compute counter
        if iteration > 0 and iteration % params['compute_every'] == 0:
            self.computeCounter = params['compute_for']

        if self.computeCounter == 0 or iteration < params['compute_after']:
            computeLSTM = False
        else:
            computeLSTM = True

        if computeLSTM:
            self.computeCounter -= 1

            train = (not params['compute_test_mode']
                     or iteration % params['compute_every'] == 0)

            if train:
                if params['verbosity'] > 0:
                    print "Training LSTM at iteration {}".format(iteration)

                self.train(params)

            # run LSTM on the latest data record

            output = self.net.activate(self.encoder.encode(currentElement))
            # both encoders ('basic' and 'distributed') expose the same
            # classify() interface
            predictions = self.encoder.classify(
                output, num=params['num_predictions'])

            correct = self.check_prediction(predictions, target)

            if params['verbosity'] > 0:
                print("iteration: {0} \t"
                      "current: {1} \t"
                      "predictions: {2} \t"
                      "truth: {3} \t"
                      "correct: {4} \t").format(iteration, currentElement,
                                                predictions, target, correct)

            if self.resets[-1]:
                if params['verbosity'] > 0:
                    print "Reset LSTM at iteration {}".format(iteration)
                self.net.reset()

            return {
                "iteration": iteration,
                "current": currentElement,
                "reset": self.resets[-1],
                "random": self.randoms[-1],
                "train": train,
                "predictions": predictions,
                "truth": target,
                "killCell": killCell,
                "sequenceCounter": self.sequenceCounter
            }
Example #40
0
dataset.outdim is the size of the output layer.
We use "bias" so the neural network can adapt better
to the knowledge presented to it.
'''
network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)
'''
The procedure we use to train the network is backpropagation.
It receives the network and the dataset; "learningrate" is the learning rate,
and "momentum" is meant to speed up training
while reducing the risk of instability.
'''
trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.99)

# Right after that, the network is actually trained
for epoch in range(0, 1000):  # train for 1000 epochs
    print trainer.train()
'''
Other ways to train:

    trainer.trainEpochs(1000)
    train until convergence: trainer.trainUntilConvergence()
'''
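# Hedged sketch (not in the original): trainUntilConvergence() internally
# holds out part of the dataset for validation and stops once validation
# error stops improving; the keyword values below are illustrative.
#
#     train_errors, val_errors = trainer.trainUntilConvergence(
#         validationProportion=0.25, maxEpochs=1000)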
"""
# Now we test the network with a data set
test_data = SupervisedDataSet(2,1)
test_data.addSample([1,1],[0])
test_data.addSample([1,0],[1])
test_data.addSample([0,1],[1])
test_data.addSample([0,0],[0])
# verbose=True tells it to print messages
trainer.testOnData(test_data, verbose=True)
Example #41
0
entradas_teste = np.concatenate(
    (entradas[35:50], entradas[85:100], entradas[135:]))
saidas_teste = np.concatenate((saidas[35:50], saidas[85:100], saidas[135:]))
print(len(entradas_teste))
print(len(saidas_teste))
print('--------------------------')

from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised import BackpropTrainer

treinamento = SupervisedDataSet(4, 1)
for i in range(len(entradas_treino)):
    treinamento.addSample(entradas_treino[i], saidas_treino[i])
print(len(treinamento))
print(treinamento.indim)
print(treinamento.outdim)
print('--------------------------')

# Building the network
rede = buildNetwork(treinamento.indim, 2, treinamento.outdim, bias=True)
trainer = BackpropTrainer(rede, treinamento, learningrate=0.01, momentum=0.7)

# Training the network
for epoca in range(1000):
    trainer.train()

# Testing the network
teste = SupervisedDataSet(4, 1)
for i in range(len(entradas_teste)):
    teste.addSample(entradas_teste[i], saidas_teste[i])
trainer.testOnData(teste, verbose=True)
Example #42
0
def train(net, data_set):
    trainer = BackpropTrainer(net, data_set, learningrate=0.01, momentum=0.99, verbose=False)
    for epoch in xrange(0, 1000):
        trainer.train()
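# Hedged usage sketch (not in the original): train() mutates the network in
# place, so a typical call looks like this; the dataset name is illustrative.
#
#     net = buildNetwork(data_set.indim, 4, data_set.outdim, bias=True)
#     train(net, data_set)
#     print net.activate(data_set['input'][0])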