Example #1
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print 'successfully converted to fast network'
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
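None of the excerpts on this page include their imports. For orientation, here is a minimal, self-contained sketch of the same build-and-train-until-threshold pattern (the XOR data is illustrative; assumes a standard PyBrain install):

from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

# toy dataset: the XOR truth table (2 inputs, 1 target)
ds = SupervisedDataSet(2, 1)
for inp, tgt in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(inp, tgt)

net = buildNetwork(ds.indim, 3, ds.outdim, bias=True)
trainer = BackpropTrainer(net, ds, learningrate=0.1, momentum=0.9)

# train() runs a single epoch and returns its average error, hence
# the explicit "loop until the error is small enough" pattern above
for epoch in range(1000):
    if trainer.train() < 0.01:
        break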
Example #2
def training(d):
    # net = buildNetwork(d.indim, 55, d.outdim, bias=True,recurrent=False, hiddenclass =SigmoidLayer , outclass = SoftmaxLayer)
    net = FeedForwardNetwork()
    inLayer = SigmoidLayer(d.indim)
    hiddenLayer1 = SigmoidLayer(d.outdim)
    hiddenLayer2 = SigmoidLayer(d.outdim)
    outLayer = SigmoidLayer(d.outdim)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer1)
    net.addModule(hiddenLayer2)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_hidden = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()
    print net

    t = BackpropTrainer(net, d, learningrate=0.9, momentum=0.9, weightdecay=0.01, verbose=True)
    t.trainUntilConvergence(continueEpochs=1200, maxEpochs=1000)
    NetworkWriter.writeToFile(net, 'myNetwork'+str(time.time())+'.xml')
    return t
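NetworkWriter serializes the trained net to XML; reading it back is the mirror call, as other examples on this page do with NetworkReader. A sketch (the filename is hypothetical; it is whatever writeToFile produced above):

from pybrain.tools.customxml import NetworkReader

net = NetworkReader.readFrom('myNetwork1434023868.xml')  # hypothetical timestamped file
print net.activate([0.5] * net.indim)  # sanity check on a dummy input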
Example #3
  def train(self, params):
    """
    Train TDNN network on buffered dataset history
    :param params:
    :return:
    """
    # self.net = buildNetwork(params['encoding_num'] * params['num_lags'],
    #                         params['num_cells'],
    #                         params['encoding_num'],
    #                         bias=True,
    #                         outputbias=True)

    ds = SupervisedDataSet(params['encoding_num'] * params['num_lags'],
                           params['encoding_num'])
    history = self.window(self.history, params['learning_window'])

    n = params['encoding_num']
    for i in xrange(params['num_lags'], len(history)):
      targets = numpy.zeros((1, n))
      targets[0, :] = self.encoder.encode(history[i])

      features = numpy.zeros((1, n * params['num_lags']))
      for lags in xrange(params['num_lags']):
        features[0, lags * n:(lags + 1) * n] = self.encoder.encode(
          history[i - (lags + 1)])
      ds.addSample(features, targets)

    trainer = BackpropTrainer(self.net,
                              dataset=ds,
                              verbose=params['verbosity'] > 0)

    if len(history) > 1:
      trainer.trainEpochs(params['num_epochs'])
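The feature vector concatenates the encodings of the num_lags previous values, most recent lag first. A worked toy case (n = 3, num_lags = 2, with a hypothetical one-hot stand-in for self.encoder):

import numpy

n, num_lags = 3, 2
encode = {'a': [1, 0, 0], 'b': [0, 1, 0], 'c': [0, 0, 1]}  # toy one-hot encoder
history = ['a', 'b', 'c']

i = 2  # predict history[2] from the two values before it
features = numpy.zeros(n * num_lags)
for lags in xrange(num_lags):
    features[lags * n:(lags + 1) * n] = encode[history[i - (lags + 1)]]
# features -> [0, 1, 0, 1, 0, 0]: history[1] ('b') fills slot 0, history[0] ('a') slot 1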
Example #4
def gradientCheck(module, tolerance=0.0001, dataset=None):
    """ check the gradient of a module with a randomly generated dataset,
    (and, in the case of a network, determine which modules contain incorrect derivatives). """
    if module.paramdim == 0:
        print('Module has no parameters')
        return True
    if dataset:
        d = dataset
    else:
        d = buildAppropriateDataset(module)
    b = BackpropTrainer(module)
    res = b._checkGradient(d, True)
    # compute average precision on every parameter
    precision = zeros(module.paramdim)
    for seqres in res:
        for i, p in enumerate(seqres):
            if p[0] == 0 and p[1] == 0:
                precision[i] = 0
            else:
                precision[i] += abs((p[0] + p[1]) / (p[0] - p[1]))
    precision /= len(res)
    if max(precision) < tolerance:
        print('Perfect gradient')
        return True
    else:
        print('Incorrect gradient', precision)
        if isinstance(module, Network):
            index = 0
            for m in module._containerIterator():
                if max(precision[index:index + m.paramdim]) > tolerance:
                    print('Incorrect module:', m, res[-1][index:index + m.paramdim])
                index += m.paramdim
        else:
            print(res)
        return False
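This is the gradient checker shipped with PyBrain (pybrain.tests.helpers, assuming that import path is available); a typical call on a freshly built network:

from pybrain.tests.helpers import gradientCheck
from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(2, 3, 1)
gradientCheck(net)  # with no dataset given, a random one is generated internally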
Example #6
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size+1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)#, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Example #7
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #8
    def train(self, **kwargs):

        verbose = kwargs.get("verbose", False)

        """t = BackpropTrainer(self.rnn, dataset=self.trndata, learningrate = 0.1, momentum = 0.0, verbose = True)
        for i in range(1000):
            t.trainEpochs(5)

        """
        # pdb.set_trace()
        # print self.nn.outdim, " nn | ", self.trndata.outdim, " trndata "
        trainer = BackpropTrainer(self.nn, self.trndata, learningrate=0.0005, momentum=0.99)
        assert (self.tstdata is not None)
        assert (self.trndata is not None)
        b1, b2 = trainer.trainUntilConvergence(verbose=verbose,
                                               trainingData=self.trndata,
                                               validationData=self.tstdata,
                                               maxEpochs=10)
        #print b1, b2
        #print "new parameters are: "
        #self.print_connections()

        return b1, b2
Example #9
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
Example #12
    def initializeNetwork(self):
        can1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can1.png'), self.encodingDict["can"])
        can2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can2.png'), self.encodingDict["can"])
        can3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/can3.png'), self.encodingDict["can"])
        stain1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain1.png'), self.encodingDict["stain"])
        stain2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain2.png'), self.encodingDict["stain"])
        stain3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/stain3.png'), self.encodingDict["stain"])
        dirt1 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt1.png'), self.encodingDict["dirt"])
        dirt2 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt2.png'), self.encodingDict["dirt"])
        dirt3 = NNTrainData.NNTrainData(cv2.imread('NNTrain/dirt3.png'), self.encodingDict["dirt"])

        self.trainData.append(can1)
        self.trainData.append(can2)
        self.trainData.append(can3)
        self.trainData.append(stain1)
        self.trainData.append(stain2)
        self.trainData.append(stain3)
        self.trainData.append(dirt1)
        self.trainData.append(dirt2)
        self.trainData.append(dirt3)

        for x in self.trainData:
            x.prepareTrainData()

        self.net = buildNetwork(4, 3, 3, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
        ds = SupervisedDataSet(4, 3)

        for x in self.trainData:
            ds.addSample((x.contours/100.0, x.color[0]/1000.0, x.color[1]/1000.0, x.color[2]/1000.0), x.output)

        trainer = BackpropTrainer(self.net, momentum=0.1, verbose=True, weightdecay=0.01)
        trainer.trainOnDataset(ds, 1000)
        trainer.testOnData(verbose=True)
        print "\nNetwork trained\n"
Example #14
 def __init__(self, net, task, valueNetwork=None, **args):
     self.net = net
     self.task = task
     self.setArgs(**args)
     if self.valueLearningRate is None:
         self.valueLearningRate = self.learningRate
     if self.valueMomentum is None:
         self.valueMomentum = self.momentum
     if self.supervisedPlotting:
         from pylab import ion
         ion() 
     
     # adaptive temperature:
     self.tau = 1.
     
     # prepare the datasets to be used
     self.weightedDs = ImportanceDataSet(self.task.outdim, self.task.indim)
     self.rawDs = ReinforcementDataSet(self.task.outdim, self.task.indim)
     self.valueDs = SequentialDataSet(self.task.outdim, 1)
     
     # prepare the supervised trainers
     self.bp = BackpropTrainer(self.net, self.weightedDs, self.learningRate,
                               self.momentum, verbose=False,
                               batchlearning=True)            
     
     # CHECKME: outsource
     self.vnet = valueNetwork
     if valueNetwork is not None:
         self.vbp = BackpropTrainer(self.vnet, self.valueDs, self.valueLearningRate,
                                    self.valueMomentum, verbose=self.verbose)
         
     # keep information:
     self.totalSteps = 0
     self.totalEpisodes = 0
Example #15
def generate_and_test_nn():
    d = load_training_set()
    n = buildNetwork(d.indim, 13, d.outdim, hiddenclass=LSTMLayer, outclass=SoftmaxLayer, outputbias=False, recurrent=True)
    t = BackpropTrainer(n, learningrate=0.01, momentum=0.99, verbose=True)
    t.trainOnDataset(d, 1000)
    t.testOnData(verbose=True)
    return (n, d)
Example #16
class EightBitBrain(object):
    
    def __init__(self, dataset, inNodes, outNodes, hiddenNodes, classes):
        self.__dataset = ClassificationDataSet(inNodes, classes-1)
        for element in dataset:
            self.addDatasetSample(self._binaryList(element[0]), element[1])
        self.__dataset._convertToOneOfMany()
        self.__network = buildNetwork(inNodes, hiddenNodes, self.__dataset.outdim, recurrent=True)
        self.__trainer = BackpropTrainer(self.__network, learningrate = 0.01, momentum = 0.99, verbose = True)
        self.__trainer.setData(self.__dataset)

    def _binaryList(self, n):
        return [int(c) for c in "{0:08b}".format(n)]
    
    def addDatasetSample(self, argument, target):
        self.__dataset.addSample(argument, target)

    def train(self, epochs):
        self.__trainer.trainEpochs(epochs)
    
    def activate(self, information):
        result = self.__network.activate(self._binaryList(information))
        highest = (0,0)
        for resultClass in range(len(result)):
            if result[resultClass] > highest[0]:
                highest = (result[resultClass], resultClass)
        return highest[1]
Example #17
def testOldTraining(hidden=15, n=None):
    d = XORDataSet()
    if n is None:
        n = buildNetwork(d.indim, hidden, d.outdim, recurrent=False)
    t = BackpropTrainer(n, learningrate=0.01, momentum=0., verbose=False)
    t.trainOnDataset(d, 250)
    t.testOnData(verbose=True)
Example #18
    def execute(self):
        network = self.networkFactoryMethod()
        trainer = BackpropTrainer(network, learningrate = self.learningrate, momentum = self.momentum)
        trainer.trainOnDataset(self.datasetForTraining, self.epochs)
        averageError = trainer.testOnData(self.datasetForTest)
        self.collectedErrors.append(averageError)

        return averageError
Example #19
def main():
    print '----- loading train/test datasets -----'
    train_ds, test_ds = create_datasets()
    print '----- building the network -----'
    net = ann_network()
    trainer = BackpropTrainer(net, learningrate=0.1, momentum=0.1, verbose=True)
    print '----- training the model -----'
    trainer.trainOnDataset(train_ds)
Example #20
 def learn_until_convergence(self, learning_rate, momentum, max_epochs, continue_epochs, verbose=True):
     if verbose:
         print "Training neural network..."
     trainer = BackpropTrainer(self.network, self.learn_data, learningrate=learning_rate, momentum=momentum)
     training_errors, validation_errors = trainer.trainUntilConvergence(continueEpochs=continue_epochs,
                                                                        maxEpochs=max_epochs)
     self.x = range(1, len(training_errors) + 1)
     self.err = training_errors
     return self.network
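trainUntilConvergence holds out part of the dataset for validation (validationProportion defaults to 0.25) and returns the per-epoch training and validation error lists, which is what self.err captures above. A self-contained sketch of inspecting them (toy data; assumes matplotlib is installed):

from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
import matplotlib.pyplot as plt

ds = SupervisedDataSet(1, 1)
for k in range(20):
    x = k / 20.0
    ds.addSample((x,), (x * x,))  # toy regression target

trainer = BackpropTrainer(buildNetwork(1, 5, 1), ds)
train_errs, val_errs = trainer.trainUntilConvergence(maxEpochs=100, continueEpochs=10)

plt.plot(train_errs, label='training error')
plt.plot(val_errs, label='validation error')
plt.legend()
plt.show()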
Example #21
def training(d):
    """
    Builds a network and trains it.
    """
    n = buildNetwork(d.indim, 4, d.outdim, recurrent=True)
    t = BackpropTrainer(n, d, learningrate=0.01, momentum=0.99, verbose=True)
    for epoch in range(0, 500):
        t.train()
    return t
Example #22
def main(f_samples):
    f_reading = open(f_samples, 'r')
    global data
    data = []

    for line in f_reading:
        line = line.split()
        data.append( (float(line[0]), float(line[-1])) )

    # helper: every 5th sample, offset by x
    data_module = lambda x: map(lambda z: data[z], filter(lambda y: y % 5 == x, xrange(len(data))))

    global data1
    data1 = [data_module(0), data_module(1), data_module(2), data_module(3), data_module(4)]

    global data_transformed
    data_transformed = take(data, rate = 60)

    global data_transformed_training
    data_transformed_training = map( lambda x: data_transformed[x], filter( lambda x: uniform(0, 1) > 0.3, xrange(len(data_transformed)) ))

    #Learning process-----------------------------------------------------------------

    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
#    net.addModule(hiddenLayer0)
#    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

#    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
#    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print net
    ##Net with 3 inputs, 8 hidden neurons in a layer and 8 in another, and 1 out.
    #net = buildNetwork(3,8,8,1)
    ##Set with 2 inputs and one output for each sample
    samples = SupervisedDataSet(3,1)

    for i in data_transformed_training:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)

    print 'Training'
    trainer.trainUntilConvergence(maxEpochs=10)

    print 'Comparing'
    compare_net_samples(net, data_transformed)
    print "Number of samples %d for training." % len(data_transformed_training)
Example #23
 def train(self, epochs=None):
     trainer = BackpropTrainer(
         self.net,
         self.training_data
     )
     if epochs:
         trainer.trainEpochs(epochs)
     else:
         trainer.trainUntilConvergence()
Example #24
def training(d):
    """
    Builds a network and trains it.
    """
    n = buildNetwork(d.indim, 4, d.outdim, recurrent=True)
    t = BackpropTrainer(n, d, learningrate=0.01, momentum=0.99, verbose=True)
    for epoch in range(0, 1000):
        t.train()
    return t
Example #25
    def initializeNetwork(self):
        self.net = buildNetwork(26, 15, 5, hiddenclass=TanhLayer, outclass=SoftmaxLayer)  # 15 is roughly the mean of the input and output sizes
        ds = ClassificationDataSet(26, nb_classes=5)
        
        for x in self.train:
            ds.addSample(x.frequency, self.encodingDict[x.lang])
        ds._convertToOneOfMany()

        trainer = BackpropTrainer(self.net, dataset=ds, weightdecay=0.01, momentum=0.1, verbose=True)
        trainer.trainUntilConvergence(maxEpochs=100)
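ClassificationDataSet stores one integer class label per sample; _convertToOneOfMany() then expands the targets into one-hot vectors so they match the SoftmaxLayer output. A toy sketch:

from pybrain.datasets import ClassificationDataSet

ds = ClassificationDataSet(2, nb_classes=3)
ds.addSample((0.1, 0.9), 2)  # target is the class index
ds.addSample((0.8, 0.2), 0)
ds._convertToOneOfMany()
print ds['target']  # one-hot rows, one per sample: [[0 0 1], [1 0 0]]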
Example #26
def testTraining():
    d = PrimesDataSet()
    d._convertToOneOfMany()
    n = buildNetwork(d.indim, 8, d.outdim, recurrent=True)
    t = BackpropTrainer(n, learningrate = 0.01, momentum = 0.99, verbose = True)
    t.trainOnDataset(d, 1000)
    t.testOnData(verbose=True)
    for i in range(15):
        print "Guess: %s || Real: %s" % (str(n.activate(i)), str(i in d.generatePrimes(10)))
    print d
Example #27
 def train(self, data, iterations=NETWORK_ITERATIONS):
     for item in data:
         self.dataset.addSample(item[0], item[1])
     trainer = BackpropTrainer(self.network, self.dataset, learningrate=NETWORK_LEARNING_RATE,
                               momentum=NETWORK_MOMENTUM)
     error = 0
     for i in xrange(iterations):
         error = trainer.train()
         print (i + 1), error
     return error
Example #28
class PerceptronPyBrainFilter(LinearPerceptron): # PYBRAIN
    def __init__(self, *args, **kwargs):    
        super(PerceptronPyBrainFilter, self).__init__(*args, **kwargs)
        
        # input, hidden_layers, output
        self.perceptron = buildNetwork(self.num_last_measures, 0, 1,
                                       hiddenclass=pybrain.structure.modules.SigmoidLayer,  #@UndefinedVariable
                                       outclass=pybrain.structure.modules.SigmoidLayer)  #@UndefinedVariable
        
        # input dimension, target dimension
        self.pointer = 0
        self.data = SupervisedDataSet(self.num_last_measures, 1)
        for _i in xrange(self.dataset_size):
            self.data.addSample([0] * self.num_last_measures, 0)     
        self.trainer = BackpropTrainer(self.perceptron, self.data, learningrate=self.learning_rate)
        
        # This call does some internal initialization which is necessary before the net can finally
        # be used: for example, the modules are sorted topologically.
        self.perceptron.sortModules()
        

    def train(self):
        self.trainer.trainEpochs(1)
    
    
    def guess(self, x):
        return self.perceptron.activate(x)
    

    def apply(self, x):                
        if len(self.lag_buffer) < self.lag - 1:
            if len(self.last_measures) < self.num_last_measures:
                self.last_measures.append(x)
            else:
                self.lag_buffer.append(x)                  
            return x
        
        self.lag_buffer.append(x)
        #self.data.addSample(tuple(self.last_measures), self.lag_buffer[-1])
        self.data['input'][self.pointer] = np.array(self.last_measures)
                                                
        self.train()
        
        if len(self.data) == self.dataset_size:        
            #del self.data[0]
            #self.data.removeSample
            #self.data.removeSample
            pass
            
        del self.last_measures[0]
        self.last_measures.append(self.lag_buffer[0])
        
        del self.lag_buffer[0]
                        
        return self.guess(self.last_measures)
Example #29
def result(request, form):
    dataset = SupervisedDataSet(2, 1)
    dados = form.cleaned_data

    # Add the XOR truth table
    dataset.addSample([0, 0], [0])
    dataset.addSample([0, 1], [1])
    dataset.addSample([1, 0], [1])
    dataset.addSample([1, 1], [0])

    bias = dados['bias'] is not None

    # input and output dimensions; the middle argument is the number of hidden units
    network = buildNetwork(dataset.indim, int(dados['num_camadas']), dataset.outdim, bias=bias)
    trainer = BackpropTrainer(network, dataset, learningrate=float(dados['learningrate']), momentum=float(dados['momentum']))

    pesos_iniciais = network.params

    network._setParameters(np.random.uniform(dados['peso_start'], dados['peso_end'], network.params.shape[0]))

    error = 1.00000000

    epocasPercorridas = 0

    errors = []
    it = []
    while epocasPercorridas < dados['epochs'] and error > dados['erro_max']:
        error = trainer.train()
        epocasPercorridas += 1
        errors.append(error)
        it.append(epocasPercorridas)
    graph = [[idx, e] for idx, e in enumerate(errors)]

    context = {'form': form.cleaned_data,
               'error': error,
               'graph': json.dumps(graph),
               'epocas': epocasPercorridas,
               'pesos_iniciais': pesos_iniciais,
               'pesos_finais': network.params,
               'result00': network.activate([0, 0])[0],
               'result01': network.activate([0, 1])[0],
               'result10': network.activate([1, 0])[0],
               'result11': network.activate([1, 1])[0]}

    return render(request, 'result.html', context)
Example #30
def neuralNetworkRegression(X,Y):
    print ("NEURAL NETWORK REGRESSION")
    print ("Executing...")

    X_train, X_test, Y_train, Y_test = cross_validation.train_test_split(X, Y, test_size = 0.10, random_state = 5)
    Y_test = Y_test.reshape(-1,1)
    Y_train = Y_train.reshape(-1,1)
    RMSEerror = []

    train = np.vstack((X_train, X_test))  # append both testing and training into one array
    outputTrain = np.vstack((Y_train, Y_test))
    outputTrain = outputTrain.reshape( -1, 1 )

    inputSize = train.shape[1]
    targetSize = outputTrain.shape[1]

    ds = SupervisedDataSet(inputSize, targetSize)
    ds.setField('input', train)
    ds.setField('target', outputTrain)

    hiddenSize = 100
    epochs = 100  # got after parameter tuning

    # neural network training model
    net = buildNetwork( inputSize, hiddenSize, targetSize, bias = True )
    trainer = BackpropTrainer(net, ds)

    # uncomment out to plot epoch vs rmse
    # takes time to execute as gets best epoch value
    # getting the best value of epochs

    print ("training for {} epochs...".format( epochs ))
    '''
    for i in range(epochs):
        print (i)
        mse = trainer.train()
        rmse = mse ** 0.5
        RMSEerror.append(rmse)

    plt.plot(range(epochs), RMSEerror)
    plt.xlabel("Epochs")
    plt.ylabel("RMSE")
    plt.title("RMSE vs Epochs")
    plt.savefig("../Graphs/Network/Question 2c/RMSE vs Epochs.png")

    plt.show()
    '''
    print ("Model training in process...")
    train_mse, validation_mse = trainer.trainUntilConvergence(verbose = True, validationProportion = 0.15, maxEpochs = epochs, continueEpochs = 10)
    p = net.activateOnDataset(ds)
    
    mse = mean_squared_error(outputTrain, p)
    rmse = mse ** 0.5

    print ("Root Mean Squared Error for Best Parameters : " + str(rmse))
Example #31
    def reset(self, params, repetition):
        print params

        self.nDimInput = 1  #3
        self.inputEncoder = PassThroughEncoder()

        if params['output_encoding'] is None:
            self.outputEncoder = PassThroughEncoder()
            self.nDimOutput = 1
        elif params['output_encoding'] == 'likelihood':
            self.outputEncoder = ScalarBucketEncoder()
            self.nDimOutput = self.outputEncoder.encoder.n

        if (params['dataset'] == 'nyc_taxi'
                or params['dataset'] == 'nyc_taxi_perturb_baseline'):
            self.dataset = NYCTaxiDataset(params['dataset'])
        else:
            raise Exception("Dataset not found")

        self.testCounter = 0
        self.resets = []
        self.iteration = 0

        # initialize LSTM network
        random.seed(6)
        if params['output_encoding'] is None:
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outputbias=True,
                                    recurrent=True)
        elif params['output_encoding'] == 'likelihood':
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outclass=SigmoidLayer,
                                    recurrent=True)
        print self.net['out']
        print self.net['hidden0']
        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           self.nDimInput, self.nDimOutput),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        (self.networkInput, self.targetPrediction, self.trueData) = \
          self.dataset.generateSequence(
          prediction_nstep=params['prediction_nstep'],
          output_encoding=params['output_encoding'],
          noise=params['noise'])
Example #32
File: pred.py  Project: oreon/sfcode
def training(d):
    """
    Builds a network and trains it.
    """
    n = buildNetwork(d.indim, INPUTS - 3, INPUTS - 4, d.outdim, recurrent=True)
    print n
    t = BackpropTrainer(n, d, learningrate=0.02, momentum=0.88)
    #for epoch in range(0,700):
    t.trainUntilConvergence(d, 1190)
    
    return t
Example #33
    def train_network(self, network, dataset):

        trainer = BackpropTrainer(network,
                                  dataset,
                                  learningrate=0.01,
                                  momentum=0.99,
                                  verbose=True)
        for epoch in range(0, 1000):
            trainer.train()

        return network
Example #35
    def train(self, train_data_set, test_data_set, epoch=100):
        trainer = BackpropTrainer(self.network, train_data_set)

        progress_bar = ProgressBar(epoch)

        for i in range(epoch):
            progress_bar.update(i+1)
            time.sleep(0.01)
            trainer.train()

        return trainer.testOnData(test_data_set, verbose=True)
Example #36
    def trainNetwork(self, net, dataset):
        print("Started Training: " + strftime("%Y-%m-%d %H:%M:%S", gmtime()))

        t = BackpropTrainer(net,
                            dataset,
                            learningrate=0.01,
                            momentum=0,
                            verbose=False)
        t.trainEpochs(epochs=1)

        print("Finished Training: " + strftime("%Y-%m-%d %H:%M:%S", gmtime()))
        return t
Example #37
def train_network(d, iterations):
    print("Training")
    n = buildNetwork(d.indim, 4, d.outdim, bias=True)
    t = BackpropTrainer(
        n,
        d,
        learningrate=0.01,
        momentum=0.99,
        verbose=False)
    for epoch in range(iterations):
        t.train()
    return n
Example #38
def testTraining():
    print "Reading data"
    d = XORDataSet()
    traind, testd = d.splitWithProportion(0.8)
    print "Building network"
    n = buildNetwork(traind.indim, 4, traind.outdim, recurrent=True)
    print "Training"
    t = BackpropTrainer(n, learningrate=0.01, momentum=0.99, verbose=True)
    t.trainOnDataset(traind, 100)
    testd = XORDataSet(begin=60000, end=80000)  # note: this replaces the held-out split from above
    print t.module.params
    t.testOnData(testd, verbose=True)
Example #39
def _train(X, Y, filename, epochs=50):
    global nn
    nn = buildNetwork(INPUT_SIZE, HIDDEN_LAYERS, OUTPUT_LAYER, bias=True, outclass=SoftmaxLayer)
    ds = ClassificationDataSet(INPUT_SIZE, OUTPUT_LAYER)
    for x, y in zip(X, Y):
        ds.addSample(x, y)
    trainer = BackpropTrainer(nn, ds)
    for i in xrange(epochs):
        error = trainer.train()
        print "Epoch: %d, Error: %7.4f" % (i+1, error)
    # trainer.trainUntilConvergence(verbose=True, maxEpochs=epochs, continueEpochs=10)
    if filename:
        NetworkWriter.writeToFile(nn, 'data/' + filename + '.nn')
Example #40
    def reset(self, params, repetition):
        random.seed(params['seed'])

        if params['encoding'] == 'basic':
            self.encoder = BasicEncoder(params['encoding_num'])
        elif params['encoding'] == 'distributed':
            self.encoder = DistributedEncoder(
                params['encoding_num'],
                maxValue=params['encoding_max'],
                minValue=params['encoding_min'],
                classifyWithRandom=params['classify_with_random'])
        else:
            raise Exception("Encoder not found")

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'], seed=params['seed'])
        else:
            raise Exception("Dataset not found")

        self.computeCounter = 0

        self.history = []
        self.resets = []
        self.randoms = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenishSequence(params, iteration=0)

        self.net = buildNetwork(params['encoding_num'],
                                params['num_cells'],
                                params['encoding_num'],
                                hiddenclass=LSTMLayer,
                                bias=True,
                                outputbias=params['output_bias'],
                                recurrent=True)

        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           params['encoding_num'],
                                           params['encoding_num']),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        self.sequenceCounter = 0
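The trainer above is constructed with an empty SequentialDataSet and presumably fed sequences incrementally elsewhere. The usual pattern for filling one, with newSequence() marking sequence boundaries (dimensions illustrative):

from pybrain.datasets import SequentialDataSet

ds = SequentialDataSet(3, 3)  # indim and outdim both equal to encoding_num here
ds.newSequence()  # start a fresh sequence; the recurrent state is reset at this boundary during training
ds.addSample((1, 0, 0), (0, 1, 0))  # timestep t: current encoding -> next-step target
ds.addSample((0, 1, 0), (0, 0, 1))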
Example #41
def trainedLSTMNN():
    """
    n = RecurrentNetwork()

    inp = LinearLayer(100, name = 'input')
    hid = LSTMLayer(30, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid))
    n.addConnection(FullConnection(hid, out))

    n.addRecurrentConnection(FullConnection(hid, hid))
    n.sortModules()
    """
    n = buildNetwork(100,
                     50,
                     1,
                     hiddenclass=LSTMLayer,
                     outputbias=False,
                     recurrent=True)

    print "Network created"
    d = load1OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.1:
            break
        count = count + 1
        # if (count == 60):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #42
    def train(self):
        """t = BackpropTrainer(self.rnn, dataset=self.trndata, learningrate = 0.1, momentum = 0.0, verbose = True)
        for i in range(1000):
            t.trainEpochs(5)

        """
        print self.nn.outdim, " nn | ", self.trndata.outdim, " trndata "
        trainer = BackpropTrainer(self.nn, self.trndata, learningrate=0.0005, momentum=0.99)
        b1, b2 = trainer.trainUntilConvergence(verbose=True,
                                               trainingData=self.trndata,
                                               validationData=self.tstdata,
                                               maxEpochs=10)
        print b1, b2
        print "new parameters are: "
        self.print_connections()
Example #43
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(
        FullConnection(inp,
                       hid,
                       inSliceFrom=100,
                       inSliceTo=5150,
                       outSliceFrom=1,
                       outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.01:
            break
        count = count + 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #44
    def training(self,d):
        """
        Builds a network ,trains and returns it
        """

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(4)  # 4 inputs
        hiddenLayer = SigmoidLayer(3)  # 3 neurons in the hidden layer with sigmoid activation
        outLayer = LinearLayer(2)  # 2 neurons in the output layer

        # add layers to the network
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        # create connections
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        # add connections
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        # sortModules() does the internal initialization (topological sort) required before the net can be used
        self.net.sortModules()

        print self.net

        # generate a big training set
        trainingSet = SupervisedDataSet(4,2)

        trainArr = self.generate_training_set()
        for ri in range(2000):
            input = ((trainArr[0][ri][0],trainArr[0][ri][1],trainArr[0][ri][2],trainArr[0][ri][3]))
            target = ((trainArr[1][ri][0],trainArr[1][ri][1]))
            trainingSet.addSample(input, target)

        # create backpropagation trainer on the generated training set
        t = BackpropTrainer(self.net, trainingSet, learningrate=0.00001, momentum=0.99)
        while True:
            globErr = t.train()
            print "global error:", globErr
            if globErr < 0.0001:
                break

        return self.net
Example #45
 def build_net(self):
     if os.path.exists(self.NET_FILE):
         return NetworkReader.readFrom(self.NET_FILE)
     ds = ClassificationDataSet(len(feats), nb_classes=len(classes))
     for c in classes:
         print c
         with codecs.open(os.path.join(self.data_root, c+".txt"), 'r', 'utf8') as f:
             for line in f:
                 r = Record("11", line, c, "")
                 ds.appendLinked(r.features(), [r.class_idx()])
     ds._convertToOneOfMany([0, 1])
     net = buildNetwork(ds.indim, int((ds.indim + ds.outdim)/2), ds.outdim, bias=True, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
     trainer = BackpropTrainer(net, ds, momentum=0.75, verbose=True)
     trainer.trainUntilConvergence(maxEpochs=300)
     NetworkWriter.writeToFile(net, self.NET_FILE)
     return net
Example #46
    def train(self, dataSet):
        """
        Builds a network and trains it.
        """
        if os.stat(self.predictor_path).st_size != 0:
            self.network = NetworkReader.readFrom(self.predictor_path)
        else:
            self.network = buildNetwork(dataSet.indim, 4, dataSet.outdim, recurrent=True)

        t = None

        if len(dataSet) > 0:
            t = BackpropTrainer(self.network, dataSet, learningrate=self.learningrate, momentum=self.momentum, verbose=False)
            for epoch in range(0, self.epochs):
                t.train()

        NetworkWriter.writeToFile(self.network, self.predictor_path)

        return t
Example #48
def testTraining():
    # the AnBnCn dataset (sequential)
    d = AnBnCnDataSet()

    # build a recurrent network to be trained
    hsize = 2
    n = RecurrentNetwork()
    n.addModule(TanhLayer(hsize, name='h'))
    n.addModule(BiasUnit(name='bias'))
    n.addOutputModule(LinearLayer(1, name='out'))
    n.addConnection(FullConnection(n['bias'], n['h']))
    n.addConnection(FullConnection(n['h'], n['out']))
    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
    n.sortModules()

    # initialize the backprop trainer and train
    t = BackpropTrainer(n, learningrate=0.1, momentum=0.0, verbose=True)
    t.trainOnDataset(d, 200)

    # the resulting weights are in the network:
    print('Final weights:', n.params)
Example #50
    def dataset_manipulation(self):
        self.dataset = SupervisedDataSet(len(lib.entrada[0]),
                                         len(lib.saida[0]))

        ## Number of neurons in Hidden Layer
        nr_neurons = self.page_2.sb_nr_neurons.value()

        ## Number of epochs
        nr_epochs = self.page_2.sb_nr_epochs.value()

        ## Learning rate:
        learn_rate = self.page_2.sb_rate.value()

        ## Momentum:
        momentum = self.page_2.sb_momentum.value()

        ## Adding Train Samples
        for i in range(lib.training):
            self.dataset.addSample(lib.entrada[i], lib.saida[i])
        print('Training: %d' % lib.training)

        ## Build Network
        self.network = buildNetwork(self.dataset.indim,
                                    nr_neurons,
                                    self.dataset.outdim,
                                    bias=True)

        ## Back Propagation Trainer
        self.trainer = BackpropTrainer(self.network, self.dataset, learn_rate,
                                       momentum)

        self.page_2.count_1.setText(str(lib.training))
        self.page_2.count_2.setText(str(lib.validation))
        self.page_2.count_3.setText(str(lib.testing))
        QtGui.QApplication.processEvents()

        self.train_epochs(nr_epochs)
Example #52
def get_third_nn(value, good_data, bad_data):
    build_network = FeedForwardNetwork()
    inLayer = LinearLayer(len(good_data[0]))
    hiddenLayer = SigmoidLayer(value)
    outLayer = SigmoidLayer(1)

    build_network.addInputModule(inLayer)
    build_network.addModule(hiddenLayer)
    build_network.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    in_to_out = FullConnection(inLayer, outLayer)

    build_network.addConnection(in_to_hidden)
    build_network.addConnection(hidden_to_out)
    build_network.addConnection(in_to_out)

    build_network.sortModules()
    trainer = BackpropTrainer(build_network,
                              get_supervised_data_set(good_data, bad_data))

    result = trainer.trainUntilConvergence()
    return result[0][-1]
Example #53
def generate_forecasters(data, dtt, alpha):
    #Learning process-----------------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer0)
    #    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    #    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    #    net.addConnection(FullConnection(hiddenLayer0, outLayer))
    #    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    #    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print net
    ##Net with 3 inputs, 8 hidden neurons in a layer and 8 in another, and 1 out.
    #net = buildNetwork(3,8,8,1)
    ##Set with 2 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)

    for i in dtt:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)

    print 'Training'
    #trainer.trainUntilConvergence(maxEpochs= 1)

    #Making Forecasters---------------------------------------------------------------
    aux = map(lambda x: x[0], data)

    def exp(self, a, x):
        self.exp = a * data[aux.index(x) - 1][1] + (1 - a) * self.exp
        return self.exp

    naive = Forecaster(name='Naive',
                       predict_function=lambda x: data[aux.index(x) - 1][1])
    exponential = Forecaster(name='Exponential')
    exponential.exp = data[0][1]
    exponential.predict = lambda x: exp(exponential, alpha, x)
    network = Forecaster(name='Network', predict_function=net.activate)

    return naive, exponential, network
Example #54
def testTraining():
    ds = WebsiteFeaturesDataSet()
    net = buildNetwork(ds.indim, 4, ds.outdim, recurrent=True)
    trainer = BackpropTrainer(net,
                              learningrate=0.001,
                              momentum=0.99,
                              verbose=True)
    trainer.trainOnDataset(ds, 1000)
    trainer.testOnData(verbose=True)
    import pdb
    pdb.set_trace()
Example #56
def training_and_testing():
    nn = init_neural_network()

    training = learning.get_labeled_data(
        '%strain-images-idx3-ubyte.gz' % (database_folder),
        '%strain-labels-idx1-ubyte.gz' % (database_folder),
        '%strainig' % (database_folder))
    test = learning.get_labeled_data(
        '%st10k-images-idx3-ubyte.gz' % (database_folder),
        '%st10k-labels-idx1-ubyte.gz' % (database_folder),
        '%stest' % (database_folder))

    FEATURES = N_INPUT_LAYER
    print("Features to analyze: %i" % FEATURES)
    testdata = ClassificationDataSet(FEATURES, 1, nb_classes=OUTPUT_LAYER)
    trainingdata = ClassificationDataSet(FEATURES, 1, nb_classes=OUTPUT_LAYER)

    for i in range(len(test['data'])):
        testdata.addSample(test['data'][i], test['label'][i])
    for j in range(len(training['data'])):
        trainingdata.addSample(training['data'][j], training['label'][j])

    trainingdata._convertToOneOfMany()
    testdata._convertToOneOfMany()

    trainer = BackpropTrainer(nn,
                              dataset=trainingdata,
                              momentum=MOMENTUM,
                              verbose=True,
                              weightdecay=W_DECAY,
                              learningrate=L_RATE,
                              lrdecay=L_DECAY)

    for i in range(EPOCHS):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(),
                                 trainingdata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=testdata),
                                 testdata['class'])

        print("epoch: %4d" % trainer.totalepochs,
              "  train error: %5.2f%%" % trnresult,
              "  test error: %5.2f%%" % tstresult)
    return nn
Example #57
def treinamento_Portas(list_Entrada_Saida, NumCamadasOcultas, taxa_aprendizado,
                       epochs):
    # add the samples
    d_in = 0
    d_out = 0
    for d in list_Entrada_Saida:
        d_in = len(d[0])
        d_out = len(d[1])

    dataset = SupervisedDataSet(d_in, d_out)
    for l in list_Entrada_Saida:
        entrada = l[0]
        saida = l[1]
        dataset.addSample(entrada, saida)

    # building the network

    network = buildNetwork(
        dataset.indim,
        NumCamadasOcultas,
        dataset.outdim,
        bias=True,
        hiddenclass=SigmoidLayer,
        outclass=SigmoidLayer,
    )

    # using backpropagation
    trainer = BackpropTrainer(network, dataset, learningrate=taxa_aprendizado)

    # training the network
    for epocas in range(epochs):
        trainer.train()

    # testing the network
    test_data = SupervisedDataSet(d_in, d_out)
    for l in list_Entrada_Saida:
        entrada = l[0]
        saida = l[1]
        test_data.addSample(entrada, saida)

    try:
        trainer.testOnData(test_data, verbose=True)
    except:
        pass
Example #58
def network(dataset, input_list):
    num_words = len(input_list)
    #dividing the dataset into training and testing data
    tstdata, trndata = dataset.splitWithProportion(0.25)

    #building the network
    net = RecurrentNetwork()
    input_layer1 = LinearLayer(num_words, name='input_layer1')
    input_layer2 = LinearLayer(num_words, name='input_layer2')
    hidden_layer = TanhLayer(num_words, name='hidden_layer')
    output_layer = SoftmaxLayer(num_words, name='output_layer')
    net.addInputModule(input_layer1)
    net.addInputModule(input_layer2)
    net.addModule(hidden_layer)
    net.addOutputModule(output_layer)
    net.addConnection(
        FullConnection(input_layer1, hidden_layer, name='in1_to_hidden'))
    net.addConnection(
        FullConnection(input_layer2, hidden_layer, name='in2_to_hidden'))
    net.addConnection(
        FullConnection(hidden_layer, output_layer, name='hidden_to_output'))
    net.addConnection(
        FullConnection(input_layer1, output_layer, name='in1_to_out'))
    net.addConnection(
        FullConnection(input_layer2, output_layer, name='in2_to_out'))
    net.sortModules()
    #backpropagation
    trainer = BackpropTrainer(net,
                              dataset=trndata,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)
    #error checking part
    for i in range(10):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['target'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['target'])
        print "epoch: %4d" % trainer.totalepochs
        print "  train error: %5.10f%%" % trnresult
        print "  test error: %5.10f%%" % tstresult
    return net