def main():

    # build the training dataset
    datasetTreino = montaDados()

    # build the test dataset
    datasetTeste = montaDados()

    # define the neural network structure:
    # the input layer is sized to the dataset's input dimension (3)
    # the first hidden layer has 12 neurons
    # the second hidden layer has 6 neurons
    # the output layer is sized to the dataset's output dimension (1)
    # a bias unit is added to help the network adapt
    network = buildNetwork(datasetTreino.indim, 12, 6, datasetTreino.outdim, bias=True)

    # create the trainer:
    # it uses the network structure defined in the network object
    # and the training dataset
    neuralNetwork = BackpropTrainer(network, datasetTreino, learningrate=0.01, momentum=0.9)

    # train the network
    neuralNetwork.trainEpochs(1500)

    # validate the network on the test data
    neuralNetwork.testOnData(datasetTeste, verbose=True)
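montaDados() is not shown in this example. A minimal sketch of what it might look like, assuming the 3-input, 1-output dimensions the comments above describe; the sample values are purely illustrative:

from pybrain.datasets.supervised import SupervisedDataSet

def montaDados():
    # hypothetical: builds a 3-input, 1-output supervised dataset
    dados = SupervisedDataSet(3, 1)
    # illustrative samples only; the real data is not shown in the source
    dados.addSample([0, 0, 1], [1])
    dados.addSample([1, 0, 1], [0])
    dados.addSample([1, 1, 0], [0])
    return dados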
Example 2
def ANN(
    trainFeature, trainLabel, testFeature, testLabel, netStructure, para_rate, para_momentum
):  # netStructure is a list [in, hidden1, hidden2, out]; momentum is a parameter of the gradient-descent update
    sampleNum = trainFeature.shape[0]
    featureNum = trainFeature.shape[1]
    Dataset = SupervisedDataSet(featureNum, 1)
    for i in range(sampleNum):
        print(i)
        Dataset.addSample(list(trainFeature[i]), [trainLabel[i]])
    Network = buildNetwork(
        netStructure[0],
        netStructure[1],
        netStructure[2],
        netStructure[3],
        hiddenclass=SigmoidLayer,
        outclass=SigmoidLayer,
    )
    T = BackpropTrainer(Network, Dataset, learningrate=para_rate, momentum=para_momentum, verbose=True)
    # print(Dataset['input'])
    # keep training one epoch at a time until the MSE improves by less than 1e-4
    # (the original compared a fresh testOnData() against the value just appended,
    # so the loop always exited after a single pass)
    errorList = []
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    errorList.append(T.testOnData(Dataset))
    while abs(errorList[-1] - errorList[-2]) > 0.0001:
        T.trainOnDataset(Dataset)
        errorList.append(T.testOnData(Dataset))
    # output the predicted labels for the training set
    print(np.array([Network.activate(x) for x in trainFeature]))
    # print(testLabel)
    print(Network.activate([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))
    return errorList
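A hypothetical call to ANN, assuming small NumPy arrays as inputs; note the function consumes four entries of netStructure ([in, hidden1, hidden2, out]) and that the hard-coded activate() call above expects 24 input features:

import numpy as np

# toy data: 6 samples, 24 binary features, binary labels (illustrative only)
trainFeature = np.random.randint(0, 2, size=(6, 24))
trainLabel = np.array([0, 1, 1, 0, 1, 0])
errors = ANN(trainFeature, trainLabel, trainFeature, trainLabel,
             [24, 12, 6, 1], 0.01, 0.9)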
Example 3
def main():

    # create the dataset: each input is a vector of length 2
    # and each output is a scalar
    dataset = SupervisedDataSet(2, 1)

    criandoDataset(dataset)

    # build the neural network with, respectively:
    # the network's input dimension
    # the number of neurons in the hidden layer
    # the network's output dimension
    # plus a bias unit so the network can adapt over time
    network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)

    # create the network trainer, passing:
    # the network
    # the dataset
    # the learning rate
    # and a momentum term to speed up training
    trainer = BackpropTrainer(network,
                              dataset,
                              learningrate=0.01,
                              momentum=0.99)

    # loop that trains the network
    for epocas in range(0, 1000):

        trainer.train()

    # run the test
    datasetTeste = SupervisedDataSet(2, 1)
    criandoDataset(datasetTeste)
    trainer.testOnData(datasetTeste, verbose=True)
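criandoDataset() is not shown here; a minimal sketch consistent with the 2-input, 1-output dataset, assuming XOR-style samples like the other examples on this page:

def criandoDataset(dataset):
    # hypothetical: fill the dataset with the XOR truth table
    dataset.addSample([0, 0], [0])
    dataset.addSample([0, 1], [1])
    dataset.addSample([1, 0], [1])
    dataset.addSample([1, 1], [0])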
Example 4
def main():
    print "Calculating mfcc...."
    mfcc_coeff_vectors_dict = {}
    for i in range(1, 201):
        extractor = FeatureExtractor(
            '/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Happiness/HappinessAudios/' + str(i) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    for i in range(201, 401):
        extractor = FeatureExtractor(
            '/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Sadness/SadnessAudios/' + str(i - 200) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    audio_with_min_frames, min_frames = get_min_frames_audio(
        mfcc_coeff_vectors_dict)
    processed_mfcc_coeff = preprocess_input_vectors(
        mfcc_coeff_vectors_dict, min_frames)
    # frames = min_frames
    # print frames
    # print len(processed_mfcc_coeff['1'])
    # for each_vector in processed_mfcc_coeff['1']:
    #     print len(each_vector)
    print "mffcc found..."
    classes = ["happiness", "sadness"]

    training_data = ClassificationDataSet(
        26, target=1, nb_classes=2, class_labels=classes)
    # training_data = SupervisedDataSet(13, 1)
    try:
        network = NetworkReader.readFrom(
            'network_state_frame_level_new2_no_pp1.xml')
    except Exception:
        for i in range(1, 51):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            for each_vector in mfcc_coeff_vectors:
                training_data.appendLinked(each_vector, [1])

        for i in range(201, 251):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            for each_vector in mfcc_coeff_vectors:
                training_data.appendLinked(each_vector, [0])

        training_data._convertToOneOfMany()
        print "prepared training data.."
        print training_data.indim, training_data.outdim
        network = buildNetwork(
            training_data.indim, 5, training_data.outdim, fast=True)
        trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99)
        print "Before training...", trainer.testOnData(training_data)
        trainer.trainOnDataset(training_data, 1000)
        print "After training...", trainer.testOnData(training_data)
        NetworkWriter.writeToFile(
            network, "network_state_frame_level_new2_no_pp.xml")
Example 5
def main():
    print "Calculating mfcc...."
    mfcc_coeff_vectors_dict = {}
    for i in range(1, 201):
        extractor = FeatureExtractor('/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Happiness/HappinessAudios/' + str(i) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    for i in range(201, 401):
        extractor = FeatureExtractor('/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Sadness/SadnessAudios/' + str(i - 200) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    audio_with_min_frames, min_frames = get_min_frames_audio(mfcc_coeff_vectors_dict)
    processed_mfcc_coeff = preprocess_input_vectors(mfcc_coeff_vectors_dict, min_frames)
    frames = min_frames
    print "mfcc found...."
    classes = ["happiness", "sadness"]
    try:
        network = NetworkReader.readFrom('network_state_new_.xml')
    except:
        # Create new network and start Training
        training_data = ClassificationDataSet(frames * 26, target=1, nb_classes=2, class_labels=classes)
        # training_data = SupervisedDataSet(frames * 39, 1)
        for i in range(1, 151):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            training_data.appendLinked(mfcc_coeff_vectors.ravel(), [1])
            # training_data.addSample(mfcc_coeff_vectors.ravel(), [1])

        for i in range(201, 351):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            training_data.appendLinked(mfcc_coeff_vectors.ravel(), [0])
            # training_data.addSample(mfcc_coeff_vectors.ravel(), [0])

        training_data._convertToOneOfMany()
        network = buildNetwork(training_data.indim, 5, training_data.outdim)
        trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99)
        print "Before training...", trainer.testOnData(training_data)
        trainer.trainOnDataset(training_data, 1000)
        print "After training...", trainer.testOnData(training_data)
        NetworkWriter.writeToFile(network, "network_state_new_.xml")

    print "*" * 30 , "Happiness Detection", "*" * 30
    for i in range(151, 201):
        output = network.activate(processed_mfcc_coeff[str(i)].ravel())
        # print output,
        # if output > 0.7:
        #     print "happiness"
        class_index = max(xrange(len(output)), key=output.__getitem__)
        class_name = classes[class_index]
        print class_name
Example 6
def ANN(
    trainFeature, trainLabel, testFeature, testLabel, netStructure, para_rate,
    para_momentum
):  # netStructure is a list [in, hidden1, hidden2, out]; momentum is a parameter of the gradient-descent update
    sampleNum = trainFeature.shape[0]
    featureNum = trainFeature.shape[1]
    Dataset = SupervisedDataSet(featureNum, 1)
    for i in range(sampleNum):
        print(i)
        Dataset.addSample(list(trainFeature[i]), [trainLabel[i]])
    Network = buildNetwork(netStructure[0],
                           netStructure[1],
                           netStructure[2],
                           netStructure[3],
                           hiddenclass=SigmoidLayer,
                           outclass=SigmoidLayer)
    T = BackpropTrainer(Network,
                        Dataset,
                        learningrate=para_rate,
                        momentum=para_momentum,
                        verbose=True)
    #print(Dataset['input'])
    # keep training one epoch at a time until the MSE improves by less than 1e-4
    # (the original compared a fresh testOnData() against the value just appended,
    # so the loop always exited after a single pass)
    errorList = []
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    errorList.append(T.testOnData(Dataset))
    while abs(errorList[-1] - errorList[-2]) > 0.0001:
        T.trainOnDataset(Dataset)
        errorList.append(T.testOnData(Dataset))
    # output the predicted labels for the training set
    print(np.array([Network.activate(x) for x in trainFeature]))
    #print(testLabel)
    print(
        Network.activate([
            0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0
        ]))
    return errorList
Example 7
    def train(network_file, input_length, output_length, training_data_file,
              learning_rate, momentum, stop_on_convergence, epochs, classify):
        n = get_network(network_file)
        if classify:
            ds = ClassificationDataSet(int(input_length),
                                       int(output_length) * 2)
        else:
            ds = SupervisedDataSet(int(input_length), int(output_length))
        training_data = get_training_data(training_data_file)

        NetworkManager.last_training_set_length = 0
        for line in training_data:
            data = [float(x) for x in line.strip().split(',') if x != '']
            input_data = tuple(data[:(int(input_length))])
            output_data = tuple(data[(int(input_length)):])
            ds.addSample(input_data, output_data)
            NetworkManager.last_training_set_length += 1

        # convert class labels to one-of-many representation only after the
        # samples have been added (converting an empty dataset does nothing)
        if classify:
            ds._convertToOneOfMany()

        t = BackpropTrainer(n,
                            learningrate=learning_rate,
                            momentum=momentum,
                            verbose=True)
        print "training network " + network_storage_path + network_file

        if stop_on_convergence:
            t.trainUntilConvergence(ds, epochs)
        else:
            t.trainOnDataset(ds, epochs)

        error = t.testOnData()
        print "training done"
        if not math.isnan(error):
            save_network(n, network_file)
            print "network saved"
        else:
            print "error occurred, network not saved"

        return error
Example 8
    value_network = buildNetwork(1,
                                 40,
                                 20,
                                 1,
                                 hiddenclass=SigmoidLayer,
                                 bias=True)
    value_trainer = BackpropTrainer(value_network,
                                    learningrate=0.01,
                                    momentum=0.00,
                                    verbose=True)

    print value_network

    cost_network = buildNetwork(1,
                                40,
                                20,
                                1,
                                hiddenclass=SigmoidLayer,
                                bias=True)
    cost_trainer = BackpropTrainer(cost_network,
                                   learningrate=0.01,
                                   momentum=0.00,
                                   verbose=True)

    print 'Value MSE before: %.4f' % value_trainer.testOnData(eval_dataset)
    value_trainer.trainUntilConvergence(dataset,
                                        continueEpochs=6,
                                        maxEpochs=500)
    #    value_trainer.trainOnDataset(dataset, 1000)
    print 'Value MSE after: %.4f' % value_trainer.testOnData(eval_dataset)

    print 'Cost MSE before: %.4f' % cost_trainer.testOnData(eval_costset)
    cost_trainer.trainUntilConvergence(costset,
                                       continueEpochs=6,
                                       maxEpochs=500)
    #    cost_trainer.trainOnDataset(costset, 1000)
    print 'Cost MSE after: %.4f' % cost_trainer.testOnData(eval_costset)
    #    print cost_network.params

    f_value = open('../data2d/valueplot2ddata.txt', 'w')
Example 9
from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer


dataset = SupervisedDataSet(2, 1)

dataset.addSample([1, 1], [0])
dataset.addSample([1, 0], [1])
dataset.addSample([0, 1], [1])
dataset.addSample([0, 0], [0])

network = buildNetwork(dataset.indim, 2, dataset.outdim, bias=True)

trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.99)

for epoch in range(1000):
    trainer.train()

test_data = SupervisedDataSet(2, 1)
test_data.addSample([1, 1], [0])
test_data.addSample([1, 0], [1])
test_data.addSample([0, 1], [1])
test_data.addSample([0, 0], [0])

trainer.testOnData(test_data, verbose=True)
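The trained network can also be queried one pattern at a time with activate(); for the XOR data above the outputs should approach the target values:

print(network.activate([1, 0]))  # expected to approach [1]
print(network.activate([1, 1]))  # expected to approach [0]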
Example 10
    for counter, item in enumerate(train_data):
        D.addSample(item, answer[counter])

    #print D['target']

    #Create the NN
    N = buildNetwork(len(train_data[0]), 200, 1, bias=True)  #152 76=(152+1)/2

    #Train the NN with backpropagation
    T = BackpropTrainer(N, D, learningrate=0.1, momentum=0.9)

    i = 0
    error = []
    time_before = time.time()
    while i < 50 and T.testOnData(D) > 0.001:
        errordata = T.testOnData(D)

        if i % 1 == 0:
            print i, '\tMSE:', round(errordata, 6), '\tTime:', round(
                time.time() - time_before, 6)

        #Store the error in a list to plot
        error.append(errordata)

        T.train()
        i += 1

    #print 'It took ', time.time()-time_before, ' seconds to train the NN'

    #Display the error in a chart
Example 11
        D.addSample(train_data[counter], answer[counter])

    #print D['target']


    #Create the NN
    N = buildNetwork(len(train_data[0]), 200, 1, bias=True)  # 152 76=(152+1)/2


    #Train the NN with backpropagation
    T = BackpropTrainer(N, D, learningrate=0.1, momentum=0.9)

    i = 0
    error = []
    time_before = time.time()
    while i < 50 and T.testOnData(D) > 0.001:
        errordata = T.testOnData(D)

        if i % 1 == 0:
            print i, '\tMSE:', round(errordata, 6), '\tTime:', round(time.time() - time_before, 6)

        #Store the error in a list to plot
        error.append(errordata)

        T.train()
        i += 1

    #print 'It took ', time.time()-time_before, ' seconds to train the NN'

    #Display the error in a chart
    plot(error)
Example 12
'''
Created on Nov 21, 2011

@author: reza
'''
from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer

if __name__ == '__main__':
    dataset = SupervisedDataSet(2, 1)
    dataset.addSample([0, 0], [0])
    dataset.addSample([0, 1], [1])
    dataset.addSample([1, 0], [1])
    dataset.addSample([1, 1], [0])
    
    network = buildNetwork(2, 4, 1)
    trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.2,
                              verbose=False)
    
    print 'MSE before', trainer.testOnData(dataset)
    trainer.trainOnDataset(dataset, 1000)
    print 'MSE after', trainer.testOnData(dataset)
    
    z = network.activate([0, 0])
    print z
    
    print 'Final Weights: ', network.params
    
    
Example 13
    # initialize dataset for the neural network with 5 inputs and 1 target
    DS = SupervisedDataSet(5, 1)

    # add the samples to the dataset
    for i in range(0, len(normal_array[0])):
        #       DS.addSample([normal_array[0][i],normal_array[1][i],normal_array[2][i],normal_array[3][i],normal_array[4][i]],[normal_array[5][i],normal_array[6][i],normal_array[7][i]])
        DS.addSample([
            normal_array[0][i], normal_array[1][i], normal_array[2][i],
            normal_array[3][i], normal_array[4][i]
        ], [normal_array[5][i]])

#    NN = buildNetwork(5,4,3,bias =True,hiddenclass=TanhLayer)
    NN = buildNetwork(DS.indim, 5, DS.outdim, bias=True, hiddenclass=TanhLayer)
    TRAINER = BackpropTrainer(NN, dataset=DS, learningrate=0.01, momentum=0.99)

    print 'MSE before', TRAINER.testOnData(DS)
    TRAINER.trainOnDataset(DS, 500)
    print 'MSE after', TRAINER.testOnData(DS)

    # testing
    #clearing arrays
    normal_array = [[], [], [], [], [], [], [], []]
    normalized_input = [[], [], [], [], []]
    max_array = [[], [], [], [], [], [], [], []]
    min_array = [[], [], [], [], [], [], [], []]
    range_array = [[], [], [], [], [], [], [], []]
    x_axis = []
    pred_arr = []
    act_arr = []
    training_normalization(args.f2, args.min, args.max)
    for i in range(len(normal_array[0])):
Example 14
    prediction = []
   
    training_normalization(args.f1, args.min, args.max)
    # initialize dataset for the neural network with 5 inputs and 1 target
    DS = SupervisedDataSet(5, 1)

    # add the samples to the dataset
    for i in range(0, len(normal_array[0])):
        # DS.addSample([normal_array[0][i],normal_array[1][i],normal_array[2][i],normal_array[3][i],normal_array[4][i]],[normal_array[5][i],normal_array[6][i],normal_array[7][i]])
        DS.addSample([normal_array[0][i], normal_array[1][i], normal_array[2][i], normal_array[3][i], normal_array[4][i]], [normal_array[5][i]])

    # NN = buildNetwork(5,4,3,bias=True,hiddenclass=TanhLayer)
    NN = buildNetwork(DS.indim, 5, DS.outdim, bias=True, hiddenclass=TanhLayer)
    TRAINER = BackpropTrainer(NN, dataset=DS, learningrate=0.01, momentum=0.99)

    print 'MSE before', TRAINER.testOnData(DS)
    TRAINER.trainOnDataset(DS, 500)
    print 'MSE after', TRAINER.testOnData(DS)

    # testing
    # clearing arrays
    normal_array = [[], [], [], [], [], [], [], []]
    normalized_input = [[], [], [], [], []]
    max_array = [[], [], [], [], [], [], [], []]
    min_array = [[], [], [], [], [], [], [], []]
    range_array = [[], [], [], [], [], [], [], []]
    x_axis = []
    pred_arr = []
    act_arr = []
    training_normalization(args.f2, args.min, args.max)
    for i in range(len(normal_array[0])):
Example 15
class NeuralNetwork(object):
    '''Neural network wrapper for the pybrain implementation
    '''

    def __init__(self):
        self.path = os.path.dirname(os.path.abspath(__file__)) + "/../../data/"
        self.net = None
        self.trainer = None

    def createNew(self, nInputs, nHiddenLayers, nOutput, bias):
        '''builds a new neural network

        :param int nInputs: the number of input nodes
        :param int nHiddenLayers: the number of neurons in the hidden layer
        :param int nOutput: the number of output nodes
        :param bool bias: if True a bias node will be added

        :return: instance of a new neural network
        :rtype: NeuralNetwork
        '''
        self.net = buildNetwork(nInputs, nHiddenLayers, nOutput, bias=bias, hiddenclass=TanhLayer)
        return self

    def train(self, dataset, maxEpochs=10, learningrate=0.01, momentum=0.99, continueEpochs=10, validationProportion=0.25):
        '''trains a network with the given dataset

        :param SupervisedDataSet dataset: the training dataset
        :param int maxEpochs: max number of iterations to train the network
        :param float learningrate: the step size used for each weight update
        :param float momentum: helps escape local minima while training, to get better results
        '''
        self.trainer = BackpropTrainer(self.net, learningrate=learningrate, momentum=momentum)
        self.trainer.trainOnDataset(dataset, maxEpochs)

    def trainConvergence(self, dataset, maxEpochs=10, learningrate=0.01, momentum=0.99, continueEpochs=10, validationProportion=0.25):
        '''trains a network with the given dataset until it converges

        :param SupervisedDataSet dataset: the training dataset
        :param int maxEpochs: max number of iterations to train the network
        :param float learningrate: the step size used for each weight update
        :param float momentum: helps escape local minima while training, to get better results
        '''
        self.trainer = BackpropTrainer(self.net, learningrate=learningrate, momentum=momentum)
        self.trainer.trainUntilConvergence(dataset, maxEpochs, False, continueEpochs, validationProportion)

    def test(self, data=None, verbose=False):
        if not self.trainer:
            raise ValueError("call train() first, to create a valid trainer object") 
        return self.trainer.testOnData(data, verbose)

    def activate(self, value, rnd=False):
        output = self.net.activate(value)[0]
        if rnd:
            return self._clazz(output)
        return output

    def _clazz(self, output):
        # clamp the rounded output to the {0, 1} class range
        clazz = round(output)
        if clazz < 0:
            return 0
        if clazz > 1:
            return 1
        return int(clazz)

    def save(self, name):
        '''saves the neural network

        :param string name: filename for the network to be saved
        '''
        f = open(self.path + name + FILE_EXTENSION, 'w')
        pickle.dump(self.net, f)
        f.close()

    def load(self, name):
        '''loads the neural network

        :param string name: filename for the network to be loaded

        :return: instance of a saved neural network
        :rtype: NeuralNetwork
        '''
        f = open(self.path + name + FILE_EXTENSION, 'r')
        self.net = pickle.load(f)
        f.close()
        return self

    def __repr__(self):
        return "%s\n%s" % (self.__class__.__name__, str(self.net))
Example 16
def base_experiment():
    (eval_dataset, eval_costset) = DomainFnApprox.make_evaluation_datasets()

    random_train_dataset = SupervisedDataSet(2, 1)
    random_train_costset = SupervisedDataSet(2, 1)
    for i in range(RANDOM_TRAINING_SAMPLES):
        x = random.uniform(X_MIN, X_MAX)
        y = random.uniform(Y_MIN, Y_MAX)
        z = FN(x, y)
        z_cost = COST_FN(x, y)
        random_train_dataset.addSample([x, y], [z])
        random_train_costset.addSample([x, y], [z_cost])

    value_network = buildNetwork(2,
                                 80,
                                 20,
                                 1,
                                 hiddenclass=SigmoidLayer,
                                 bias=True)
    value_trainer = BackpropTrainer(value_network,
                                    learningrate=LEARNING_RATE,
                                    momentum=MOMENTUM,
                                    verbose=True)

    print 'Value Network Topology:'
    print value_network

    cost_network = buildNetwork(2,
                                80,
                                20,
                                1,
                                hiddenclass=SigmoidLayer,
                                bias=True)
    cost_trainer = BackpropTrainer(cost_network,
                                   learningrate=LEARNING_RATE,
                                   momentum=MOMENTUM,
                                   verbose=True)

    #    test_derivatives(value_network, [1, 1])
    #    test_derivatives(cost_network, [1, 1])

    print 'Value MSE before: %.4f' % value_trainer.testOnData(eval_dataset)
    value_trainer.trainUntilConvergence(random_train_dataset,
                                        continueEpochs=6,
                                        maxEpochs=MAX_EPOCHS)
    #    value_trainer.trainOnDataset(random_train_dataset, 1000)
    print 'Value MSE after: %.4f' % value_trainer.testOnData(eval_dataset)

    print 'Cost MSE before: %.4f' % cost_trainer.testOnData(eval_costset)
    cost_trainer.trainUntilConvergence(random_train_costset,
                                       continueEpochs=6,
                                       maxEpochs=MAX_EPOCHS)
    #    cost_trainer.trainOnDataset(random_train_costset, 1000)
    print 'Cost MSE after: %.4f' % cost_trainer.testOnData(eval_costset)

    #    test_derivatives(value_network, [1, 1])
    #    test_derivatives(cost_network, [1, 1])

    f_value = open('../data/learnedvalue.txt', 'w')
    f_cost = open('../data/learnedcost.txt', 'w')
    unit = (X_MAX - X_MIN) / (EVAL_SAMPLES_AXIS - 1)
    for i in range(EVAL_SAMPLES_AXIS):
        for j in range(EVAL_SAMPLES_AXIS):
            x = X_MIN + i * unit
            y = Y_MIN + j * unit
            z = value_network.activate([x, y])
            z_cost = cost_network.activate([x, y])
            f_value.write('%f %f %f\n' % (x, y, z[0]))
            f_cost.write('%f %f %f\n' % (x, y, z_cost[0]))
    f_value.close()
    f_cost.close()
Example 17
class NeuralNetwork(object):
    '''Neural network wrapper for the pybrain implementation
    '''
    def __init__(self):
        self.path = os.path.dirname(
            os.path.abspath(__file__)) + "/../../../data/"
        self.net = None
        self.trainer = None

    def createNew(self, nInputs, nHiddenLayers, nOutput, bias):
        '''builds a new neural network

        :param int nInputs: the number of input nodes
        :param int nHiddenLayers: the number of neurons in the hidden layer
        :param int nOutput: the number of output nodes
        :param bool bias: if True a bias node will be added

        :return: instance of a new neural network
        :rtype: NeuralNetwork
        '''
        self.net = buildNetwork(nInputs,
                                nHiddenLayers,
                                nOutput,
                                bias=bias,
                                hiddenclass=TanhLayer)
        return self

    def train(self,
              dataset,
              maxEpochs=10,
              learningrate=0.01,
              momentum=0.99,
              continueEpochs=10,
              validationProportion=0.25):
        '''trains a network with the given dataset

        :param SupervisedDataSet dataset: the training dataset
        :param int maxEpochs: max number of iterations to train the network
        :param float learningrate: the step size used for each weight update
        :param float momentum: helps escape local minima while training, to get better results
        '''
        self.trainer = BackpropTrainer(self.net,
                                       learningrate=learningrate,
                                       momentum=momentum)
        self.trainer.trainOnDataset(dataset, maxEpochs)

    def trainConvergence(self,
                         dataset,
                         maxEpochs=10,
                         learningrate=0.01,
                         momentum=0.99,
                         continueEpochs=10,
                         validationProportion=0.25):
        '''trains a network with the given dataset until it converges

        :param SupervisedDataSet dataset: the training dataset
        :param int maxEpochs: max number of iterations to train the network
        :param float learningrate: the step size used for each weight update
        :param float momentum: helps escape local minima while training, to get better results
        '''
        self.trainer = BackpropTrainer(self.net,
                                       learningrate=learningrate,
                                       momentum=momentum)
        self.trainer.trainUntilConvergence(dataset, maxEpochs, False,
                                           continueEpochs,
                                           validationProportion)

    def test(self, data=None, verbose=False):
        if not self.trainer:
            raise ValueError(
                "call train() first, to create a valid trainer object")
        return self.trainer.testOnData(data, verbose)

    def activate(self, value, rnd=False):
        output = self.net.activate(value)[0]
        if rnd:
            return self._clazz(output)
        return output

    def _clazz(self, output):
        # clamp the rounded output to the {0, 1} class range
        clazz = round(output)
        if clazz < 0:
            return 0
        if clazz > 1:
            return 1
        return int(clazz)

    def _createFilePath(self, filePath, defaultPath=True):
        if defaultPath:
            return self.path + filePath + FILE_EXTENSION
        return filePath + FILE_EXTENSION

    def save(self, filePath, defaultPath=True):
        '''saves the neural network

        :param string filePath: filepath for the network to be saved
        '''
        with open(self._createFilePath(filePath, defaultPath), 'w') as f:
            pickle.dump(self.net, f)

    def load(self, filePath, defaultPath=True):
        '''loads the neural network

        :param string filePath: filepath for the network to be loaded

        :return: instance of a saved neural network
        :rtype: NeuralNetwork
        '''
        with open(self._createFilePath(filePath, defaultPath), 'r') as f:
            self.net = pickle.load(f)
            return self

    def __repr__(self):
        return "%s\n%s" % (self.__class__.__name__, str(self.net))
Example 18
class NeuralNetwork:
	""" Neural network class """
	def __init__(self, num_inputs, num_outputs, num_hidden_layers):	### Neural Network Variables ###
		self.num_inputs = num_inputs				# Number of inputs
		self.num_outputs = num_outputs				# Number of outputs
		self.num_hidden_layers = num_hidden_layers		# Number of hidden layers
		self.dataset = None					# Dataset object
		self.network = None					# Network object
		self.backprop = None					# Backpropagation object
		self.epochs = 0						# Epochs
		self.RMSE = 0						# RMSE
		self.training_time = 0					# Calculated training time
		logger.info("New neural network object created.")

	def run(self, operator, epochs):
		logger.info("Starting up neural network for %s.", operator)
		self.epochs = epochs
		self.buildDataset()
		self.buildNetwork()

		# XOR data
		if operator == "XOR":
			self.addData([0,0], [0])
			self.addData([0,1], [1])
			self.addData([1,0], [1])
			self.addData([1,1], [0])

		# NXOR data
		if operator == "NXOR":
			self.addData([0,0], [1])
			self.addData([0,1], [0])
			self.addData([1,0], [0])
			self.addData([1,1], [1])

		self.backProp()
		self.testData()
		self.trainData()
		self.testData()
		return

	def buildDataset(self):
		self.dataset = SupervisedDataSet(self.num_inputs, self.num_outputs)
		logger.info("Dataset object built.")
		return True

	def addData(self, data_in, data_out):
		self.dataset.addSample(data_in, data_out)
		logger.info("Data appended.")
		return True

	def buildNetwork(self):
		self.network = buildNetwork(self.num_inputs, self.num_hidden_layers, self.num_outputs)
		logger.info("Network created.")
		return True

	def backProp(self):
		self.backprop = BackpropTrainer(self.network, learningrate=0.01, momentum=0.99)
		logger.info("Back propagation trainer created.")
		return True

	def testData(self):
		self.RMSE = math.sqrt(self.backprop.testOnData(self.dataset))
		logger.info('RMSE: %s', str(self.RMSE))
		return True

	def trainData(self):
		start = timeit.default_timer()
		self.backprop.trainOnDataset(self.dataset, self.epochs)
		stop = timeit.default_timer()
		self.training_time = stop - start
		logger.info("Training time: %s", str(self.training_time))
		return True
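A minimal driver for this class, assuming the module-level logger it uses is configured and that math, timeit, and the PyBrain imports are available in the same module:

import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

nn = NeuralNetwork(num_inputs=2, num_outputs=1, num_hidden_layers=4)
nn.run("XOR", 1000)  # builds the dataset and network, then logs RMSE before and after training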
Example 19
__author__ = 'Stubborn'


from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer

D = SupervisedDataSet(2, 1)
# 2 inputs --> 1 output

D.addSample([0,0], [0])
D.addSample([0,1], [1])
D.addSample([1,0], [1])
D.addSample([1,1], [0])
# the four input combinations and their outputs for the XOR function

N = buildNetwork(2, 4, 1)
# a multilayer perceptron with one hidden layer

T = BackpropTrainer(N, learningrate=0.01, momentum=0.99)
# momentum adds a fraction of the previous weight update to the current update

print('MSE before', T.testOnData(D))
T.trainOnDataset(D, 1000)
T.trainUntilConvergence()
print('MSE after', T.testOnData(D))
print(D)
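After training, the network itself can be activated one pattern at a time; for the XOR data above each output should move toward its 0 or 1 target:

for sample in ([0, 0], [0, 1], [1, 0], [1, 1]):
    print(sample, N.activate(sample))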