Example #1
def buildNN2HiddenLayer(trnData, netNo):
    from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
    from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
    from pybrain.structure import FullConnection

    n = FeedForwardNetwork()
    inLayer = LinearLayer(trnData.indim)  # Define Layer Types
    if netNo == 1 or netNo == 3:
        # hiddenLayer0neurons and hiddenLayer1neurons are module-level
        # globals, assumed to be defined before this function is called
        hiddenLayer0 = TanhLayer(hiddenLayer0neurons)
        hiddenLayer1 = SigmoidLayer(hiddenLayer1neurons)
    elif netNo == 2:
        # netNo == 2 swaps the two hidden layer sizes
        hiddenLayer0 = TanhLayer(hiddenLayer1neurons)
        hiddenLayer1 = SigmoidLayer(hiddenLayer0neurons)

    outLayer = SoftmaxLayer(trnData.outdim)  # SoftmaxLayer

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer0)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)  # Define connections
    hidden0_to_hidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden0)
    n.addConnection(hidden0_to_hidden1)
    n.addConnection(hidden1_to_out)
    n.sortModules()
    return n
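
A minimal usage sketch for the function above. Note that hiddenLayer0neurons and hiddenLayer1neurons must already exist as globals in the module that defines the function, and trnData is assumed to be a pybrain ClassificationDataSet:

hiddenLayer0neurons = 10   # assumed module-level globals read by the function
hiddenLayer1neurons = 6
net = buildNN2HiddenLayer(trnData, netNo=1)
probs = net.activate(trnData.getSample(0)[0])   # softmax output, one value per class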
Example #2
    def create(number_of_hidden_layers, activation_function, input_length,
               output_length, network_file, classify):
        n = FeedForwardNetwork()
        in_layer = LinearLayer(input_length)
        n.addInputModule(in_layer)

        layer_to_connect_to = in_layer
        for _ in range(number_of_hidden_layers):
            if activation_function == 'sigmoid':
                hidden_layer = SigmoidLayer(input_length)
            else:
                hidden_layer = TanhLayer(input_length)

            n.addModule(hidden_layer)
            hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                     hidden_layer)
            n.addConnection(hidden_layer_connection)
            layer_to_connect_to = hidden_layer

        if classify:
            out_layer = SoftmaxLayer(output_length)
        else:
            out_layer = LinearLayer(output_length)
        n.addOutputModule(out_layer)

        hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
        n.addConnection(hidden_to_out)
        n.sortModules()
        save_network(n, network_file)  # save_network: helper defined elsewhere in the project
Example #3
    def __init__(self, genes=None):

        self.net = FeedForwardNetwork()
        self.inLayer = TanhLayer(16)
        self.hiddenLayer = TanhLayer(20)
        self.hiddenLayer2 = TanhLayer(20)
        self.outLayer = SoftmaxLayer(4)

        self.net.addInputModule(self.inLayer)
        self.net.addModule(self.hiddenLayer)
        self.net.addModule(self.hiddenLayer2)
        self.net.addOutputModule(self.outLayer)

        self.in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        self.hidden1_to_hidden2 = FullConnection(self.hiddenLayer, self.hiddenLayer2)
        self.hidden2_to_out = FullConnection(self.hiddenLayer2, self.outLayer)

        self.net.addConnection(self.in_to_hidden)
        self.net.addConnection(self.hidden1_to_hidden2)
        self.net.addConnection(self.hidden2_to_out)

        self.net.sortModules()

        # Set the params to the provided params
        if genes is not None:
            self.net._setParameters(genes)
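
Because the constructor takes a flat genes vector and pushes it into the network with _setParameters, a genetic algorithm can round-trip weights through net.params. A minimal sketch, where Agent stands in for whatever class owns this __init__ and numpy is imported as np (both assumptions):

parent = Agent()
genes = parent.net.params.copy()   # flat numpy array of all connection weights
mutated = genes + 0.1 * np.random.randn(len(genes))
child = Agent(genes=mutated)       # child network starts from the mutated weights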
Example #4
File: BrainTest.py Project: niekai1982/APC
def build_fnn():
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(2)
    hiddenLayer = TanhLayer(50)
    outLayer = SoftmaxLayer(2)
    fnn.addInputModule(inLayer)
    fnn.addModule(hiddenLayer)
    fnn.addOutputModule(outLayer)
    # Without connections and sortModules() the network cannot be activated
    fnn.addConnection(FullConnection(inLayer, hiddenLayer))
    fnn.addConnection(FullConnection(hiddenLayer, outLayer))
    fnn.sortModules()
    return fnn
Example #5
def build_ann(indim, outdim):
    ann = FeedForwardNetwork()

    ann.addInputModule(LinearLayer(indim, name='in'))
    ann.addModule(SigmoidLayer(5, name='hidden'))
    #    ann.addModule(Normal(2,name='hidden'))
    ann.addOutputModule(SoftmaxLayer(outdim, name='out'))
    ann.addModule(BiasUnit(name='bias'))

    ann.addConnection(FullConnection(ann['in'], ann['hidden']))
    ann.addConnection(FullConnection(ann['hidden'], ann['out']))
    #    ann.addConnection(FullConnection(ann['in'], ann['out']))
    ann.addConnection(FullConnection(ann['bias'], ann['out']))
    ann.addConnection(FullConnection(ann['bias'], ann['hidden']))

    ann.sortModules()

    #    ann = buildNetwork(indim, outdim, outclass=SoftmaxLayer)
    return ann
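
Since the output module is a SoftmaxLayer, activate() returns a vector that sums to 1 and can be read as class probabilities. A minimal sketch:

ann = build_ann(indim=4, outdim=3)
probs = ann.activate([0.1, 0.5, 0.2, 0.9])   # length-3 vector summing to 1
predicted = probs.argmax()                   # index of the most likely class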
Example #6
    def PrepareModel(self, savedmodel=None):

        if savedmodel is not None:
            self.trainer = savedmodel
        else:
            attributescount = len(self.traindata[0])
            nrclass = len(set(self.trainlabel))
            self.ds = ClassificationDataSet(attributescount,
                                            target=1,
                                            nb_classes=nrclass,
                                            class_labels=list(
                                                set(self.trainlabel)))

            for i in range(len(self.traindata)):
                self.ds.appendLinked(self.traindata[i], [self.trainlabel[i]])
            self.ds._convertToOneOfMany()

            self.net = FeedForwardNetwork()
            inLayer = LinearLayer(len(self.traindata[0]))
            self.net.addInputModule(inLayer)
            hiddenLayers = []
            for i in range(self.hiddenlayerscount):
                hiddenLayer = SigmoidLayer(self.hiddenlayernodescount)
                hiddenLayers.append(hiddenLayer)
                self.net.addModule(hiddenLayer)
            outLayer = SoftmaxLayer(nrclass)
            self.net.addOutputModule(outLayer)

            layers_connections = []
            layers_connections.append(FullConnection(inLayer, hiddenLayers[0]))
            for i in range(self.hiddenlayerscount - 1):
                layers_connections.append(
                    FullConnection(hiddenLayers[i], hiddenLayers[i + 1]))
            layers_connections.append(
                FullConnection(hiddenLayers[-1], outLayer))

            for layers_connection in layers_connections:
                self.net.addConnection(layers_connection)
            self.net.sortModules()

            #training the network
            self.trainer = BackpropTrainer(self.net, self.ds)
            self.trainer.train()
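
Once PrepareModel has run, predictions come straight from the trained network. A minimal sketch, where model is an instance of the class that owns this method and sample is a feature vector of the right length (both hypothetical):

model.PrepareModel()
probs = model.net.activate(sample)   # softmax output, one probability per class
predicted_class = probs.argmax()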
Example #7
def network(dataset, input_list):
    num_words = len(input_list)
    #dividing the dataset into training and testing data
    tstdata, trndata = dataset.splitWithProportion(0.25)

    #building the network
    net = RecurrentNetwork()
    input_layer1 = LinearLayer(num_words, name='input_layer1')
    input_layer2 = LinearLayer(num_words, name='input_layer2')
    hidden_layer = TanhLayer(num_words, name='hidden_layer')
    output_layer = SoftmaxLayer(num_words, name='output_layer')
    net.addInputModule(input_layer1)
    net.addInputModule(input_layer2)
    net.addModule(hidden_layer)
    net.addOutputModule(output_layer)
    net.addConnection(
        FullConnection(input_layer1, hidden_layer, name='in1_to_hidden'))
    net.addConnection(
        FullConnection(input_layer2, hidden_layer, name='in2_to_hidden'))
    net.addConnection(
        FullConnection(hidden_layer, output_layer, name='hidden_to_output'))
    net.addConnection(
        FullConnection(input_layer1, output_layer, name='in1_to_out'))
    net.addConnection(
        FullConnection(input_layer2, output_layer, name='in2_to_out'))
    net.sortModules()
    #backpropagation
    trainer = BackpropTrainer(net,
                              dataset=trndata,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)
    #error checking part
    for i in range(10):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['target'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['target'])
        print "epoch: %4d" % trainer.totalepochs
        print "  train error: %5.10f%%" % trnresult
        print "  test error: %5.10f%%" % tstresult
    return net
Example #8
num_coeff = 26
max_freq = 8000
min_freq = 0
melArray = np.linspace(FEXT.freqToMel(min_freq), FEXT.freqToMel(max_freq),
                       num_coeff + 2)
freqArray = FEXT.melToFreq(melArray)
freqArray_bin = np.floor(513 * freqArray / 16000)
centralPoints = freqArray_bin[1:21]
freqbank = np.zeros((26, 257))

LSTMre = RecurrentNetwork()

LSTMre.addInputModule(LinearLayer(39, name='input'))
LSTMre.addModule(LSTMLayer(50, name='LSTM_hidden'))
LSTMre.addOutputModule(SoftmaxLayer(5, name='out'))
LSTMre.addConnection(
    FullConnection(LSTMre['input'], LSTMre['LSTM_hidden'], name='c1'))
LSTMre.addConnection(
    FullConnection(LSTMre['LSTM_hidden'], LSTMre['out'], name='c2'))
LSTMre.addRecurrentConnection(
    FullConnection(LSTMre['LSTM_hidden'], LSTMre['LSTM_hidden'], name='c3'))
LSTMre.sortModules()
ds = SupervisedDataSet(39, 5)


for i in range(1, 27):
    start, center, stop = int(freqArray_bin[i - 1]), int(
        freqArray_bin[i]), int(freqArray_bin[i + 1])
    temp = np.zeros(257)
Example #9
def ANN(X_train, Y_train, X_test, Y_test, *args):
    """
    An artificial neural network, based on the Python library pybrain. In the future this function
    should be modified to use the SkyNet ANN code instead.

    INPUTS:
    X_train - An array containing the features of the training set, of size (N_samples, N_features)
    Y_train - An array containing the class labels of the training set, of size (N_samples,)
    X_test - An array containing the features of the testing set, of size (N_samples, N_features)
    Y_test - An array containing the class labels of the testing set, of size (N_samples,)
    *args - Currently unused. In the future this could specify the network architecture and the
                activation function at each node.

    OUTPUTS:
    probs - An array containing the probabilities of each class for each member of the testing set,
                of size (N_samples, N_classes)
    """
    
    Y_train_copy = Y_train.copy()
    Y_test_copy = Y_test.copy()

    #Convert class labels from 1,2,3 to 0,1,2 as _convertToOneOfMany requires this
    Y_train_copy[(Y_train_copy==1)]=0
    Y_train_copy[(Y_train_copy==2)]=1
    Y_train_copy[(Y_train_copy==3)]=2

    Y_test_copy[(Y_test_copy==1)]=0
    Y_test_copy[(Y_test_copy==2)]=1
    Y_test_copy[(Y_test_copy==3)]=2
    
    #Put all the data in datasets as required by pybrain
    Y_train_copy = np.expand_dims(Y_train_copy, axis=1)
    Y_test_copy = np.expand_dims(Y_test_copy, axis=1)
    traindata = ClassificationDataSet(X_train.shape[1], nb_classes = len(np.unique(Y_train_copy))) #Preallocate dataset
    traindata.setField('input', X_train) #Add named fields
    traindata.setField('target', Y_train_copy) 
    traindata._convertToOneOfMany() #Convert classes 0, 1, 2 to 001, 010, 100

    testdata = ClassificationDataSet(X_test.shape[1], nb_classes=len(np.unique(Y_test_copy)))
    testdata.setField('input', X_test)
    testdata.setField('target', Y_test_copy)
    testdata._convertToOneOfMany()

    #Create ANN with n_features inputs, n_classes outputs and HL_size nodes in hidden layers
    N = pb.FeedForwardNetwork()
    HL_size1 = X_train.shape[1]*2+2
    HL_size2 = X_train.shape[1]*2+2
    
    #Create layers and connections
    in_layer = LinearLayer(X_train.shape[1])
    hidden_layer1 = SigmoidLayer(HL_size1)
    hidden_layer2 = SigmoidLayer(HL_size2)
    out_layer = SoftmaxLayer(len(np.unique(Y_test_copy))) #Normalizes output so as to sum to 1

    in_to_hidden1 = FullConnection(in_layer, hidden_layer1)
    hidden1_to_hidden2 = FullConnection(hidden_layer1, hidden_layer2)
    hidden2_to_out = FullConnection(hidden_layer2, out_layer)

    #Connect them up
    N.addInputModule(in_layer)
    N.addModule(hidden_layer1)
    N.addModule(hidden_layer2)
    N.addOutputModule(out_layer)
    N.addConnection(in_to_hidden1)
    N.addConnection(hidden1_to_hidden2)
    N.addConnection(hidden2_to_out)

    N.sortModules()

    #Create the backpropagation object
    trainer = BackpropTrainer(N, dataset=traindata,  momentum=0.1, verbose=False, weightdecay=0.01)

    #Train the network on the data for some number of epochs
    for counter in np.arange(40):
        trainer.train()

    #Run the network on testing data
    probs = N.activate(X_test[0, :])
    probs = np.expand_dims(probs, axis=0)

    for counter in np.arange(X_test.shape[0]-1):
        next_probs = N.activate(X_test[counter+1, :])
        next_probs = np.expand_dims(next_probs, axis=0)
        probs = np.append(probs, next_probs, axis=0)
    
    return probs
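
A minimal call sketch on synthetic data, matching the shapes described in the docstring (all values here are made up for illustration):

import numpy as np
X_train = np.random.rand(100, 5)
Y_train = np.random.randint(1, 4, size=100)    # labels 1, 2, 3, as the function expects
X_test = np.random.rand(20, 5)
Y_test = np.random.randint(1, 4, size=20)
probs = ANN(X_train, Y_train, X_test, Y_test)  # shape (20, 3): class probabilities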
Example #10
conglomerateSet = []
conglomerateString = []

# Construct LSTM network
rnn = RecurrentNetwork()

inputSize = len(codeTable['a'].values)
outputSize = 4
hiddenSize = 10

rnn.addInputModule(LinearLayer(dim=inputSize, name='in'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='in_proc'))
rnn.addModule(LSTMLayer(dim=hiddenSize, peepholes=True, name='hidden'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='out_proc'))
rnn.addOutputModule(SoftmaxLayer(dim=outputSize, name='out'))

rnn.addConnection(FullConnection(rnn['in'], rnn['in_proc'], name='c1'))
rnn.addConnection(FullConnection(rnn['in_proc'], rnn['hidden'], name='c2'))
rnn.addRecurrentConnection(
    FullConnection(rnn['hidden'], rnn['hidden'], name='c3'))
rnn.addConnection(FullConnection(rnn['hidden'], rnn['out_proc'], name='c4'))
rnn.addConnection(FullConnection(rnn['out_proc'], rnn['out'], name='c5'))

rnn.sortModules()

# Construct dataset
trainingData = SequentialDataSet(inputSize, outputSize)

for index, row in df.iterrows():
    trainingData.newSequence()
Example #11
File: motko.py Project: Tiima/Motko
    def pybrain_init(self, input_amount=7, output_amount=8, hidden_layers=6):
        # TODO Randomize hidden classes, ongoing...
        # because threading
        random.jumpahead(1252157)
        self.hiddenLayerAmount = random.randint(1, hidden_layers * 2)
        self.hiddenLayerNeuronsAmount = []
        # layerlist = [LinearLayer,SigmoidLayer,TanhLayer, GaussianLayer, SoftmaxLayer]  # for future use
        self.ds = SupervisedDataSet(input_amount, output_amount)
        self.nn = FeedForwardNetwork()
        self.inLayer = LinearLayer(input_amount, "in")
        # self.bias = BiasUnit(name="bias")
        if (random.randint(0, 100) >= 50):
            # output layer type chosen at random: linear or softmax
            self.outLayer = LinearLayer(output_amount, "out")
        else:
            self.outLayer = SoftmaxLayer(output_amount, "out")
        self.hiddenlayers = []
        self.connections = []
        self.nn.addInputModule(self.inLayer)
        self.nn.addOutputModule(self.outLayer)
        # self.nn.addModule(self.bias)
        # self.nn.addConnection(FullConnection(self.inLayer, self.bias))

        for i in range(self.hiddenLayerAmount):
            # each hidden layer gets a random size and a random activation
            # (tanh or sigmoid); could instead use
            # random.randint(hidden_layers, hidden_layers * 10)?
            self.hiddenLayerNeuronsAmount.append(
                random.randint(1, hidden_layers * 2))
            if (random.randint(0, 100) >= 50):
                self.hiddenlayers.append(
                    TanhLayer(self.hiddenLayerNeuronsAmount[i],
                              "hidden{}".format(i)))
            else:
                self.hiddenlayers.append(
                    SigmoidLayer(self.hiddenLayerNeuronsAmount[i],
                                 "hidden{}".format(i)))

            if (i == 0):
                self.connections.append(
                    FullConnection(self.inLayer,
                                   self.hiddenlayers[i],
                                   name="in_to_hid"))
            else:
                self.connections.append(
                    FullConnection(self.hiddenlayers[i - 1],
                                   self.hiddenlayers[i],
                                   name="hid{}_to_hid{}".format(i - 1, i)))
            self.nn.addModule(self.hiddenlayers[i])

        self.connections.append(
            FullConnection(self.hiddenlayers[-1],
                           self.outLayer,
                           name="hid_to_out"))

        for connection in self.connections:
            self.nn.addConnection(connection)

        self.nn.sortModules()
Example #12
from pybrain.structure import RecurrentNetwork
from pybrain.structure import LSTMLayer, LinearLayer, SoftmaxLayer
from pybrain.structure import FullConnection
from vectorizer_engine import VectorizerEngine

# Initialize vector engine
vec_engine = VectorizerEngine()

# Initialize a recurrent network.
# The input layer is a linear layer whose dimension equals the vectorizer engine's word-vector dimension.
# The output layer is a softmax layer of the same dimension.
# The hidden layer is an LSTM layer of arbitrary size, say 5.
net = RecurrentNetwork()
in_layer = LinearLayer(vec_engine.word_vec_dim, name="input_layer")
hidden_layer = LSTMLayer(5, name="hidden_layer")
out_layer = SoftmaxLayer(vec_engine.word_vec_dim, name="out_layer")

# Connections between layers, plus a special hidden-to-hidden connection: the recurrent one
conn_in_to_hid = FullConnection(in_layer, hidden_layer, name="in_to_hidden")
conn_hid_to_out = FullConnection(hidden_layer, out_layer, name="hidden_to_out")
recurrent_connection = FullConnection(hidden_layer,
                                      hidden_layer,
                                      name="recurrent")

# Putting everything together.
net.addInputModule(in_layer)
net.addModule(hidden_layer)
net.addOutputModule(out_layer)

net.addConnection(conn_in_to_hid)
net.addConnection(conn_hid_to_out)
net.addRecurrentConnection(recurrent_connection)
net.sortModules()
Example #13
def exec_algo(xml_file, output_location):
    rootObj = ml.parse(xml_file)
    file_name = rootObj.MachineLearning.prediction.datafile
    file = open(file_name)
    var_input = rootObj.MachineLearning.prediction.input
    var_output = rootObj.MachineLearning.prediction.output
    var_classes = rootObj.MachineLearning.prediction.classes

    DS = ClassificationDataSet(var_input, var_output, nb_classes=var_classes)
    #DS1=ClassificationDataSet(13,1,nb_classes=10)

    for line in file.readlines():
        data = [float(x) for x in line.strip().split(',') if x != '']
        inp = tuple(data[:var_input])
        output = tuple(data[var_input:])
        DS.addSample(inp, output)

    tstdata, trndata = DS.splitWithProportion(0)
    #trndatatest,tstdatatest=DS1.splitWithProportion(0)

    trdata = ClassificationDataSet(trndata.indim, 1, nb_classes=10)
    #tsdata=ClassificationDataSet(DS1.indim,1,nb_classes=10)
    #tsdata1=ClassificationDataSet(DS1.indim,1,nb_classes=10)

    for i in xrange(trndata.getLength()):
        if (trndata.getSample(i)[1][0] != 100):
            trdata.addSample(trndata.getSample(i)[0], trndata.getSample(i)[1])

    trdata._convertToOneOfMany()
    #tsdata._convertToOneOfMany()
    #tsdata1._convertToOneOfMany()
    print "%d" % (trdata.getLength())

    rnn = RecurrentNetwork()
    inputLayer = LinearLayer(trdata.indim)

    hiddenLayer = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.hiddenLayerActivation
    hiddenNeurons = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.hiddenNeurons

    if hiddenLayer == 'Sigmoid':
        hiddenLayer = SigmoidLayer(hiddenNeurons)
    elif hiddenLayer == 'Softmax':
        hiddenLayer = SoftmaxLayer(hiddenNeurons)
    else:
        hiddenLayer = LinearLayer(hiddenNeurons)

    outputLayer = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.outputLayerActivation

    if outputLayer == 'Sigmoid':
        outputLayer = SigmoidLayer(trdata.outdim)
    elif outputLayer == 'Softmax':
        outputLayer = SoftmaxLayer(trdata.outdim)
    else:
        outputLayer = LinearLayer(trdata.outdim)

    rnn.addInputModule(inputLayer)
    rnn.addModule(hiddenLayer)
    rnn.addOutputModule(outputLayer)
    rnn_type = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.RNN_Type
    in_to_hidden = FullConnection(inputLayer, hiddenLayer)
    hidden_to_outputLayer = FullConnection(hiddenLayer, outputLayer)
    rnn.addConnection(in_to_hidden)
    rnn.addConnection(hidden_to_outputLayer)

    if rnn_type == 'Elman':
        hidden_to_hidden = FullConnection(hiddenLayer, hiddenLayer, name='c3')
        rnn.addRecurrentConnection(hidden_to_hidden)
    #hidden_to_hidden=FullConnection(hiddenLayer,hiddenLayer, name='c3')

    if rnn_type == 'Jordan':
        output_to_hidden = FullConnection(outputLayer, hiddenLayer, name='c3')
        rnn.addRecurrentConnection(output_to_hidden)

    #rnn.addRecurrentConnection(hidden_to_hidden)
    momentum = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.momentum
    weightdecay = rootObj.MachineLearning.prediction.algorithm.RecurrentNeuralNetwork.learningRate
    rnn.sortModules()
    trainer = BackpropTrainer(rnn,
                              dataset=trdata,
                              momentum=momentum,
                              verbose=True,
                              weightdecay=weightdecay)
    trainer.train()
    result = (percentError(trainer.testOnClassData(dataset=trdata),
                           trdata['class']))
    #result1=percentError(trainer.testOnClassData(dataset=tsdata1),tsdata1['class'])

    print '%f \n' % (100 - result)
    #print ('%f \n') % (100-result1)

    ts = time.time()
    directory = output_location + sep + str(int(ts))
    makedirs(directory)
    fileObject = open(
        output_location + sep + str(int(ts)) + sep + 'pybrain_RNN', 'w')
    pickle.dump(trainer, fileObject)
    pickle.dump(rnn, fileObject)
    fileObject.close()
Example #14
def exec_algo(xml_file, output_location):
    rootObj = ml.parse(xml_file)

    #Getting the root element so that we get the subclasses and its members and member function
    xmlParamDetails = rootObj.MachineLearning.classification

    #Gather param values from the XML parsed object
    file = open(xmlParamDetails.datafile)
    var_inp = xmlParamDetails.input
    var_out = xmlParamDetails.output
    classes = xmlParamDetails.classes
    split = xmlParamDetails.split
    learningrate = xmlParamDetails.algorithm.MultiLayerPerceptron.learningRate
    momentum = xmlParamDetails.algorithm.MultiLayerPerceptron.momentum
    epochs = xmlParamDetails.algorithm.MultiLayerPerceptron.epochs
    hiddenNeurons = int(
        xmlParamDetails.algorithm.MultiLayerPerceptron.hiddenLayers)
    hiddenLayer = xmlParamDetails.algorithm.MultiLayerPerceptron.hiddenLayerActivation
    outputLayer = xmlParamDetails.algorithm.MultiLayerPerceptron.outputLayerActivation
    delimiter = xmlParamDetails.delimiter

    DS = ClassificationDataSet(var_inp, var_out, nb_classes=classes)

    for line in file.readlines():
        data = [float(x) for x in line.strip().split(',') if x != '']
        inp = tuple(data[:var_inp])
        output = tuple(data[var_inp:])
        DS.addSample(inp, output)

    tstdata, trndata = DS.splitWithProportion(split)
    trdata = ClassificationDataSet(trndata.indim, var_out, nb_classes=classes)
    tsdata = ClassificationDataSet(tstdata.indim, var_out, nb_classes=classes)

    for i in xrange(trndata.getLength()):
        trdata.addSample(trndata.getSample(i)[0], trndata.getSample(i)[1])

    for i in xrange(tstdata.getLength()):
        tsdata.addSample(tstdata.getSample(i)[0], tstdata.getSample(i)[1])

    trdata._convertToOneOfMany()
    tsdata._convertToOneOfMany()

    fnn = FeedForwardNetwork()
    inputLayer = LinearLayer(trdata.indim)

    if hiddenLayer == 'Sigmoid':
        hiddenLayer = SigmoidLayer(hiddenNeurons)
    elif hiddenLayer == 'Softmax':
        hiddenLayer = SoftmaxLayer(hiddenNeurons)
    else:
        hiddenLayer = LinearLayer(hiddenNeurons)

    if outputLayer == 'Sigmoid':
        outputLayer = SigmoidLayer(trdata.outdim)
    elif outputLayer == 'Softmax':
        outputLayer = SoftmaxLayer(trdata.outdim)
    else:
        outputLayer = LinearLayer(trdata.outdim)

    fnn.addInputModule(inputLayer)
    fnn.addModule(hiddenLayer)
    fnn.addOutputModule(outputLayer)

    in_to_hidden = FullConnection(inputLayer, hiddenLayer)
    hidden_to_outputLayer = FullConnection(hiddenLayer, outputLayer)
    fnn.addConnection(in_to_hidden)
    fnn.addConnection(hidden_to_outputLayer)
    fnn.sortModules()

    trainer = BackpropTrainer(fnn,
                              dataset=trdata,
                              verbose=True,
                              learningrate=learningrate,
                              momentum=momentum)
    trainer.trainEpochs(epochs=epochs)

    trresult = percentError(trainer.testOnClassData(), trdata['class'])

    print("Training accuracy : %f " % (100 - trresult))

    ts = time.time()
    directory = output_location + sep + str(int(ts))
    makedirs(directory)
    fileObject = open(
        output_location + sep + str(int(ts)) + sep + 'pybrain_MLP', 'w')
    pickle.dump(trainer, fileObject)
    pickle.dump(fnn, fileObject)
    fileObject.close()
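
Because the trainer and the network are pickled into the same file in that order, they can be recovered with two sequential loads. A minimal sketch (the path is a placeholder for the timestamped directory created above):

import pickle
fileObject = open('output_location/1234567890/pybrain_MLP')
trainer = pickle.load(fileObject)
fnn = pickle.load(fileObject)
fileObject.close()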