Example 1
# tail of a plotting helper (its beginning is missing from the excerpt)
    pylab.ylabel('y')
    pylab.title('Neuron Number: ' + str(nneuron))
    pylab.grid(True)
    plotname = os.path.join(plotdir, 'jpq2layers_plot' + str(iter))
    pylab.savefig(plotname)


# set up the neural network
nneuron = 5
mom = 0.98
netname = "LSL-" + str(nneuron) + "-" + str(mom)
mv = ModuleValidator()
v = Validator()
n = FeedForwardNetwork(name=netname)
inLayer = LinearLayer(1, name='in')
hiddenLayer = SigmoidLayer(nneuron, name='hidden0')
outLayer = LinearLayer(1, name='out')
biasinUnit = BiasUnit(name="bhidden0")
biasoutUnit = BiasUnit(name="bout")
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addModule(biasinUnit)
n.addModule(biasoutUnit)
n.addOutputModule(outLayer)
in_to_hidden = FullConnection(inLayer, hiddenLayer)
bias_to_hidden = FullConnection(biasinUnit, hiddenLayer)
bias_to_out = FullConnection(biasoutUnit, outLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(bias_to_hidden)
n.addConnection(bias_to_out)
n.addConnection(hidden_to_out)  # defined above but missing from the truncated original
n.sortModules()  # required before the network can be used
Example 2
def ffn(nodesNum, trainingTime):
    """Build the neural network."""
    n = FeedForwardNetwork()

    inLayer = LinearLayer(6)  # build the network layers
    hiddenLayer1 = SigmoidLayer(nodesNum)
    hiddenLayer2 = SigmoidLayer(nodesNum)
    hiddenLayer3 = SigmoidLayer(nodesNum)
    hiddenLayer4 = SigmoidLayer(nodesNum)
    hiddenLayer5 = SigmoidLayer(nodesNum)
    hiddenLayer6 = SigmoidLayer(nodesNum)
    hiddenLayer7 = SigmoidLayer(nodesNum)
    hiddenLayer8 = SigmoidLayer(nodesNum)
    hiddenLayer9 = SigmoidLayer(nodesNum)
    hiddenLayer10 = SigmoidLayer(nodesNum)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)  # add the layers to the network
    n.addModule(hiddenLayer1)
    n.addModule(hiddenLayer2)
    n.addModule(hiddenLayer3)
    n.addModule(hiddenLayer4)
    n.addModule(hiddenLayer5)
    n.addModule(hiddenLayer6)
    n.addModule(hiddenLayer7)
    n.addModule(hiddenLayer8)
    n.addModule(hiddenLayer9)
    n.addModule(hiddenLayer10)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)  # define the connections
    hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden_to_hidden2 = FullConnection(hiddenLayer2, hiddenLayer3)
    hidden_to_hidden3 = FullConnection(hiddenLayer3, hiddenLayer4)
    hidden_to_hidden4 = FullConnection(hiddenLayer4, hiddenLayer5)
    hidden_to_hidden5 = FullConnection(hiddenLayer5, hiddenLayer6)
    hidden_to_hidden6 = FullConnection(hiddenLayer6, hiddenLayer7)
    hidden_to_hidden7 = FullConnection(hiddenLayer7, hiddenLayer8)
    hidden_to_hidden8 = FullConnection(hiddenLayer8, hiddenLayer9)
    hidden_to_hidden9 = FullConnection(hiddenLayer9, hiddenLayer10)
    hidden_to_out = FullConnection(hiddenLayer10, outLayer)

    n.addConnection(in_to_hidden)  # add the connections to the network
    n.addConnection(hidden_to_hidden1)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_hidden3)
    n.addConnection(hidden_to_hidden4)
    n.addConnection(hidden_to_hidden5)
    n.addConnection(hidden_to_hidden6)
    n.addConnection(hidden_to_hidden7)
    n.addConnection(hidden_to_hidden8)
    n.addConnection(hidden_to_hidden9)
    n.addConnection(hidden_to_out)

    n.sortModules()  # make the network usable
    print(n)
    """Build the dataset."""
    ds = SupervisedDataSet(6, 1)  # six inputs, one output

    # table query statements
    cur1 = conn.cursor()
    cur1.execute(
        'select * from szzs_rise_and_fall_rate limit 2,9999999999999999;')
    result1 = cur1.fetchall()
    fv = []  # features
    for res in result1:
        fv.append([float(x) for x in res[1:7]])

    cur2 = conn.cursor()
    cur2.execute(
        'select rise_fall_rate_next from szzs_rise_and_fall_rate limit 2,9999999999999999;'
    )
    result2 = cur2.fetchall()
    cla = []  # targets
    for res in result2:
        cla.append(float(res[0]))

    cur3 = conn.cursor()
    cur3.execute('select * from szzs_rise_and_fall_rate order by date desc;')
    result3 = cur3.fetchmany(1)
    test = []  # test data
    for res in result3:
        test.extend(float(x) for x in res[1:7])

    for i in range(0, len(fv)):
        ds.addSample(fv[i], cla[i])

    dataTrain, dataTest = ds.splitWithProportion(
        0.8)  # 80% of the data for training, 20% for testing
    """Train the neural network."""
    trainer = BackpropTrainer(n, dataset=dataTrain)  # network and dataset
    trainer.trainEpochs(trainingTime)  # number of training epochs
    return n.activate(test)
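
The function relies on a module-level database connection named conn that the excerpt never defines. A minimal sketch of the assumed setup and a call; PyMySQL is chosen purely for illustration, and the host, credentials, and database name are placeholders:

import pymysql  # assumed driver; the original's import is not shown

conn = pymysql.connect(host='localhost', user='user',
                       password='secret', database='stocks')  # hypothetical credentials
prediction = ffn(nodesNum=10, trainingTime=100)  # 10 neurons per hidden layer, 100 epochs
print(prediction)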
Example 3
from pybrain.structure import RecurrentNetwork
#network structures: FeedForwardNetwork and RecurrentNetwork

from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer
#modules: BiasUnit, GaussianLayer, LinearLayer, LSTMLayer, MDLSTMLayer, SigmoidLayer, SoftmaxLayer, StateDependentLayer, TanhLayer

from pybrain.structure import FullConnection
#connectors

n = RecurrentNetwork(name='red de prueba1')  # n = the network

n.addInputModule(LinearLayer(2, name='entradas'))  # add the input module to the network, with 2 inputs
n.addModule(SigmoidLayer(3, name='ocultas'))  # add the hidden module to the network, with 3 sigmoid neurons
#n.addModule(TanhLayer(3, name='ocultas'))  # alternative: a hidden module with 3 tanh neurons
n.addOutputModule(LinearLayer(1, name='salidas'))  # add the output module to the network, with 1 output

#information flow between the layers
n.addConnection(FullConnection(n['entradas'], n['ocultas'], name='con1'))  # connect the input layer to the hidden layer
n.addConnection(FullConnection(n['ocultas'], n['salidas'], name='con2'))  # connect the hidden layer to the output layer

n.addRecurrentConnection(
    FullConnection(n['ocultas'], n['ocultas'], name='con3'))  # time-delayed connection from the hidden layer to itself
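
The excerpt stops before the network is finalized. A minimal usage sketch with standard PyBrain calls; repeating the activation shows the effect of the recurrent connection:

n.sortModules()  # topologically sorts the modules; required before activation
print(n.activate([2, 2]))  # first time step
print(n.activate([2, 2]))  # same input, different output: con3 carries hidden state forward
n.reset()  # clear the recurrent state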
Example 4
"""
Created on Fri Apr 17 14:26:43 2020

@author: joao
"""

from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection

#create the neural network
rede = FeedForwardNetwork()

#create the layers
camada_entrada = LinearLayer(2)
camada_oculta = SigmoidLayer(3)
camada_saida = SigmoidLayer(1)

bias_1 = BiasUnit()
bias_2 = BiasUnit()

#register the layers inside the neural network
rede.addModule(camada_entrada)
rede.addModule(camada_oculta)
rede.addModule(camada_saida)
rede.addModule(bias_1)
rede.addModule(bias_2)

#connections between the layers
ligacao_entrada_oculta = FullConnection(camada_entrada, camada_oculta)
ligacao_oculta_saida = FullConnection(camada_oculta, camada_saida)
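
The listing is cut off before the connections (including the bias units) are wired into the network. A minimal completion under that assumption; the two bias-connection names are hypothetical:

ligacao_bias1_oculta = FullConnection(bias_1, camada_oculta)
ligacao_bias2_saida = FullConnection(bias_2, camada_saida)

rede.addConnection(ligacao_entrada_oculta)
rede.addConnection(ligacao_oculta_saida)
rede.addConnection(ligacao_bias1_oculta)
rede.addConnection(ligacao_bias2_saida)
rede.sortModules()  # finalize the topology
print(rede.activate([1, 0]))  # sanity check: one forward pass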
Example 5
print('====================================')
print('====================================')
print('  GRAPHS:', GRAPHS)
print('  HIDDENLAYERS:', HIDDENLAYERS)
print('  LEARNINGRATE:', LEARNINGRATE)
print('  MOMENTUM:', MOMENTUM)
print('====================================')
print('====================================')

# Prepare recurrent network
net = RecurrentNetwork()

# Add layers
net.addInputModule(LinearLayer(N * N, name='in'))
for layer in range(1, HIDDENLAYERS + 1):
    net.addModule(SigmoidLayer(N * N, name='hidden' + str(layer)))
net.addOutputModule(TanhLayer(N * N, name='out'))

# Add connections between layers
net.addConnection(FullConnection(net['in'], net['hidden1']))
for layer in range(1, HIDDENLAYERS):
    net.addConnection(
        FullConnection(net['hidden' + str(layer)],
                       net['hidden' + str(layer + 1)]))
net.addConnection(FullConnection(net['hidden' + str(HIDDENLAYERS)],
                                 net['out']))
net.addRecurrentConnection(
    FullConnection(net['hidden' + str(HIDDENLAYERS)], net['hidden1']))
net.sortModules()

# Trainer
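
The listing breaks off at this comment. A plausible completion (assumed, not part of the original) is a standard BackpropTrainer built from the constants printed above, over a SupervisedDataSet ds of N * N inputs and targets that the truncated code would have supplied:

trainer = BackpropTrainer(net, dataset=ds,
                          learningrate=LEARNINGRATE,
                          momentum=MOMENTUM)
trainer.trainEpochs(10)  # epoch count is arbitrary here

Example 6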
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection

rede = FeedForwardNetwork()

camadaEntrada = LinearLayer(2)
camadaOculta = SigmoidLayer(3)
camadaSaida = SigmoidLayer(1)

#bias for the hidden layer
bias1 = BiasUnit()

#bias for the output layer
bias2 = BiasUnit()

# add the layers to the (main) network
rede.addModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)

# connection between the input layer and the hidden layer
entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)

# connection from bias 1 to the hidden layer
biasOculta = FullConnection(bias1, camadaOculta)

# connection from bias 2 to the output layer
biasSaida = FullConnection(bias2, camadaSaida)  # restored: the original listing was cut off here
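Example 7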
def neural_network(data_model, classes, runs):
    # Python brain
    from pybrain.structure import FullConnection, FeedForwardNetwork, LinearLayer, SigmoidLayer, SoftmaxLayer
    from pybrain.datasets import ClassificationDataSet
    from pybrain.utilities import percentError
    from pybrain.supervised.trainers import BackpropTrainer
    from pybrain.tools.xml.networkwriter import NetworkWriter
    from pybrain.tools.xml.networkreader import NetworkReader
    import csv

    # Build Network
    try:

        n = NetworkReader.readFrom('resources/net.xml')
        print('Loading previous network')

    except Exception:

        print('Generating new network')
        # Create a new Network
        n = FeedForwardNetwork()

        # Define the input layer
        inLayer = LinearLayer(len(data_model[0][0]))

        # Define a hidden layer
        hiddenLayer = SigmoidLayer(10)
        hiddenLayer2 = SigmoidLayer(10)

        # Define the output layer
        outLayer = LinearLayer(classes)

        # Add layers to network n
        n.addInputModule(inLayer)
        n.addModule(hiddenLayer)
        n.addModule(hiddenLayer2)
        n.addOutputModule(outLayer)

        # Create layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)

        # Add connectors to network n
        n.addConnection(in_to_hidden)
        n.addConnection(hidden_to_hidden2)
        n.addConnection(hidden2_to_out)

        # Finish Network
        n.sortModules()

    # Build the dataset and train the network

    ds = ClassificationDataSet(len(data_model[0][0]), 1, nb_classes=classes)
    # di = ClassificationDataSet(2,1,0)
    for o in data_model:
        ds.addSample(o[0], o[1])
    testing_data, training_data = ds.splitWithProportion(0.3)

    training_data._convertToOneOfMany()
    testing_data._convertToOneOfMany()

    print("Number of training patterns: ", len(training_data))
    print("Input and output dimensions: ", training_data.indim, training_data.outdim)
    print("First sample (input, target, class):")
    print(training_data['input'][0], training_data['target'][0],
          training_data['class'][0])

    trainer = BackpropTrainer(n, dataset=training_data)
    smart = []
    dumb = []

    with open("resources/minimum_error.csv", 'r', newline='') as f:
        reader = csv.reader(f)
        for row in reader:
            smart.append(row)

    smart[0] = float(smart[0][0])
    print('The minimum error from previous runs =', smart[0])

    for t in range(runs):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(),
                                 training_data['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=testing_data),
                                 testing_data['class'])
        print("epoch: %4d" % trainer.totalepochs,
              "  train error: %5.5f%%" % trnresult,
              "  test error: %5.5f%%" % tstresult)
        smart.append(tstresult)

        if tstresult <= min(smart):
            NetworkWriter.writeToFile(n, 'resources/net.xml')
            print('Best!')
        else:
            dumb.append('1')
            print('Worst!')

    minimum_error = []
    minimum_error.append(min(smart))

    with open("resources/minimum_error.csv", 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(minimum_error)

    print('Minimum error (current state)', min(smart))
    return n
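Example 8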
"""
Created on Fri May  1 21:12:45 2020

@author: aasousa
"""
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection

# create the network

rede = FeedForwardNetwork()

# create the network layers
camadaEntrada = LinearLayer(2)  # with the number of neurons
camadaOculta = SigmoidLayer(3)  # hidden layer with the sigmoid function
camadaSaida = SigmoidLayer(1)  # output layer with the sigmoid function

bias_camada_oculta = BiasUnit()
bias_camada_saida = BiasUnit()

# add the layers to the neural network
rede.addModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addModule(camadaSaida)
rede.addModule(bias_camada_oculta)
rede.addModule(bias_camada_saida)

# connect the network layers
entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
Example 9
    def __init__(self, learningrate=0.001, inputneurons=2, hiddenneurons=50,
                 outputneurons=2, testondata=True, momentum=0.2,
                 train_percent=99, recurnet=False):
        """
        Neural networks class.
        learningrate: assign a learning rate of your choice; the default is 0.001.
        inputneurons: number of neurons on the input layer; can be set to the input dimension of the data.
        hiddenneurons: in general, keep it larger than inputneurons.
        outputneurons: the output dimension of your data.
        testondata: if True (the default), print out the performance of the neural net.
        """
        assert (hiddenneurons > inputneurons), "Number of hiddenneurons must be greater than inputneurons"
        
        self.learningrate = learningrate
        self.inputneurons = inputneurons
        self.hiddenneurons = hiddenneurons
        self.outputneurons = outputneurons

        #momentum controls how strongly previous weight updates carry over,
        #helping the learning process escape local minima; a good value is problem-dependent
        self.momentum = momentum

        #Construct network here
        self.mlpnetwork  = buildNetwork(self.inputneurons, self.hiddenneurons, self.outputneurons, bias=True,recurrent=recurnet)
        self.mlpnetwork.sortModules()
        print(self.mlpnetwork, "-----------------------------------")
        self.trainer = None
        self.validation = testondata
        self.data = None
        
        self.learnedNetwork = None
        self.train_percent = train_percent / 100.0
        
        #creating a feedforward network
        self.ffn = FeedForwardNetwork()
        inlayer = LinearLayer(inputneurons)
        hiddenlayer1 = SigmoidLayer(4)
        hiddenlayer2 = SigmoidLayer(2)
        outlayer = LinearLayer(outputneurons)
        
        #assigning them to layers
        self.ffn.addInputModule(inlayer)
        self.ffn.addModule(hiddenlayer1)
        self.ffn.addModule(hiddenlayer2)
        self.ffn.addOutputModule(outlayer)
        
        #defining connections
        
        in_to_hidden1 = FullConnection(inlayer,hiddenlayer1)
        hidden1_to_hidden2 = FullConnection(hiddenlayer1,hiddenlayer2)
        hidden2_to_out = FullConnection(hiddenlayer2,outlayer)
        
        #explicitly adding them to network
        self.ffn.addConnection(in_to_hidden1)
        self.ffn.addConnection(hidden1_to_hidden2)
        self.ffn.addConnection(hidden2_to_out)
        
        #explicitly call sortmodules
        self.ffn.sortModules()
        
        print("created network successfully....")
Example 10
def generate_network_forecaster(history_size=1):
    #Building Network process-------------------------------------------------------
    net = FeedForwardNetwork()
    inLayer = LinearLayer(history_size)
    hiddenLayer0 = SigmoidLayer(history_size)
    hiddenLayer1 = LinearLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer0)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    #net.addConnection(FullConnection(inLayer, outLayer))
    #net.addConnection(FullConnection(hiddenLayer0, outLayer))
    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    AUX = 0.1  # input scaling factor
    print(net)

    ##Net with 3 inputs, 8 hidden neurons in a layer and 8 in another, and 1 out.
    #net = buildNetwork(3,8,8,1)

    #Making Forecaster---------------------------------------------------------------
    def learn(self, data, lag, epoch=60):
        self.samples.clear()
        self.true_predictions = 0  # counts how often the net itself made the prediction, rather than a fallback used when there is not enough data
        self.predictions = 0
        self.lag = lag
        for i in range(len(data) - (lag + self.history_size)):
            self.samples.addSample(
                [data[j][1] * AUX for j in range(i, i + self.history_size)],
                data[i + lag + self.history_size][1] * AUX)
        print('Training')

        elapsed = os.times()[-1]  # elapsed wall-clock time, in seconds
        self.trainer.trainUntilConvergence(
            maxEpochs=epoch)  # , validationProportion = 0.01)
        elapsed = os.times()[-1] - elapsed

        if elapsed <= 60:
            time = '%2.1f seconds.' % elapsed
        elif elapsed <= 3600:
            time = '%d minutes and %2.1f seconds.' % (elapsed / 60,
                                                      elapsed % 60)
        else:
            time = '%d hours, %d minutes and %2.1f seconds.' % (
                elapsed / 3600, (elapsed % 3600) / 60, elapsed % 60)
        print('Trained for', time)

    def predict(self, lag):
        if self.initialized:
            return self.mem
        else:
            if 'vect' in dir(self):
                return self.vect[-1]
            else:
                raise Exception("Forecaster " + self.name +
                                " is not initialized.")

    def update(self, data):
        if self.initialized:
            self.vect.append(data[1])
            if len(self.vect) > self.history_size:
                self.vect = self.vect[1:]
            self.mem = self.net.activate([x * AUX for x in self.vect]) * (1 / AUX)
        else:
            self.mem = data[1]
            if 'vect' in dir(self):
                self.vect.append(data[1])
            else:
                self.vect = [data[1]]
            if len(self.vect) >= self.history_size:
                self.initialized = True

    NN = Forecaster(name='Neural Net',
                    predict_function=predict,
                    update_function=update,
                    learn_function=learn)
    NN.history_size = history_size
    NN.net = net
    NN.samples = SupervisedDataSet(history_size, 1)
    NN.trainer = BackpropTrainer(NN.net, NN.samples)
    return NN
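Example 11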
    def build(self, filename=None):
        """Build and train a network according to a supplied configuration file. The config file should be
        in the following format:

            line 1    | input layer size <tab> hidden layer size <tab> output layer size
            line 2    | acceptable error
            line >= 3 | input 1 <tab> input 2 <tab> | <tab> output 1 <tab> output 2

        <tab> : tab separator

        If multiple hidden layer sizes are specified on the first line, multiple hidden layers will be
        included in the built network. E.g. 5 <tab> 4 <tab> 3 <tab> 2 will build a net with 5 neurons at
        the input layer, 4 at the first hidden layer, 3 at the second hidden layer, and 2 at the output
        layer.

        Multiple input and output states can be specified in the training data in a similar way.

        Blank lines in the data set will be ignored. This is useful for separating training data for
        readability.

        See red_net_config.txt for an example network config file.
        """

        if filename is None:  # if no filename is supplied
            filename = self.filename  # use filename given at init

        if filename != self.filename:  # if new filename has been given
            self.filename = filename  # store new filename

        config_file = open(filename, 'r', newline='')  # open the data file
        config_lines = list(csv.reader(
            config_file, delimiter='\t'))  # read as csv with tab delim's
        layers = [int(size)
                  for size in config_lines[0]]  # split off layer config
        acceptable_error = float(
            config_lines[1][0])  # split off acceptable error
        dataset = config_lines[2:]  # split off data set

        # build empty network TODO variable network types
        self.network = FeedForwardNetwork()  # build the net

        # form layers TODO variable layer types
        input_size = layers[0]
        output_size = layers[-1]
        input_layer = LinearLayer(input_size)  # form input layer
        hidden_layers = [SigmoidLayer(size)
                         for size in layers[1:-1]]  # form hidden layers
        output_layer = SigmoidLayer(output_size)  # form output layer

        # add layers to network
        self.network.addInputModule(input_layer)  # add input layer
        for layer in hidden_layers:  # add hidden layers
            self.network.addModule(layer)
        self.network.addOutputModule(output_layer)  # add output layer

        # form connections TODO variable connection types and topologies
        in_to_h = FullConnection(
            input_layer, hidden_layers[0])  # form input -> first hidden
        h_to_out = FullConnection(hidden_layers[-1],
                                  output_layer)  # form last hidden -> output
        h_to_h = []  # list for hidden conn's
        for x in range(len(hidden_layers)):  # count through hidden layers
            if x != len(hidden_layers) - 1:  # if not at last hidden layer
                hh_conn = FullConnection(
                    hidden_layers[x],  # form hidden n -> 
                    hidden_layers[x + 1])  # hidden n + 1 connection
                h_to_h.append(hh_conn)  # add to list of hidden conn's

        # add connections to network
        self.network.addConnection(in_to_h)  # add input -> first hidden
        self.network.addConnection(h_to_out)  # add last hidden -> output
        for hh_conn in h_to_h:  # add hidden n -> hidden n + 1
            self.network.addConnection(hh_conn)

        # solidify network
        self.network.sortModules()  # sort network topology

        # train network TODO variable trainer types
        self.dataset = SupervisedDataSet(input_size,
                                         output_size)  # form data set
        for mapping in dataset:
            input_data = tuple(
                [float(input_state) for input_state in mapping[0:input_size]])
            output_data = tuple(
                float(output_state)
                for output_state in mapping[0 - output_size:])
            if input_data != ():  # skip blank lines
                self.dataset.addSample(input_data, output_data)
        trainer = BackpropTrainer(self.network, self.dataset)  # form trainer

        trained = False  # set trained flag to False
        epoch = 0  # set epoch to 0
        self._pause = False
        while trained is False:  # as long as net isn't trained
            if self._pause: time.sleep(1)  # if paused, wait a second
            else:
                epoch += 1  # increment epoch counter
                error = trainer.train()  # reduce the error
                print('epoch : %i error : %f' % (epoch, error)
                      )  # print current error
                if error < acceptable_error:  # if error is acceptable
                    trained = True  # set trained flag to True
Example 12
def execute_mlp(n_neurons, data_size, learn_rate, momentum_rate, f):

    # class names keep the trailing newline as read from the file
    dic = {'Iris-setosa\n': 0, 'Iris-versicolor\n': 1, 'Iris-virginica\n': 2}

    filename = "iris.txt"
    file = read_file(filename)
    file = change_class_name(file, dic)
    file = str_to_number(file)
    file_array = np.array(file)
    data = normalize_data(file_array)

    #data = order_data(data)

    data = data[::-1]  # reverse the order of the items

    inputs = data[:, :-1]  # copy all columns except the last
    targets = data[:, -1]  # copy the last column

    train_data_temp, test_data_temp = train_test_data(data, data_size)

    train_data = ClassificationDataSet(
        4, nb_classes=3)  # input size, number of classes
    test_data = ClassificationDataSet(
        4, nb_classes=3)  # input size, number of classes

    cont = 0
    for n in range(0, len(train_data_temp)):
        train_data.addSample(train_data_temp[n][:-1], [train_data_temp[n][-1]])
        #print(train_data.getSample(cont))
        #cont = cont + 1

    for n in range(0, len(test_data_temp)):
        test_data.addSample(test_data_temp[n][:-1], [test_data_temp[n][-1]])

    train_data._convertToOneOfMany()
    test_data._convertToOneOfMany()
    '''
    print ("Number of training patterns: ", len(train_data))
    print ("Input and output dimensions: ", train_data.indim, train_data.outdim)
    print ("First sample (input, target, class):")
    print (test_data['input'][0], test_data['target'][0], test_data['class'][0])
    '''

    network = FeedForwardNetwork()

    inLayer = SigmoidLayer(train_data.indim)
    first_hiddenLayer = SigmoidLayer(n_neurons)
    second_hiddenLayer = SigmoidLayer(n_neurons)
    outLayer = SigmoidLayer(train_data.outdim)

    network.addInputModule(inLayer)
    network.addModule(first_hiddenLayer)
    network.addModule(second_hiddenLayer)
    network.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, first_hiddenLayer)
    hidden_to_hidden = FullConnection(first_hiddenLayer, second_hiddenLayer)
    hidden_to_out = FullConnection(second_hiddenLayer, outLayer)

    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_hidden)
    network.addConnection(hidden_to_out)

    network.sortModules()
    #trainer = BackpropTrainer( network, dataset=train_data, momentum=momentum_rate, verbose=False, weightdecay=learn_rate)

    trainer = BackpropTrainer(network, dataset=train_data, verbose=False)

    trainer.trainEpochs(1000)

    result = trainer.testOnClassData(test_data, return_targets=True)
    #result = classification(result[1],result[0])
    print(result)
    f.write(str(result))
    f.flush()
Example 13
def main():
    #sample patches from image database
    myCleaver = imageCleaver()

    #instantiate NN with size as defined in image cleaver class.
    #TODO: size should be input to imageCleaver class.
    #    net = buildNetwork(myCleaver.sizePatches*myCleaver.sizePatches, (myCleaver.sizePatches*myCleaver.sizePatches)/2.0, myCleaver.sizePatches*myCleaver.sizePatches, bias = True)
    net = FeedForwardNetwork()
    inLayer = LinearLayer(myCleaver.sizePatches * myCleaver.sizePatches)
    hiddenLayer = SigmoidLayer(
        (myCleaver.sizePatches * myCleaver.sizePatches) // 4)  # layer size must be an integer
    outLayer = LinearLayer(myCleaver.sizePatches * myCleaver.sizePatches)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()

    #    fileObject = open('pickledNet.dat','r')
    #    net = pickle.load(fileObject)
    #    fileObject.close()
    #    net = NetworkReader.readFrom('filename.xml')

    #    print(net.activate([2, 1]))
    #Put imageCleaver dataset into pyBrain dataset format.
    ds = SupervisedDataSet(myCleaver.sizePatches * myCleaver.sizePatches,
                           myCleaver.sizePatches * myCleaver.sizePatches)
    for i in range(myCleaver.concImgArray.shape[1]):
        ds.addSample(myCleaver.concImgArray.T[i] / 256.0,
                     myCleaver.concImgArray.T[i] / 256.0)

#    for inpt, target in ds:
#        print inpt, target

    trainer = BackpropTrainer(net, ds)
    print(trainer.train())

#    fileObject = open('pickledNet.dat', 'w')
#    pickle.dump(net, fileObject)
#    fileObject.close()
#    NetworkWriter.writeToFile(net, 'testNetwork8.xml')

#    saveNetParamsToFile(net)
#    loadNetParamsFromFile(net)

    imitationActivations = net.activate(myCleaver.concImgArray.T[0] / 256.0)
    imitation = np.reshape(imitationActivations,
                           (myCleaver.sizePatches, myCleaver.sizePatches))

    plt.figure(1)
    plt.title('Input vs output')

    plt.subplot(221)
    plt.imshow(myCleaver.patchDataBase[0],
               cmap=plt.cm.gray,
               interpolation='nearest',
               vmin=0,
               vmax=256)
    plt.title('input')

    plt.subplot(223)
    plt.imshow(imitation * 256,
               cmap=plt.cm.gray,
               interpolation='nearest',
               vmin=0,
               vmax=256)
    plt.title('imitation')

    ##    plt.show()
    #    print 'imitation'
    #    print imitation*256

    imitationActivations2 = net.activate(myCleaver.concImgArray.T[1] / 256.0)
    imitation2 = np.reshape(imitationActivations2,
                            (myCleaver.sizePatches, myCleaver.sizePatches))

    #    plt.figure(2)
    #    plt.title('Input vs output2')

    plt.subplot(222)
    plt.imshow(myCleaver.patchDataBase[1],
               cmap=plt.cm.gray,
               interpolation='nearest',
               vmin=0,
               vmax=256)
    plt.title('input2')

    plt.subplot(224)
    plt.imshow(imitation2 * 256,
               cmap=plt.cm.gray,
               interpolation='nearest',
               vmin=0,
               vmax=256)
    plt.title('imitation2')

    ##    plt.subplot_tool()
    plt.show()
    #    print 'imitation2'
    #    print imitation2*256#

    #################################################################
    #calculate and show each hidden node's learned function, i.e. which input vector maximally excites it, under the constraint ||x||^2 <= 1
    #################################################################

    learned = []

    #convert dims from float to integer
    i2hid = int(in_to_hidden.indim)
    i2hod = int(in_to_hidden.outdim)
    print(i2hid)
    print(i2hod)
    #go through each hidden node
    for i in range(i2hod):
        print('i: ', i)
        one_learned = []
        sumOfWeights = 0
        #go through weights for the ith hidden node, sum weights
        for j in range(i2hid):
            print('j1: ', j)
            #accumulate the squared weight of the connection from input j to hidden node i
            sumOfWeights += (in_to_hidden.params[i * i2hid + j])**2

        #for each input, divide its weight by the norm of the weight vector: this is the maximally exciting input
        for j in range(i2hid):
            print('j2: ', j)
            one_learned.append(in_to_hidden.params[i * i2hid + j] /
                               math.sqrt(sumOfWeights))
        learned.append(one_learned)

    fig3, axes = plt.subplots(nrows=int(math.sqrt(len(learned))),
                              ncols=(int(math.sqrt(len(learned)))))
    for dat, ax in zip(learned, axes.flat):
        # The vmin and vmax arguments specify the color limits
        im = ax.imshow(np.reshape(
            dat, (myCleaver.sizePatches, myCleaver.sizePatches)),
                       cmap=plt.cm.gray,
                       interpolation='nearest')

    #print(len(myCleaver.patchDataBase))
    #print(myCleaver.getImages)
    #getTrainingSet()


#    print 'np.array(net.activate(myCleaver.concImgArray.T[0]/256.0))'
#    print net.activate(myCleaver.concImgArray.T[0]/256.0)
#    print 'np.array(net.activate(myCleaver.concImgArray.T[1]/256.0))'
#    print net.activate(myCleaver.concImgArray.T[1]/256.0)
#    print('')
#
#    print('the two inputs')
#    print(ds.getSample(0))
#    print(ds.getSample(1))
#    print('')
#
#    print('original inputs')
#    print(myCleaver.concImgArray.T[0]/256.0)
#    print(myCleaver.concImgArray.T[1]/256.0)
#    print('')
#
#    print('first activation')
#    print(net.activate(myCleaver.concImgArray.T[0]/256.0))
#    print('second activation')
#    print(net.activate(myCleaver.concImgArray.T[1]/256.0))

#    print(net.params)
#    print(net)
#    print(net.outmodules)
#
#
#    net = buildNetwork(4,2,4, bias = True)
#    print('simple net activation')
#    print(net.activate((1,2,3,4)))
#    print('simple net activation2')
#    print(net.activate((5,4,3,2)))
#    print('')
#
    for mod in net.modules:
        print("Module:", mod.name)
        if mod.paramdim > 0:
            print("--parameters:", mod.params)
        for conn in net.connections[mod]:
            print("-connection to", conn.outmod.name)
            if conn.paramdim > 0:
                print("- parameters", conn.params)
    if hasattr(net, "recurrentConns"):  # only recurrent networks have this attribute
        print("Recurrent connections")
        for conn in net.recurrentConns:
            print("-", conn.inmod.name, " to", conn.outmod.name)
            if conn.paramdim > 0:
                print("- parameters", conn.params)
Example 14
from pybrain.structure import FeedForwardNetwork, SigmoidLayer, LinearLayer, FullConnection
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
import numpy as np

net = FeedForwardNetwork()

l_in = LinearLayer(4)
l_hid = SigmoidLayer(6)
l_out = LinearLayer(3)

net.addInputModule(l_in)
net.addModule(l_hid)
net.addOutputModule(l_out)

i2h = FullConnection(l_in, l_hid)
h2o = FullConnection(l_hid, l_out)

net.addConnection(i2h)
net.addConnection(h2o)

net.sortModules()
print(net)

dataset = SupervisedDataSet(4, 3)
dataset.addSample((1, 0, 0, 0), (1, 0, 0))
dataset.addSample((0, 1, 0, 0), (0, 1, 0))
dataset.addSample((0, 0, 0, 1), (0, 0, 1))
dataset.addSample((0, 0, 1, 1), (0, 0, 1))
dataset.addSample((0, 0, 1, 0), (0, 1, 0))
dataset.addSample((1, 1, 0, 0), (1, 0, 0))
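
The dataset above is built but never used, even though BackpropTrainer is already imported. A minimal continuation, assuming the intent was the usual training loop; the epoch count is arbitrary:

trainer = BackpropTrainer(net, dataset)
for epoch in range(100):  # arbitrary number of epochs for illustration
    error = trainer.train()  # one full pass over the dataset; returns the epoch error
print('final training error:', error)
print(net.activate((1, 0, 0, 0)))  # should move toward the target (1, 0, 0)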
Example 15
    def defineArquitetura(self):
        self.camada_entrada = LinearLayer(self.camada_entrada, name="entrada")
        self.camada_oculta = SigmoidLayer(self.camada_oculta, name="oculta")
        self.camada_saida = LinearLayer(self.camada_saida, name="saida")
        self.adicionaEstrutura()
Example 16
from pybrain.structure import FeedForwardNetwork
n = FeedForwardNetwork()

from pybrain.structure import LinearLayer, SigmoidLayer
inLayer = LinearLayer(2, name="Foo The II of LinearLayer")
hiddenLayer = SigmoidLayer(3, name="Bob the Pesant")
outLayer = LinearLayer(1, name="Foo The II Royal Decree")
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

from pybrain.structure import FullConnection
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
n.sortModules()

#print(n)

from pybrain.structure import RecurrentNetwork
n = RecurrentNetwork()

n.addInputModule(LinearLayer(2, name='in'))
n.addModule(SigmoidLayer(3, name='hidden'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
n.addRecurrentConnection(FullConnection(n['hidden'], n['hidden'], name='c3'))
Example 17
DS = SupervisedDataSet(150, 34)

# add each data element to the dataset
for i in np.arange(len(input)):
    DS.addSample(input[i], output[i])

# you can get your input/output this way
X = DS['input']
Y = DS['target']

# create a neural network
fnn = FeedForwardNetwork()

# create three layers
inLayer = LinearLayer(150, name='inLayer')
hiddenLayer0 = SigmoidLayer(50, name='hiddenLayer0')
outLayer = SigmoidLayer(34, name='outLayer')

# add three layers to the neural network
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer0)
fnn.addOutputModule(outLayer)

# link three layers
in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)
hidden0_to_out = FullConnection(hiddenLayer0, outLayer)

# add the links to neural network
fnn.addConnection(in_to_hidden0)
fnn.addConnection(hidden0_to_out)
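
The excerpt ends before the network is finalized or trained. A minimal completion sketch with standard PyBrain calls; the trainer and epoch count are assumptions, and BackpropTrainer would need to be imported:

fnn.sortModules()  # finalize the topology before use
trainer = BackpropTrainer(fnn, DS)  # train on the dataset built above
for _ in range(20):  # arbitrary epoch count
    print(trainer.train())  # prints the per-epoch training error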
Example 18
def exec_algo(xml_file, output_location):
    rootObj = ml.parse(xml_file)

    #Get the root element so that we can access the subclasses, their members, and member functions
    xmlParamDetails = rootObj.MachineLearning.classification

    #Gather param values from the XML parsed object
    file = open(xmlParamDetails.datafile)
    var_inp = xmlParamDetails.input
    var_out = xmlParamDetails.output
    classes = xmlParamDetails.classes
    split = xmlParamDetails.split
    learningrate = xmlParamDetails.algorithm.MultiLayerPerceptron.learningRate
    momentum = xmlParamDetails.algorithm.MultiLayerPerceptron.momentum
    epochs = xmlParamDetails.algorithm.MultiLayerPerceptron.epochs
    hiddenNeurons = int(
        xmlParamDetails.algorithm.MultiLayerPerceptron.hiddenLayers)
    hiddenLayer = xmlParamDetails.algorithm.MultiLayerPerceptron.hiddenLayerActivation
    outputLayer = xmlParamDetails.algorithm.MultiLayerPerceptron.outputLayerActivation
    delimiter = xmlParamDetails.delimiter

    DS = ClassificationDataSet(var_inp, var_out, nb_classes=classes)

    for line in file.readlines():
        data = [float(x) for x in line.strip().split(',') if x != '']
        inp = tuple(data[:var_inp])
        output = tuple(data[var_inp:])
        DS.addSample(inp, output)

    tstdata, trndata = DS.splitWithProportion(split)
    trdata = ClassificationDataSet(trndata.indim, var_out, nb_classes=classes)
    tsdata = ClassificationDataSet(tstdata.indim, var_out, nb_classes=classes)

    for i in range(trndata.getLength()):
        trdata.addSample(trndata.getSample(i)[0], trndata.getSample(i)[1])

    for i in range(tstdata.getLength()):
        tsdata.addSample(tstdata.getSample(i)[0], tstdata.getSample(i)[1])

    trdata._convertToOneOfMany()
    tsdata._convertToOneOfMany()

    fnn = FeedForwardNetwork()
    inputLayer = LinearLayer(trdata.indim)

    if hiddenLayer == 'Sigmoid':
        hiddenLayer = SigmoidLayer(hiddenNeurons)
    elif hiddenLayer == 'Softmax':
        hiddenLayer = SoftmaxLayer(hiddenNeurons)
    else:
        hiddenLayer = LinearLayer(hiddenNeurons)

    if outputLayer == 'Sigmoid':
        outputLayer = SigmoidLayer(trdata.outdim)
    elif outputLayer == 'Softmax':
        outputLayer = SoftmaxLayer(trdata.outdim)
    else:
        outputLayer = LinearLayer(trdata.outdim)

    fnn.addInputModule(inputLayer)
    fnn.addModule(hiddenLayer)
    fnn.addOutputModule(outputLayer)

    in_to_hidden = FullConnection(inputLayer, hiddenLayer)
    hidden_to_outputLayer = FullConnection(hiddenLayer, outputLayer)
    fnn.addConnection(in_to_hidden)
    fnn.addConnection(hidden_to_outputLayer)
    fnn.sortModules()

    trainer = BackpropTrainer(fnn,
                              dataset=trdata,
                              verbose=True,
                              learningrate=learningrate,
                              momentum=momentum)
    trainer.trainEpochs(epochs=epochs)

    trresult = percentError(trainer.testOnClassData(), trdata['class'])

    print("Training accuracy : %f " % (100 - trresult))

    ts = time.time()
    directory = output_location + sep + str(int(ts))
    makedirs(directory)
    fileObject = open(
        output_location + sep + str(int(ts)) + sep + 'pybrain_MLP', 'wb')  # pickle requires binary mode
    pickle.dump(trainer, fileObject)
    pickle.dump(fnn, fileObject)
    fileObject.close()
Example 19
ds_train, ds_test = ds.splitWithProportion(TRAIN_SIZE)

#%%
# Define the main constants
HIDDEN_NEURONS_NUM = 10  # number of neurons in the network's hidden layer
HIDDEN_NEURONS_NUM2 = 20
MAX_EPOCHS = 250  # maximum number of iterations of the network parameter optimization

#%%
np.random.seed(0)
net = FeedForwardNetwork()
inLayer = LinearLayer(N)
hiddenLayer = SigmoidLayer(HIDDEN_NEURONS_NUM)
#hiddenLayer2 = SigmoidLayer(HIDDEN_NEURONS_NUM2)
#outLayer = LinearLayer(1)
outLayer = SigmoidLayer(1)

net.addInputModule(inLayer)
net.addModule(hiddenLayer)
#net.addModule(hiddenLayer2)
net.addOutputModule(outLayer)

net.addConnection(FullConnection(inLayer, hiddenLayer))
#net.addConnection(FullConnection(hiddenLayer,hiddenLayer2))
#net.addConnection(FullConnection(hiddenLayer2, outLayer))
net.addConnection(FullConnection(hiddenLayer, outLayer))
net.sortModules()
#net = buildNetwork(ds_train.indim, HIDDEN_NEURONS_NUM, ds_train.outdim, bias=True,outclass=SoftmaxLayer)
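
MAX_EPOCHS is defined above but never used in the excerpt, so the training call presumably followed. A plausible continuation (assumed, not from the original) with PyBrain's validated training loop:

trainer = BackpropTrainer(net, dataset=ds_train)
trainer.trainUntilConvergence(maxEpochs=MAX_EPOCHS)  # holds out part of ds_train for validation and can stop early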
Example 20
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure import FullConnection

n = FeedForwardNetwork()
inLayer = LinearLayer(2)
hiddenLayer = SigmoidLayer(3)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()
print(n.activate([1, 2]))
print(in_to_hidden.params)
Example 21
def trainNetwork(train_ds, test_ds,
                 train_ds_labels, test_ds_labels,
                 features,
                 learningrate, lrdecay,
                 momentum, weightdecay,
                 hidden_layers,
                 time_limit_seconds):
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(train_ds.indim)
    fnn.addInputModule(inLayer)
    lastLayer = inLayer
    connection_number = 0 # connection-0 is the connection from the input layer.
    for hidden_layer_size in hidden_layers:
#        hiddenLayer = SigmoidLayer(hidden_layer_size)
        hiddenLayer = TanhLayer(hidden_layer_size)
        fnn.addModule(hiddenLayer)
        fnn.addConnection(
            FullConnection(lastLayer, hiddenLayer,
                           name="connection-%d" % connection_number))
        connection_number = connection_number + 1
        bias = BiasUnit()
        fnn.addModule(bias)
        fnn.addConnection(FullConnection(bias, hiddenLayer))
        lastLayer = hiddenLayer
    outLayer = SigmoidLayer(train_ds.outdim)
    fnn.addOutputModule(outLayer)
    fnn.addConnection(
        FullConnection(lastLayer, outLayer,
                       name="connection-%d" % connection_number))
    bias = BiasUnit()
    fnn.addModule(bias)
    fnn.addConnection(FullConnection(bias, outLayer))
    fnn.sortModules()

    trainer = BackpropTrainer(fnn, dataset=train_ds,
                              learningrate=learningrate,
                              lrdecay=lrdecay,
                              momentum=momentum,
                              verbose=False,
                              weightdecay=weightdecay)

    # Train
    (initial_train_error, initial_train_F1) = percentClassErrorAndF1(fnn, train_ds, train_ds_labels, features)
    train_errors = [initial_train_error]
    train_F1s = [initial_train_F1]
    (initial_test_error, initial_test_F1) = percentClassErrorAndF1(fnn, test_ds, test_ds_labels, features)
    test_errors = [initial_test_error]
    test_F1s = [initial_test_F1]
    train_algo_errors = [trainer.testOnData(train_ds) * 100]
    test_algo_errors = [trainer.testOnData(test_ds) * 100]
    epochs = [0]
    try:
        start_time = time.time()
        for i in range(200):
            for _ in range(50):
                train_algo_error = trainer.train() * 100.0
                if math.isnan(train_algo_error):
                    break
            if math.isnan(train_algo_error):
                break
            (trnresult, trnF1) = percentClassErrorAndF1(fnn, train_ds, train_ds_labels, features)
            (tstresult, tstF1) = percentClassErrorAndF1(fnn, test_ds, test_ds_labels, features)
            test_algo_error = trainer.testOnData(test_ds)* 100
            now_time = time.time()
            time_left = time_limit_seconds - (now_time - start_time)
            print("epoch %3d:" % trainer.totalepochs,
                  "  train error: %6.4f%%" % train_algo_error,
                  "  test error: %6.4f%%" % test_algo_error,
                  "  train F1: %s" % ", ".join([("%.2f" % x) for x in trnF1]),
                  "  test F1: %s" % ", ".join([("%.2f" % x) for x in tstF1]),
                  "  %ds left" % int(round(time_left)))

            epochs.append(trainer.totalepochs)
            train_errors.append(trnresult)
            train_F1s.append(trnF1)
            test_errors.append(tstresult)
            test_F1s.append(tstF1)
            train_algo_errors.append(train_algo_error)
            test_algo_errors.append(test_algo_error)
            if time_left <= 0:
                print("Timeout: Time to report the results.")
                break
            # if test_algo_errors[-1] < 4:
            #     print("Good enough? Don't want to overtrain")
            #     break;

    except KeyboardInterrupt:
        # Someone pressed Ctrl-C, try to still plot the data.
        print("Aborted training...")
        pass

    return (fnn, epochs, train_algo_errors, test_algo_errors, train_F1s, test_F1s)
Example 22
#
if (DEBUG > 2):
    pp(trndata)

#
#	now build a feedforward neural network
#
#	Configuration:
#
#		Input Layer dimension: 2
#		2 hidden layers with 5 sigmoid neurons
#		Output layer has 3 Softmax neurons
#
net = FeedForwardNetwork()
inLayer = LinearLayer(2)
hiddenLayer1 = SigmoidLayer(5)
hiddenLayer2 = SigmoidLayer(5)
outLayer = SoftmaxLayer(3)
#
#	add those layers (modules)
#
net.addInputModule(inLayer)
net.addModule(hiddenLayer1)
net.addModule(hiddenLayer2)
net.addOutputModule(outLayer)
#
#	do the plumbing
#
in_to_hidden1 = FullConnection(inLayer, hiddenLayer1)
hidden1_to_hidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
hidden2_to_out = FullConnection(hiddenLayer2, outLayer)
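
The excerpt breaks off before the connections are added to the network. A minimal completion under that assumption:

net.addConnection(in_to_hidden1)
net.addConnection(hidden1_to_hidden2)
net.addConnection(hidden2_to_out)
net.sortModules()  # finalize the topology

Example 23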
nY1 = no.normalize01(y1)

epocas = 3000

DS = SupervisedDataSet(1, 1)
#DS.appendLinked([1],[2])
#DS.appendLinked([2],[3])
for i in range(len(nX1)):
    DS.appendLinked((nX1[i]), (nY1[i]))

#Creating the FF network
n = FeedForwardNetwork()

#Building the modules
inLayer = LinearLayer(DS.indim)
hiddenLayer = SigmoidLayer(8)
hiddenLayer2 = SigmoidLayer(12)
outLayer = LinearLayer(DS.outdim)

#Adding the created modules to the network
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addModule(hiddenLayer2)
n.addOutputModule(outLayer)

#It must be made explicit how the modules are connected

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
hidden2_to_out = FullConnection(hiddenLayer2, outLayer)