Example #1
    def __setUpBrain(self, genome):
        """
		Set up PyBrain's neural network
		
		Args:
		    genome (G1DList): PyEvolve's individual container
		"""
        self.network = FeedForwardNetwork()

        inLayer = TanhLayer(14)
        hiddenLayer = TanhLayer(12)
        hiddenLayer2 = TanhLayer(6)
        outLayer = TanhLayer(2)

        self.network.addInputModule(inLayer)
        self.network.addModule(hiddenLayer)
        self.network.addModule(hiddenLayer2)
        self.network.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)

        self.network.addConnection(in_to_hidden)
        self.network.addConnection(hidden_to_hidden2)
        self.network.addConnection(hidden2_to_out)

        self.network.sortModules()

        new_params = numpy.array(genome.genomeList)
        self.network._setParameters(new_params)
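The genome must supply exactly as many genes as the network has weights: 14*12 + 12*6 + 6*2 = 252 for this topology. A hypothetical PyEvolve setup for a matching genome (the parameter ranges are illustrative):

from pyevolve import G1DList, Initializators

genome = G1DList.G1DList(252)  # one gene per network weight
genome.setParams(rangemin=-1.0, rangemax=1.0)
genome.initializator.set(Initializators.G1DListInitializatorReal)
# the owning class would then call its __setUpBrain(genome) during evaluation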
Example #2
def getNetwork(trndata):
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(trndata.indim, name='in'))
    n.addModule(SigmoidLayer(100, name='hidden'))
    n.addOutputModule(LinearLayer(trndata.outdim, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(
        FullConnection(n['hidden'], n['hidden'], name='c3'))
    n.sortModules()

    # fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
    trainer = BackpropTrainer(n,
                              dataset=trndata,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)

    # TODO: return network and trainer here. Make another function for training
    # for i in range(20):
    # trainer.trainEpochs(1)
    # trainer.trainUntilConvergence(maxEpochs=100)

    # trnresult = percentError( trainer.testOnClassData(),trndata['class'] )
    # tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )

    # print "epoch: %4d" % trainer.totalepochs, \
    # 	"  train error: %5.2f%%" % trnresult

    # out = fnn.activateOnDataset(tstdata)
    # out = out.argmax(axis=1)  # the highest output activation gives the class
    return (n, trainer)
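The TODO above asks for a separate training routine; a minimal sketch reassembling the commented-out fragments, assuming percentError is imported from pybrain.utilities and a test set tstdata is available:

def trainNetwork(n, trainer, trndata, tstdata, epochs=20):
    for _ in range(epochs):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['class'])
        print "epoch: %4d" % trainer.totalepochs, \
              "  train error: %5.2f%%" % trnresult, \
              "  test error: %5.2f%%" % tstresult
    return n, trainer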
Example #3
def setupNetwork(numHiddenNodes, numHiddenLayers, numFeatures, numSpeakers):

    nn = FeedForwardNetwork()
    inputLayer = LinearLayer(numFeatures)
    nn.addInputModule(inputLayer)

    hiddenLayers = []
    for x in range(numHiddenLayers):
        hiddenLayer = TanhLayer(numHiddenNodes)
        nn.addModule(hiddenLayer)
        hiddenLayers.append(hiddenLayer)
    outputLayer = SoftmaxLayer(numSpeakers)
    nn.addOutputModule(outputLayer)

    inputConnection = FullConnection(inputLayer, hiddenLayers[0])
    nn.addConnection(inputConnection)

    # chain the hidden layers together: layer x feeds layer x + 1
    for x in range(numHiddenLayers - 1):
        connect = FullConnection(hiddenLayers[x], hiddenLayers[x + 1])
        nn.addConnection(connect)

    outputConnection = FullConnection(hiddenLayers[numHiddenLayers - 1],
                                      outputLayer)
    nn.addConnection(outputConnection)
    nn.sortModules()

    return nn
Example #4
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()

    # Declare the input, hidden, and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    # Declare the connections between the nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)

    # Register the connections in the network
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    # The network is now ready to use
    n.sortModules()

    return n
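A hypothetical call site for crearRN; the 4096 inputs suggest a flattened 64x64 image, and the zero vector below is only a placeholder:

red = crearRN()
entrada = [0.0] * 4096          # e.g. a flattened 64x64 image
salida = red.activate(entrada)  # numpy array holding the single output value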
Example #5
    def __init__(self, genes=None):

        self.net = FeedForwardNetwork()
        self.inLayer = TanhLayer(16)
        self.hiddenLayer = TanhLayer(20)
        self.hiddenLayer2 = TanhLayer(20)
        self.outLayer = SoftmaxLayer(4)

        self.net.addInputModule(self.inLayer)
        self.net.addModule(self.hiddenLayer)
        self.net.addModule(self.hiddenLayer2)
        self.net.addOutputModule(self.outLayer)

        self.in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        self.hidden1_to_hidden2 = FullConnection(self.hiddenLayer, self.hiddenLayer2)
        self.hidden2_to_out = FullConnection(self.hiddenLayer2, self.outLayer)

        self.net.addConnection(self.in_to_hidden)
        self.net.addConnection(self.hidden1_to_hidden2)
        self.net.addConnection(self.hidden2_to_out)

        self.net.sortModules()

        # Set the params to the provided params
        if genes is not None:
            self.net._setParameters(genes)
Example #6
def initalize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    
    # Old code (regression)        
    n = FeedForwardNetwork()
    # n = buildNetwork( 2, 3, data.outdim, outclass=SoftmaxLayer )

    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
        
        
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
        
    n.sortModules()
    return n
Example #7
def build_deep_network(linear_dimensions):
    neural_net = FeedForwardNetwork()

    inLayer = LinearLayer(linear_dimensions)
    hiddenLayer_1 = SigmoidLayer(100)
    hiddenLayer_2 = SigmoidLayer(100)
    hiddenLayer_3 = SigmoidLayer(50)
    outLayer = LinearLayer(1)

    neural_net.addInputModule(inLayer)
    neural_net.addModule(hiddenLayer_1)
    neural_net.addModule(hiddenLayer_2)
    neural_net.addModule(hiddenLayer_3)
    neural_net.addOutputModule(outLayer)

    in_to_hidden_1 = FullConnection(inLayer, hiddenLayer_1)
    hidden_1_to_hidden_2 = FullConnection(hiddenLayer_1, hiddenLayer_2)
    hidden_2_to_hidden_3 = FullConnection(hiddenLayer_2, hiddenLayer_3)
    hidden_3_to_output = FullConnection(hiddenLayer_3, outLayer)

    neural_net.addConnection(in_to_hidden_1)
    neural_net.addConnection(hidden_1_to_hidden_2)
    neural_net.addConnection(hidden_2_to_hidden_3)
    neural_net.addConnection(hidden_3_to_output)

    neural_net.sortModules()
    return neural_net
Example #8
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600  # the input is a 40x40 picture, flattened to 1600 values
    NUMBER_OF_HIDDEN_LAYERS = 10  # despite the name, this is the number of neurons in the single hidden layer
    NUMBER_OF_OUTPUT_CLASSES = 8  # the output encodes 8 classes
    inLayer = LinearLayer(NUMBER_OF_INPUT_BYTES)
    hiddenLayer = SigmoidLayer(NUMBER_OF_HIDDEN_LAYERS)
    outLayer = LinearLayer(NUMBER_OF_OUTPUT_CLASSES)
    # Create connections between layers
    # We create a FullConnection - each neuron of one layer is connected to each neuron of the other layer
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Add layers to our network
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)
    # Add connections to network
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    # Sort modules to make multilayer perceptron usable
    network.sortModules()
    # prepare array to activate network
    d_letter_array = read_array("d")
    # activate network
    network.activate(d_letter_array)
    return network
Example #9
def buildNN2HiddenLayer(trnData, netNo):
    from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
    from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
    from pybrain.structure import FullConnection

    n = FeedForwardNetwork()
    inLayer = LinearLayer(trnData.indim)  # Define Layer Types
    # hiddenLayer0neurons and hiddenLayer1neurons are assumed to be
    # module-level globals; netNo == 2 swaps the two sizes
    if netNo == 1 or netNo == 3:
        hiddenLayer0 = TanhLayer(hiddenLayer0neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer1neurons)  # Sigmoid
    elif netNo == 2:
        hiddenLayer0 = TanhLayer(hiddenLayer1neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer0neurons)  # Sigmoid

    outLayer = SoftmaxLayer(trnData.outdim)  # SoftmaxLayer

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer0)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)  # Define connections
    hidden0_to_hidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden0)
    n.addConnection(hidden0_to_hidden1)
    n.addConnection(hidden1_to_out)
    n.sortModules()
    return n
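buildNN2HiddenLayer reads its two hidden-layer sizes from module-level globals, so a caller must define them first; a sketch with assumed values and a placeholder dataset:

hiddenLayer0neurons = 20  # assumed sizes; the original defines these elsewhere
hiddenLayer1neurons = 10

from pybrain.datasets import ClassificationDataSet
trnData = ClassificationDataSet(4, nb_classes=3)  # placeholder dataset
net = buildNN2HiddenLayer(trnData, netNo=1)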
Example #10
def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except Exception:  # skip malformed samples
            continue
    print str(len(ds)) + ' points successfully acquired'

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net,
                              learningrate=0.01,
                              momentum=0.1,
                              verbose=True)

    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)

    return net
Example #11
def _init_net(params_len, output_layer_num, hidden_size):
    # init the network; training happens elsewhere
    net = FeedForwardNetwork()
    """ Next, we're constructing the input, hidden and output layers. """
    inLayer = LinearLayer(params_len)
    hiddenLayer = SigmoidLayer(hidden_size)
    hiddenLayer1 = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(output_layer_num)
    """ (Note that we could also have used a hidden layer of type TanhLayer, LinearLayer, etc.)
    Let's add them to the network: """
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    """ We still need to explicitly determine how they should be connected. For this we use the most
    common connection type, which produces a full connectivity between two layers (or Modules, in general):
    the 'FullConnection'. """

    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden2out = FullConnection(hiddenLayer1, outLayer)

    net.addConnection(in2hidden)
    net.addConnection(hidden2hidden)
    net.addConnection(hidden2out)
    """ All the elements are in place now, so we can do the final step that makes our MLP usable,
    which is to call the 'sortModules()' method. """

    net.sortModules()

    # net = buildNetwork( params_len, hidden_size, 601, bias = True )
    return net
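A minimal smoke test for _init_net (the argument values are placeholders):

net = _init_net(params_len=10, output_layer_num=3, hidden_size=5)
print net.activate([0.0] * 10)  # a 3-dimensional output vector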
Example #12
    def create(number_of_hidden_layers, activation_function, input_length,
               output_length, network_file, classify):
        n = FeedForwardNetwork()
        in_layer = LinearLayer(input_length)
        n.addInputModule(in_layer)

        layer_to_connect_to = in_layer
        for x in range(0, number_of_hidden_layers):
            if activation_function == 'sigmoid':
                hidden_layer = SigmoidLayer(input_length)
            else:
                hidden_layer = TanhLayer(input_length)

            n.addModule(hidden_layer)
            hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                     hidden_layer)
            n.addConnection(hidden_layer_connection)
            layer_to_connect_to = hidden_layer

        if classify:
            out_layer = SoftmaxLayer(output_length)
        else:
            out_layer = LinearLayer(output_length)
        n.addOutputModule(out_layer)

        hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
        n.addConnection(hidden_to_out)
        n.sortModules()
        save_network(n, network_file)
Example #13
    def __init__(self):

        self.Q = FeedForwardNetwork()

        # The value function is represented by a neural network
        # Input: S = (angle, angular velocity, position), A = action
        # Output: value
        # 2 hidden layers of 5 neurons each
        # Sigmoid activation function
        inLayer = SigmoidLayer(4, name="Input Layer")
        hiddenLayer1 = SigmoidLayer(5, name="Hidden Layer 1")
        hiddenLayer2 = SigmoidLayer(5, name="Hidden Layer 2")
        outLayer = SigmoidLayer(1, name="Output Layer")

        self.Q.addInputModule(inLayer)
        self.Q.addModule(hiddenLayer1)
        self.Q.addModule(hiddenLayer2)
        self.Q.addOutputModule(outLayer)

        connInToHidden1 = FullConnection(inLayer, hiddenLayer1)
        connHidden1ToHidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
        connHidden2ToOut = FullConnection(hiddenLayer2, outLayer)

        self.Q.addConnection(connInToHidden1)
        self.Q.addConnection(connHidden1ToHidden2)
        self.Q.addConnection(connHidden2ToOut)

        self.Q.sortModules()
Example #14
def BackupNetwork(genome=None):
	# build a [12, 12, 4] network whose initial weights come from the baseline policy
	
	from pybrain.structure import FeedForwardNetwork,LinearLayer,TanhLayer,FullConnection
	network = FeedForwardNetwork()
	inLayer= LinearLayer(12)
	hiddenLayer = LinearLayer(12)
	outLayer = TanhLayer(4)
	network.addInputModule(inLayer)
	network.addModule(hiddenLayer)
	network.addOutputModule(outLayer)
	
	weights = []
	if genome is None:
		import pickle
		weights = pickle.load(open("seed"))
	else:
		weights = genome
	 
	in_to_hidden = FullConnection(inLayer,hiddenLayer)   
	hidden_to_out = FullConnection(hiddenLayer,outLayer)
	for i in range(0,144):
		in_to_hidden.params[i] = weights[i]
	for j in range(0,48):
		hidden_to_out.params[j] = weights[j+144] 		
	network.addConnection(in_to_hidden)
	network.addConnection(hidden_to_out)
	network.sortModules()
	return network 		
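BackupNetwork falls back to a pickle file named "seed" holding at least 192 weights (144 input-to-hidden plus 48 hidden-to-output). A hypothetical companion that writes such a file from a trained network; SaveSeed is not part of the original code:

import pickle

def SaveSeed(network, path="seed"):
    # network.params concatenates the connection weights after sortModules();
    # for the [12, 12, 4] topology above that is 144 + 48 = 192 values
    pickle.dump(list(network.params), open(path, "w"))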
Example #15
def trainFunc(params):
    it, trainds, validds, input_size, hidden, func, eta, lmda, epochs = params
    print('Iter:', it, 'Epochs:', epochs, 'Hidden_size:', hidden, 'Eta:',
          eta, 'Lambda:', lmda, 'Activation:', func)

    # Build network
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(func(hidden, name='hidden'))
    n.addModule(LinearLayer(hidden, name='context'))
    n.addOutputModule(LinearLayer(1, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='in_to_hidden'))
    n.addConnection(FullConnection(n['hidden'], n['out'],
                                   name='hidden_to_out'))
    n.addRecurrentConnection(FullConnection(n['hidden'], n['context']))
    n.sortModules()

    trainer = BackpropTrainer(n,
                              trainds,
                              learningrate=eta,
                              weightdecay=lmda,
                              momentum=0.1,
                              shuffle=False)
    trainer.trainEpochs(epochs)
    pred = np.nan_to_num(n.activateOnDataset(validds))
    validerr = eval.calc_RMSE(validds['target'], pred)
    varscore = explained_variance_score(validds['target'], pred)
    return validerr, varscore, n
Example #16
 def xor_network(self, net):
     net.addInputModule(LinearLayer(2, name='in'))
     net.addModule(BiasUnit(name='bias'))
     net.addModule(LinearLayer(3, name='hidden'))
     net.addOutputModule(LinearLayer(1, name='out'))
     net.addConnection(FullConnection(net['in'], net['hidden']))
     net.addConnection(FullConnection(net['bias'], net['hidden']))
     net.addConnection(FullConnection(net['hidden'], net['out']))
Example #17
 def init_network(self, net):
     net.addInputModule(LinearLayer(2, 'in'))
     net.addModule(SigmoidLayer(3, 'hidden'))
     net.addOutputModule(LinearLayer(2, 'out'))
     net.addModule(BiasUnit(name='bias'))
     net.addConnection(FullConnection(net['in'], net['hidden']))
     net.addConnection(FullConnection(net['hidden'], net['out']))
     net.sortModules()
Example #18
 def adicionaConexoes(self):
     self.ligacao_entrada_oculta = FullConnection(self.camada_entrada,
                                                  self.camada_oculta)
     self.ligacao_oculta_saida = FullConnection(self.camada_oculta,
                                                self.camada_saida)
     self.network.addConnection(self.ligacao_oculta_saida)
     self.network.addConnection(self.ligacao_entrada_oculta)
     self.iniciaRede()
Example #19
def fit_predict(xTrain, yTrain, xTest, epochs, neurons):

    # Check edge cases
    if (not len(xTrain) == len(yTrain) or len(xTrain) == 0 or len(xTest) == 0
            or epochs <= 0):
        return

    # Randomize the training data (probably unnecessary, but PyBrain may not
    # shuffle the data itself, so shuffle here as a safety check)
    indices = np.arange(len(xTrain))
    np.random.shuffle(indices)

    trainSwapX = [xTrain[x] for x in indices]
    trainSwapY = [yTrain[x] for x in indices]

    supTrain = SupervisedDataSet(len(xTrain[0]), 1)
    for x in range(len(trainSwapX)):
        supTrain.addSample(trainSwapX[x], trainSwapY[x])

    # Construct the feed-forward neural network

    n = FeedForwardNetwork()

    inLayer = LinearLayer(len(xTrain[0]))
    hiddenLayer1 = SigmoidLayer(neurons)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    # Train the neural network on the training partition, validating
    # the training progress on the validation partition

    trainer = BackpropTrainer(n,
                              dataset=supTrain,
                              momentum=0.1,
                              learningrate=0.01,
                              verbose=False,
                              weightdecay=0.01)

    trainer.trainUntilConvergence(dataset=supTrain,
                                  maxEpochs=epochs,
                                  validationProportion=0.30)

    outputs = []
    for x in xTest:
        outputs.append(n.activate(x))

    return outputs
Example #20
 def testMdlstm(self):
     net = FeedForwardNetwork()
     net.addInputModule(LinearLayer(1, name='in'))
     net.addModule(MDLSTMLayer(1, 1, name='hidden'))
     net.addOutputModule(LinearLayer(1, name='out'))
     net.addConnection(FullConnection(net['in'], net['hidden']))
     net.addConnection(FullConnection(net['hidden'], net['out']))
     net.sortModules()
     self.equivalence_feed_forward(net, net.convertToFastNetwork())
Example #21
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name='i'))
    net.addModule(TanhLayer(hiddim, name='h'))
    net.addOutputModule(ThresholdLayer(outdim, name='o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
Example #22
    def __init__(self, x, y, direction):
        self.age = 0

        # position
        self.x = x
        self.y = y

        # number of fruits peeled
        self.num_peeled = 0
        self.num_eaten = 0
        self.num_moved = 0

        # orientation (0 - 359 degrees)
        self.direction = direction

        # touching anything
        self.touching = None
        self.sees = None

        # hunger sensor
        self.hunger = 2000
        self.avg_hunger = 0

        ###
        # Neural Network
        #
        # Inputs:
        # 1. sees_peeled_orange
        # 2. sees_unpeeled_orange
        # 3. sees_peeled_banana
        # 4. sees_unpeeled_banana
        # 5. sees_animat
        # 6. sees_wall
        # 7. hunger
        # 8. touching_peeled_orange
        # 9. touching_unpeeled_orange
        # 10. touching_peeled_banana
        # 11. touching_unpeeled_banana
        # 12. touching_animat
        # 13. touching_wall
        ###

        self.net = FeedForwardNetwork()
        self.net.addInputModule(LinearLayer(13, name='in'))
        self.net.addModule(SigmoidLayer(14, name='hidden'))
        self.net.addOutputModule(LinearLayer(5, name='out'))
        self.net.addConnection(
            FullConnection(self.net['in'], self.net['hidden']))
        self.net.addConnection(
            FullConnection(self.net['hidden'], self.net['out']))
        self.net.sortModules()

        # thresholds for deciding an action
        self.move_threshold = 0
        self.peel_threshold = 0
        self.eat_threshold = 0
Example #23
    def getFitness(self, smMatrix):  # store the sensorimotor (sm) state into memory
        fit = 0

        #Fitness function (3) *************************************************************
        #Record the sm data for this loop and consider its properties
        #print(smMatrix)
        #print(len(smMatrix))

        #net = buildNetwork(3,10,1, bias = True)
        net = FeedForwardNetwork()
        inp = LinearLayer(3)
        h1 = SigmoidLayer(10)
        outp = LinearLayer(1)
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        # create connections
        iToH = FullConnection(inp, h1)
        hToO = FullConnection(h1, outp)
        net.addConnection(iToH)
        net.addConnection(hToO)
        # finish up
        net.sortModules()

        ds = SupervisedDataSet(3, 1)

        trainSet = []
        for index_x, x in enumerate(smMatrix):
            if index_x > 0 and index_x < len(smMatrix) - 1:
                #trainSet.append( [smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2], smMatrix[index_x+1][3] ] )
                ds.addSample(([
                    smMatrix[index_x][0], smMatrix[index_x][1],
                    smMatrix[index_x][2]
                ]), (smMatrix[index_x + 1][3]))
        #print(trainSet)
        #print(ds)
        trainer = BackpropTrainer(net, ds, weightdecay=0.01)
        err = trainer.trainUntilConvergence(maxEpochs=100)
        #Visualize the network performance and structure.

        #nn = NNregression(ds, epoinc = 10)
        #nn.setupNN()
        #nn.runTraining()
        #self.pesos_conexiones(net)
        print("Input to hidden", iToH.params)
        #print("H to output", hToO.params)
        #print(iToH.params)
        n1 = iToH.params
        n2 = hToO.params
        fit = sum(n1) + sum(n2)  # total of all connection weights
        print fit
        return fit
Example #24
def runNeuralSimulation(dataTrain, dataTest, train_tfidf, test_tfidf):
    outFile = open('neuralLog.txt','a')
    outFile.write('-------------------------------------\n')
    outFile.write('train==> %d, %d \n'%(train_tfidf.shape[0],train_tfidf.shape[1]))
    outFile.write('test==>  %d, %d \n'%(test_tfidf.shape[0],test_tfidf.shape[1]))
    
    trainDS = getDataSetFromTfidf(train_tfidf, dataTrain.target)
    testDS = getDataSetFromTfidf(test_tfidf, dataTest.target)
    
    print "Number of training patterns: ", len(trainDS)
    print "Input and output dimensions: ", trainDS.indim, trainDS.outdim
    print "First sample (input, target, class):"
    print len(trainDS['input'][0]), trainDS['target'][0], trainDS['class'][0]
    
#     with SimpleTimer('time to train', outFile):
#         net = buildNetwork(trainDS.indim, trainDS.indim/2, trainDS.indim/4, trainDS.indim/8, trainDS.indim/16, 2, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
#         trainer = BackpropTrainer( net, dataset=trainDS, momentum=0.1, verbose=True, weightdecay=0.01, batchlearning=True)
    net = RecurrentNetwork()
    net.addInputModule(LinearLayer(trainDS.indim, name='in'))
    net.addModule(SigmoidLayer(trainDS.indim/2, name='hidden'))
    net.addModule(SigmoidLayer(trainDS.indim/4, name='hidden2'))
    net.addOutputModule(SoftmaxLayer(2, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden'], name='c1'))
    net.addConnection(FullConnection(net['hidden'], net['out'], name='c2'))
    net.addRecurrentConnection(FullConnection(net['hidden'], net['hidden'], name='c3'))
    net.addRecurrentConnection(FullConnection(net['hidden2'], net['hidden'], name='c4'))  # note: no connection feeds 'hidden2'
    net.sortModules()
    trainer = BackpropTrainer( net, dataset=trainDS, momentum=0.01, verbose=True, weightdecay=0.01)
    
    outFile.write('%s \n' % (net.__str__()))
    epochs = 2000
    with SimpleTimer('time to train %d epochs' % epochs, outFile):
        for i in range(epochs):
            trainer.trainEpochs(1)
            trnresult = percentError( trainer.testOnClassData(),
                                  trainDS['class'] )
            tstresult = percentError( trainer.testOnClassData(
               dataset=testDS ), testDS['class'] )
    
            print "epoch: %4d" % trainer.totalepochs, \
                  "  train error: %5.2f%%" % trnresult, \
                  "  test error: %5.2f%%" % tstresult
            outFile.write('%5.2f , %5.2f \n' % (100.0-trnresult, 100.0-tstresult))
                  
    predicted = trainer.testOnClassData(dataset=testDS)
    results = predicted == testDS['class'].flatten()
    wrong = []
    for i in range(len(results)):
        if not results[i]:
            wrong.append(i)
    print 'classifier got these wrong:'
    for i in wrong[:10]:
        print dataTest.data[i], dataTest.target[i]
        outFile.write('%s %d \n' % (dataTest.data[i], dataTest.target[i]))
Example #25
 def fillConnections(self, net, addedStack, stackToGo, layers):
     connections = []
     recurrentConnections = []
     if len(stackToGo) == 0:
         return connections, recurrentConnections
     ways = []
     futureStack = []
     for neuron in stackToGo:
         way = []
         for w in ways:
             if w.count(neuron) != 0:
                 way = w
             else:
                 continue
         if not way:
             way.append(neuron)
             ways.append(way)
         for connection in range(len(net[neuron])):
             if net[neuron][connection] == 1:
                 if addedStack.count(connection) != 0:
                     recurrentConnections.append(FullConnection(layers[neuron], layers[connection]))
                 else:
                     if stackToGo.count(connection) != 0:
                         if way.count(connection) != 0:
                             recurrentConnections.append(FullConnection(layers[neuron], layers[connection]))
                         else:
                             flag = True
                             for w in ways:
                                 if w.count(connection) != 0:
                                     connections.append(FullConnection(layers[neuron], layers[connection]))
                                     for n in w:
                                         way.append(n)
                                     ways.pop(ways.index(w))
                                     flag = False
                                     break
                                 else:
                                     continue
                             if flag:
                                 way.append(connection)
                                 connections.append(FullConnection(layers[neuron], layers[connection]))
                     else:
                         connections.append(FullConnection(layers[neuron], layers[connection]))
                         futureStack.append(connection)
             else:
                 continue
     for v in stackToGo:
         addedStack.append(v)
     c, rc = self.fillConnections(net, addedStack, futureStack, layers)
     for con in c:
         connections.append(con)
     for rcon in rc:
         recurrentConnections.append(rcon)
     return connections, recurrentConnections
Example #26
 def rec_three_layer_network(self, net):
     inlayer = LinearLayer(1, name='in')
     hiddenlayer = LinearLayer(1, name='hidden')
     outlayer = LinearLayer(1, name='out')
     con1 = FullConnection(inlayer, hiddenlayer)
     con2 = FullConnection(hiddenlayer, outlayer)
     con3 = FullConnection(hiddenlayer, hiddenlayer)
     net.addInputModule(inlayer)
     net.addModule(hiddenlayer)
     net.addOutputModule(outlayer)
     net.addConnection(con1)
     net.addConnection(con2)
     net.addRecurrentConnection(con3)
Example #27
 def lstm_network(self, net):
     i = LinearLayer(1, name='in')
     h = LSTMLayer(2, name='hidden')
     o = LinearLayer(1, name='out')
     b = BiasUnit(name='bias')
     net.addModule(b)
     net.addOutputModule(o)
     net.addInputModule(i)
     net.addModule(h)
     net.addConnection(FullConnection(i, h))
     net.addConnection(FullConnection(b, h))
     net.addRecurrentConnection(FullConnection(h, h))
     net.addConnection(FullConnection(h, o))
Example #28
def createNNLong(trndata):
    nn = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = TanhLayer(trndata.outdim, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
Example #29
    def constructNet(self, input, hidden, output):
        inputLayer = LinearLayer(input)
        hiddenLayer = TanhLayer(hidden)
        outputLayer = LinearLayer(output)

        self.net.addInputModule(inputLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outputLayer)

        conn1 = FullConnection(inputLayer, hiddenLayer)
        conn2 = FullConnection(hiddenLayer, outputLayer)

        self.net.addConnection(conn1)
        self.net.addConnection(conn2)
Example #30
    def __init__(self, hidden_layers, ally_champ_obj_list,
                 enemy_champ_obj_list):

        self.ally_champ_obj_list = ally_champ_obj_list
        self.enemy_champ_obj_list = enemy_champ_obj_list

        self.set_nodes()

        self.network = FeedForwardNetwork()

        connect_queue = Queue.Queue()

        for layer in xrange(0, hidden_layers):
            connect_queue.put(
                TanhLayer(self.input_node_count,
                          name='hidden_layer_{}'.format(layer)))

        connect_queue.put(SigmoidLayer(1, name='output_layer'))

        prev_layer = LinearLayer(self.input_node_count, name='input_layer')
        self.network.addInputModule(prev_layer)

        while not connect_queue.empty():

            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)

            bias = BiasUnit()
            bias_connection = FullConnection(
                bias,
                current_layer,
                name="bias_to_{}_connection".format(current_layer.name))
            self.network.addModule(bias)
            self.network.addConnection(bias_connection)

            connection = FullConnection(prev_layer,
                                        current_layer,
                                        name="{}_to_{}_connection".format(
                                            prev_layer.name,
                                            current_layer.name))
            self.network.addConnection(connection)

            prev_layer = current_layer

        self.network.sortModules()
Example #31
def classify(imSize, dataset, hidden_neurons, initial_error):

    tstdata, trndata = dataset.splitWithProportion(0.25)
    # gives a 0.75 training / 0.25 test split

    # imSize is the size of the input layer
    # define layer structures
    inLayer = LinearLayer(imSize)
    hiddenLayer = SigmoidLayer(imSize / 3)
    outLayer = SoftmaxLayer(1)

    # add layers to network
    net = FeedForwardNetwork()
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)

    # define conncections for network
    theta1 = FullConnection(inLayer, hiddenLayer)
    theta2 = FullConnection(hiddenLayer, outLayer)

    # add connections to network
    net.addConnection(theta1)
    net.addConnection(theta2)

    # sort modules
    net.sortModules()

    dataset._convertToOneOfMany()

    # note: the hand-built 'net' above is left unused; training runs on the
    # 'fnn' shortcut network built here instead
    fnn = buildNetwork(dataset.indim,
                       imSize / 3,
                       dataset.outdim,
                       outclass=SoftmaxLayer)

    # Create a backpropagation trainer using the dataset and the network
    trainer = BackpropTrainer(fnn, dataset)

    error = initial_error
    iteration = 0
    # iterate until the error drops below 0.01
    while error > 0.01:
        error = trainer.train()
        iteration += 1
        #print "Iteration: {0} Error {1}".format(iteration, error)
    print "Finished after: ", iteration, " iterations"
    print "With an error of: ", error
    return fnn
Example #32
    def top_layer(self, autoencoder, hidden_layers, next_layer, bias_layers):
        # connect 2nd to last and last
        last_layer = hidden_layers[-1].outmod
        autoencoder.addOutputModule(last_layer)
        connection = FullConnection(next_layer, last_layer)
        connection.params[:] = hidden_layers[-1].params
        autoencoder.addConnection(connection)
        if self.bias:
            bias = bias_layers[-1]
            bias_unit = bias.inmod
            autoencoder.addModule(bias_unit)
            connection = FullConnection(bias_unit, last_layer)
            connection.params[:] = bias.params
            autoencoder.addConnection(connection)

        autoencoder.sortModules()
        return autoencoder
Example #33
 def train(self):
     # We will build up a network piecewise in order to create a new dataset
     # for each layer.
     dataset = self.dataset
     piecenet = FeedForwardNetwork()
     piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
     # Add a bias
     bias = BiasUnit()
     piecenet.addModule(bias)
     # Add the first visible layer
     firstRbm = self.iterRbms().next()
     visible = copy.deepcopy(firstRbm.visible)
     piecenet.addModule(visible)
     # For saving the rbms and their inverses
     self.invRbms = []
     self.rbms = []
     for rbm in self.iterRbms():
         self.net.sortModules()
         # Train the first layer with an rbm trainer for `epoch` epochs.
         trainer = self.trainerKlass(rbm, dataset, self.cfg)
         for _ in xrange(self.epochs):
             trainer.train()
         self.invRbms.append(trainer.invRbm)
         self.rbms.append(rbm)
         # Add the connections and the hidden layer of the rbm to the net.
         hidden = copy.deepcopy(rbm.hidden)
         biascon = FullConnection(bias, hidden)
         biascon.params[:] = rbm.biasWeights
         con = FullConnection(visible, hidden)
         con.params[:] = rbm.weights
         
         piecenet.addConnection(biascon)
         piecenet.addConnection(con)
         piecenet.addModule(hidden)
         # Overwrite old outputs
         piecenet.outmodules = [hidden]
         piecenet.outdim = rbm.hiddenDim
         piecenet.sortModules()
         
         dataset = UnsupervisedDataSet(rbm.hiddenDim)
         for sample, in self.dataset:
             new_sample = piecenet.activate(sample)
             dataset.addSample(new_sample)
         visible = hidden
Example #34
    def fromDims(cls, visibledim, hiddendim, params=None, biasParams=None):
        """Return a restricted Boltzmann machine of the given dimensions with the
        given distributions."""
        net = FeedForwardNetwork()
        bias = BiasUnit('bias')
        visible = LinearLayer(visibledim, 'visible')
        hidden = SigmoidLayer(hiddendim, 'hidden')
        con1 = FullConnection(visible, hidden)
        con2 = FullConnection(bias, hidden)
        if params is not None:
            con1.params[:] = params
        if biasParams is not None:
            con2.params[:] = biasParams

        net.addInputModule(visible)
        net.addModule(bias)
        net.addOutputModule(hidden)
        net.addConnection(con1)
        net.addConnection(con2)
        net.sortModules()
        return cls(net)
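A hypothetical use of fromDims, assuming the enclosing class is PyBrain's Rbm (pybrain.structure.networks.rbm); explicit weights can also be passed in via params and biasParams:

from pybrain.structure.networks.rbm import Rbm

rbm = Rbm.fromDims(6, 2)  # 6 visible units, 2 hidden units, random weights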
Example #35
def buildNetwork():
    # note: this shadows pybrain.tools.shortcuts.buildNetwork; N, hiddenNodes
    # and CLASSES are assumed to be module-level globals
    # make network objects
    network = FeedForwardNetwork()
    inputLayer = LinearLayer(N*N)
    hiddenLayer = SigmoidLayer(hiddenNodes)
    outputLayer = SoftmaxLayer(len(CLASSES))
    
    # connect the network
    network.addInputModule(inputLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outputLayer)
    input_to_hidden = FullConnection(inputLayer, hiddenLayer)
    hidden_to_output = FullConnection(hiddenLayer, outputLayer)
    
    # connection weights determined by training
    input_to_hidden._setParameters([4.35074052, 0.54309903, -1.70788914, -0.37641228, 5.36652276, -5.95097706, -3.31479105, 1.48726254, 4.01124973, -2.23954635, -2.06566738, -1.05526604, 0.29454287, 0.34454901, 0.85887803, -1.54283834, -0.20189867, 2.39341244, -6.41137004, -5.21972223, 3.9786469, 2.62502833, -0.71182606, 1.49128153, 4.07595571,  3.84903291,  1.23479208, -1.84971481, -2.48423784, -2.89135785, 1.66872929, 2.9749277, 0.13388845, -6.34083074, 2.45295962, 4.37867807, -2.04605488, 5.83143133, 8.31634908, -1.15218811, -1.67941218, -2.21080881, 0.73068735, -1.38228386, -3.62022672, -0.93999936, 0.93052909, -3.83909343, -4.79640119, 3.39088663, 1.88639523, -2.10136095, -5.79122022, -0.39145108, -3.16506474, -0.99953878, -4.03241107, 2.27154235, 1.29838529, 1.65980538, 3.56765327, 0.2334956, -1.50648655, 3.43253185, 1.96654523, -0.52561201, 2.16708779, 3.43269409, -0.89938682, -4.57967624, -0.54859459, -0.02998314, 0.57531064, -3.22645606, -2.01130185, -0.92961716, 2.72892938, 1.70440582, -0.74359544, -1.75646309])
    hidden_to_output._setParameters([5.23726518, 4.23002634, -10.17060945, 0.24449631, -0.40430924, -5.10170453, -1.97718355, -3.29225334, 9.76797256, -0.31218471, -2.26134518, 8.19478967, 4.64314258, -5.10091146, -6.16245232, 5.12979712, -6.02720454, 0.98557444, -7.00566727, 5.45351844, -6.94985525, -5.76529963, 10.43833636, -0.35966021, -0.53525741])

    network.addConnection(input_to_hidden)
    network.addConnection(hidden_to_output)
    network.sortModules()               # initialize    
    return network
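A hypothetical prediction step using the pre-trained weights baked in above; N and CLASSES are the same assumed module-level globals, and the zero vector is a placeholder input:

net = buildNetwork()
sample = [0.0] * (N * N)              # placeholder for a flattened image
scores = net.activate(sample)
predicted = CLASSES[scores.argmax()]  # class with the highest softmax score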
Example #36
    def _train(self):
        hidden_layers = []
        bias_layers = []
        compressed_data = copy.copy(self.unsupervised) # it isn't compressed at this point, but will be later on
        compressed_supervised = self.supervised

        mid_layers = self.layers[1:-1] # remove the first and last
        for i,current in enumerate(mid_layers):
            prior = self.layers[i] # This accesses the layer before the "current" one, since the indexing in mid_layers and self.layers is offset by 1

            # build the NN with a bottleneck
            bottleneck = FeedForwardNetwork()
            in_layer = LinearLayer(prior)
            hidden_layer = self.hidden_layer(current)
            out_layer = self.hidden_layer(prior)
            bottleneck.addInputModule(in_layer)
            bottleneck.addModule(hidden_layer)
            bottleneck.addOutputModule(out_layer)
            in_to_hidden = FullConnection(in_layer, hidden_layer)
            hidden_to_out = FullConnection(hidden_layer, out_layer)
            bottleneck.addConnection(in_to_hidden)
            bottleneck.addConnection(hidden_to_out)
            if self.bias:
                bias1 = BiasUnit()
                bias2 = BiasUnit()
                bottleneck.addModule(bias1)
                bottleneck.addModule(bias2)
                bias_in = FullConnection(bias1, hidden_layer)
                bias_hidden = FullConnection(bias2, out_layer)
                bottleneck.addConnection(bias_in)
                bottleneck.addConnection(bias_hidden)
            bottleneck.sortModules()

            # train the bottleneck
            print "\n...training for layer ", prior, " to ", current
            ds = SupervisedDataSet(prior,prior)
            if self.dropout_on:
                noisy_data, originals = self.dropout(compressed_data, noise=0.2, bag=1)
                for i,n in enumerate(noisy_data):
                    original = originals[i]
                    ds.addSample(n, original)
            else:
                for d in (compressed_data): ds.addSample(d, d)
            trainer = BackpropTrainer(bottleneck, dataset=ds, learningrate=0.001, momentum=0.05, verbose=self.verbose, weightdecay=0.05)
            trainer.trainEpochs(self.compression_epochs)
            if self.verbose: print "...data:\n...", compressed_data[0][:8], "\nreconstructed to:\n...", bottleneck.activate(compressed_data[0])[:8]

            hidden_layers.append(in_to_hidden)
            if self.bias: bias_layers.append(bias_in)

            # use the params from the bottleneck to compress the training data
            compressor = FeedForwardNetwork()
            compressor.addInputModule(in_layer)
            compressor.addOutputModule(hidden_layer) # use the hidden layer from above
            compressor.addConnection(in_to_hidden)
            compressor.sortModules()
            compressed_data = [compressor.activate(d) for d in compressed_data]
            compressed_supervised = [compressor.activate(d) for d in compressed_supervised]

            self.nn.append(compressor)

        # Train the softmax layer
        print "\n...training for softmax layer "
        softmax = FeedForwardNetwork()
        in_layer = LinearLayer(self.layers[-2])
        out_layer = self.final_layer(self.layers[-1])
        softmax.addInputModule(in_layer)
        softmax.addOutputModule(out_layer)
        in_to_out = FullConnection(in_layer, out_layer)
        softmax.addConnection(in_to_out)
        if self.bias:
            bias = BiasUnit()
            softmax.addModule(bias)
            bias_in = FullConnection(bias, out_layer)
            softmax.addConnection(bias_in)
        softmax.sortModules()

        # see if it's for classification or regression
        if self.final_layer == SoftmaxLayer:
            print "...training for a softmax network"
            ds = ClassificationDataSet(self.layers[-2], 1)
        else:
            print "...training for a regression network"
            ds = SupervisedDataSet(self.layers[-2], self.layers[-1])
        bag = 1
        noisy_data, _ = self.dropout(compressed_supervised, noise=0.5, bag=bag)
        bagged_targets = []
        for t in self.targets:
            for b in range(bag):
                bagged_targets.append(t)
        for i,d in enumerate(noisy_data):
            target = bagged_targets[i]
            ds.addSample(d, target)

        # see if it's for classification or regression
        if self.final_layer == SoftmaxLayer:
            ds._convertToOneOfMany()

        # TODO make these configurable
        trainer = BackpropTrainer(softmax, dataset=ds, learningrate=0.001, momentum=0.05, verbose=self.verbose, weightdecay=0.05)
        trainer.trainEpochs(self.compression_epochs)
        self.nn.append(softmax)
        hidden_layers.append(in_to_out)
        if self.bias: bias_layers.append(bias_in)

        # Recreate the whole thing
        # connect the first two
        autoencoder = FeedForwardNetwork()
        first_layer = hidden_layers[0].inmod
        next_layer = hidden_layers[0].outmod
        autoencoder.addInputModule(first_layer)
        connection = FullConnection(first_layer, next_layer)
        connection.params[:] = hidden_layers[0].params
        autoencoder.addConnection(connection)

        # decide whether this should be the output layer or not
        if self.autoencoding_only and (len(self.layers) <= 3): # TODO change this to 2 when you aren't using the softmax above
            autoencoder.addOutputModule(next_layer)
        else:
            autoencoder.addModule(next_layer)
        if self.bias:
            bias = bias_layers[0]
            bias_unit = bias.inmod
            autoencoder.addModule(bias_unit)
            connection = FullConnection(bias_unit, next_layer)
            connection.params[:] = bias.params
            autoencoder.addConnection(connection)

        # connect the middle layers
        for i,h in enumerate(hidden_layers[1:-1]):
            new_next_layer = h.outmod

            # decide whether this should be the output layer or not
            if self.autoencoding_only and i == (len(hidden_layers) - 3):
                autoencoder.addOutputModule(new_next_layer)
            else:
                autoencoder.addModule(new_next_layer)
            connection = FullConnection(next_layer, new_next_layer)
            connection.params[:] = h.params
            autoencoder.addConnection(connection)
            next_layer = new_next_layer

            if self.bias:
                bias = bias_layers[i+1]
                bias_unit = bias.inmod
                autoencoder.addModule(bias_unit)
                connection = FullConnection(bias_unit, next_layer)
                connection.params[:] = bias.params
                autoencoder.addConnection(connection)

        return autoencoder, hidden_layers, next_layer, bias_layers
Example #37
def simple_network_builder(layers,partial_path):
    n = FeedForwardNetwork()
    ## create the network
    inlayer = LinearLayer(layers[0], name="In")
    hidden_one = TanhLayer(layers[1], name="Hidden 1")
    hidden_two = TanhLayer(layers[2], name="Hidden 2")
    b1 = BiasUnit(name="Bias")
    output = LinearLayer(1, name="Out")
    n.addInputModule(inlayer)
    n.addModule(hidden_one)
    n.addModule(hidden_two)
    n.addModule(b1)
    n.addOutputModule(output)
    in_to_one = FullConnection(inlayer,hidden_one)
    one_to_two = FullConnection(hidden_one,hidden_two)
    two_to_out = FullConnection(hidden_two,output)
    b1_to_one = FullConnection(b1,hidden_one)
    b2_to_two = FullConnection(b1,hidden_two)
    b3_to_output = FullConnection(b1,output)
    ### load weights and biases
    in_to_one._setParameters(np.array((csv_loader(partial_path + '_w1.csv'))))
    one_to_two._setParameters(np.array(csv_loader(partial_path + '_w2.csv')))
    two_to_out._setParameters(np.array(csv_loader(partial_path + '_w3.csv')))
    b1_to_one._setParameters(np.array(csv_loader(partial_path + '_b1.csv')))
    b2_to_two._setParameters(np.array(csv_loader(partial_path + '_b2.csv')))
    b3_to_output._setParameters(np.array(csv_loader(partial_path + '_b3.csv')))

    ### connect the network topology
    n.addConnection(in_to_one)
    n.addConnection(one_to_two)
    n.addConnection(two_to_out)
    n.addConnection(b1_to_one)
    n.addConnection(b2_to_two)
    n.addConnection(b3_to_output)

    ### finalize network object
    n.sortModules()

    return n
Example #38
def StateToActionNetwork():
	# build a [12, 12, 4] network whose initial weights come from the baseline policy
	
	from pybrain.structure import FeedForwardNetwork,LinearLayer,TanhLayer,FullConnection
	network = FeedForwardNetwork()
	inLayer= LinearLayer(12)
	hiddenLayer = TanhLayer(12)
	outLayer = TanhLayer(4)
	network.addInputModule(inLayer)
	network.addModule(hiddenLayer)
	network.addOutputModule(outLayer)
	# initial weights
	initial_weights = """-0.82254189 -1.13179445 -0.05073786  0.26591425 -0.1284119   0.08943027
 			      0.42276271 -0.7071644  -1.45276617 -0.44496227  0.59200697 -0.76490859
 			     -0.82167338 -0.39902595 -0.34932747 -1.5301132   0.4874284  -1.75511689
  			      0.48486169  0.81363237 -0.15560306 -1.28014402  0.432026   -0.245171
  			      0.78031838 -0.15817382  0.11117014  1.04968207 -0.2928946  -1.83646693
  			      1.44371163 -0.16511239 -0.28240856  0.66388542 -1.82       -0.31922762
 			     -1.62204838 -0.12312791  0.22280484  0.7411014  -0.05863777 -0.5328774
 			     -2.78973853  0.46491466  1.42202806 -2.93244732 -0.24784862 -1.09437463
 			     -0.07650338 -0.22306763 -0.0183736   1.78186929 -0.67513165 -0.55366751
 			      0.5925835  -1.82412031 -0.05014905 -0.53091446 -1.00910792  1.35670824
 			      1.58071742 -1.65940386  1.17890985 -1.64222732  0.56357161 -0.304062
 			      0.83628703  0.80113178 -0.09179465  0.74009935 -0.34146348  0.45395903
 			     -0.80083394 -0.3178608  -0.46622104 -1.59866551 -0.3893681  -0.67853351
 			     -0.61304091 -0.12200701  1.65532154 -0.33992727 -1.56087088 -2.02031568
  			      1.02997029  1.21253299 -0.06733012  0.28724485 -0.27014336 -0.83057191
 			      0.39323538  1.30558669  0.26726448 -0.65961121  1.54584633  0.09210854
			     -0.99081429  0.59634696  0.08429763 -0.1085911   0.01785386  1.78681155
 			      0.87636657 -1.2413246   0.08531575 -0.92648945 -0.20240477  0.16277405
 			     -0.23555818  0.49663751  2.75629226  0.03482599 -0.64754342 -0.61886618
  			      0.77400906 -0.28588506 -0.18226857 -1.15349435  0.09339758 -0.84021149
 			     -0.3769615  -2.71952741 -1.92806955  1.33770349  0.46549708  1.0234573
  			      0.77816064 -0.36149316  0.65660944 -0.79934234 -0.85783489 -0.10840895
 			      1.35537789 -1.50803792  0.10239295  0.20335467  0.07891178 -0.56889871
  			      0.59446914 -1.07917626 -1.44565869  0.46396979 -1.0022648  -0.36037274
  			      0.04604105  0.10828613 -0.09156346  0.05961271  0.07350161  0.03483664
  			      0.01918546  0.029282   -0.03780433  0.01140018 -0.04217829  0.12422228
  			      0.10494436  0.03090324  0.02751887  0.1757922  -0.1175768   0.04984245
                              0.03805592  0.07699565  0.0927753   0.03017363 -0.02785207 -0.08504634
 		              0.23548627 -0.024849    0.08893206 -0.02284833  0.04222917 -0.01530065
 			     -0.0336135   0.08849411 -0.02291273 -0.05779803 -0.01868145  0.00836078
 			      0.08720535 -0.11581814 -0.03772317  0.05162675  0.10993543  0.08677515
     			     -0.03086664 -0.02367544  0.10032227  0.15426584 -0.03793561 -0.07125042"""
  
	weights = []
	for i in initial_weights.split(" "):
		num = i.strip().replace('\t','')
		num = num.replace('\n',' ')
		num = num.strip()
		try:
			weights.append(float(num))
		except ValueError:
			pass
 
	 
	in_to_hidden = FullConnection(inLayer,hiddenLayer)   
	hidden_to_out = FullConnection(hiddenLayer,outLayer)
	for i in range(0,144):
		in_to_hidden.params[i] = weights[i]

	for j in range(0,48):
		hidden_to_out.params[j] = weights[j+144] 		
	network.addConnection(in_to_hidden)
	network.addConnection(hidden_to_out)
	network.sortModules()
	return network 		
Example #39
    def _train(self):
        global bias_in
        hidden_layers = []
        bias_layers = []
        compressed_data = copy.copy(self.unsupervised)  # it isn't compressed at this point, but will be later on
        compressed_supervised = self.supervised

        mid_layers = self.layers[1:-1]  # remove the first and last
        for i, current in enumerate(mid_layers):
            prior = self.layers[i]
            # This accesses the layer before the "current" one,
            # since the indexing in mid_layers and self.layers is offset by 1
            # print "Compressed data at stage {0} {1}".format(i, compressed_data)

            """ build the NN with a bottleneck """
            bottleneck = FeedForwardNetwork()
            in_layer = LinearLayer(prior)
            hidden_layer = self.hidden_layer(current)
            out_layer = self.hidden_layer(prior)
            bottleneck.addInputModule(in_layer)
            bottleneck.addModule(hidden_layer)
            bottleneck.addOutputModule(out_layer)
            in_to_hidden = FullConnection(in_layer, hidden_layer)
            hidden_to_out = FullConnection(hidden_layer, out_layer)
            bottleneck.addConnection(in_to_hidden)
            bottleneck.addConnection(hidden_to_out)
            if self.bias:
                bias1 = BiasUnit()
                bias2 = BiasUnit()
                bottleneck.addModule(bias1)
                bottleneck.addModule(bias2)
                bias_in = FullConnection(bias1, hidden_layer)
                bias_hidden = FullConnection(bias2, out_layer)
                bottleneck.addConnection(bias_in)
                bottleneck.addConnection(bias_hidden)
            bottleneck.sortModules()
            print("3here network is okay bottleneck")  # ====================================

            """ train the bottleneck """
            print "\n...training for layer ", prior, " to ", current
            ds = SupervisedDataSet(prior, prior)
            print ("5here supervised dataset was built")  # ==============================
            print("8.====================compressed_data_size=============")
            print compressed_data.__sizeof__()
            if self.dropout_on:
                noisy_data, originals = self.dropout(compressed_data, noise=0.2, bag=1, debug=False)
                print("6here dropout is begin processing and it's okay")  # ==============================
                print "=============noisylen================"
                print len(noisy_data)  # =====
                for i, n in enumerate(noisy_data):
                    original = originals[i]

                    ds.addSample(n, original)

                print("7.drop out add nosizy sample success")  # =============================
            else:
                for d in (compressed_data):
                    ds.addSample(d, d)
            print("4here begin bp bp bp")  # ============================================
            trainer = BackpropTrainer(bottleneck, dataset=ds, learningrate=0.001, momentum=0.05,
                                      verbose=self.verbose, weightdecay=0.05)
            trainer.trainEpochs(self.compression_epochs)
            if self.verbose:
                print "...data:\n...", compressed_data[0][:10], \
                    "\nreconstructed to:\n...", bottleneck.activate(compressed_data[0])[:10]
                # only the first 10 of the 95 MFCC dimensions are shown

            hidden_layers.append(in_to_hidden)
            if self.bias: bias_layers.append(bias_in)

            """ use the params from the bottleneck to compress the training data """
            compressor = FeedForwardNetwork()
            compressor.addInputModule(in_layer)
            compressor.addOutputModule(hidden_layer)  # use the hidden layer from above
            compressor.addConnection(in_to_hidden)
            compressor.sortModules()
            compressed_data = [compressor.activate(d) for d in compressed_data]
            # del compressed_data
            compressed_supervised = [compressor.activate(d) for d in compressed_supervised]
            # del compressed_supervised

            self.nn.append(compressor)

        """ Train the softmax layer """
        print "\n...training for softmax layer "
        softmax = FeedForwardNetwork()
        in_layer = LinearLayer(self.layers[-2])
        out_layer = self.final_layer(self.layers[-1])
        softmax.addInputModule(in_layer)
        softmax.addOutputModule(out_layer)
        in_to_out = FullConnection(in_layer, out_layer)
        softmax.addConnection(in_to_out)
        if self.bias:
            bias = BiasUnit()
            softmax.addModule(bias)
            bias_in = FullConnection(bias, out_layer)
            softmax.addConnection(bias_in)
        softmax.sortModules()

        # see if it's for classification or regression
        if self.final_layer == SoftmaxLayer:
            print "...training for a softmax network"
            ds = ClassificationDataSet(self.layers[-2], 1)
        else:
            print "...training for a regression network"
            ds = SupervisedDataSet(self.layers[-2], self.layers[-1])
        bag = 1
        noisy_data, _ = self.dropout(compressed_supervised, noise=0.5, bag=bag, debug=True)
        bagged_targets = []
        for t in self.targets:
            for b in range(bag):
                bagged_targets.append(t)

        for i, d in enumerate(noisy_data):
            target = bagged_targets[i]
            ds.addSample(d, target)

        # see if it's for classification or regression
        if self.final_layer == SoftmaxLayer:
            ds._convertToOneOfMany()

        trainer = BackpropTrainer(softmax, dataset=ds, learningrate=0.001,
                                  momentum=0.05, verbose=self.verbose, weightdecay=0.05)
        trainer.trainEpochs(self.compression_epochs)
        self.nn.append(softmax)
        # print "ABOUT TO APPEND"
        # print len(in_to_out.params)
        hidden_layers.append(in_to_out)
        if self.bias:
            bias_layers.append(bias_in)

        """ Recreate the whole thing """
        # print "hidden layers: " + str(hidden_layers)
        # print "bias layers: " + str(bias_layers)
        # print "len hidden layers: " + str(len(hidden_layers))
        # print "len bias layers: " + str(len(bias_layers))
        # connect the first two
        autoencoder = FeedForwardNetwork()
        first_layer = hidden_layers[0].inmod
        next_layer = hidden_layers[0].outmod
        autoencoder.addInputModule(first_layer)
        connection = FullConnection(first_layer, next_layer)
        connection.params[:] = hidden_layers[0].params
        autoencoder.addConnection(connection)

        # decide whether this should be the output layer or not
        if self.autoencoding_only and (len(self.layers) <= 3):
            #  TODO change this to 2 when you aren't using the softmax above
            autoencoder.addOutputModule(next_layer)
        else:
            autoencoder.addModule(next_layer)
        if self.bias:
            bias = bias_layers[0]
            bias_unit = bias.inmod
            autoencoder.addModule(bias_unit)
            connection = FullConnection(bias_unit, next_layer)
            # print bias.params
            connection.params[:] = bias.params
            autoencoder.addConnection(connection)
            # print connection.params

        # connect the middle layers
        for i, h in enumerate(hidden_layers[1:-1]):
            new_next_layer = h.outmod

            # decide whether this should be the output layer or not
            if self.autoencoding_only and i == (len(hidden_layers) - 3):
                autoencoder.addOutputModule(new_next_layer)
            else:
                autoencoder.addModule(new_next_layer)
            connection = FullConnection(next_layer, new_next_layer)
            connection.params[:] = h.params
            autoencoder.addConnection(connection)
            next_layer = new_next_layer

            if self.bias:
                bias = bias_layers[i + 1]
                bias_unit = bias.inmod
                autoencoder.addModule(bias_unit)
                connection = FullConnection(bias_unit, next_layer)
                connection.params[:] = bias.params
                autoencoder.addConnection(connection)

        return autoencoder, hidden_layers, next_layer, bias_layers