def _init_net(params_len, output_layer_num, hidden_size):
    # Init and train the network.
    net = FeedForwardNetwork()
    # Next, we're constructing the input, hidden and output layers.
    inLayer = LinearLayer(params_len)
    hiddenLayer = SigmoidLayer(hidden_size)
    hiddenLayer1 = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(output_layer_num)
    # (Note that we could also have used a hidden layer of type TanhLayer,
    # LinearLayer, etc.) Let's add them to the network:
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    # We still need to explicitly determine how they should be connected.
    # For this we use the most common connection type, which produces full
    # connectivity between two layers (or Modules, in general): the
    # FullConnection.
    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden2out = FullConnection(hiddenLayer1, outLayer)
    net.addConnection(in2hidden)
    net.addConnection(hidden2hidden)
    net.addConnection(hidden2out)
    # All the elements are in place now, so we can do the final step that
    # makes our MLP usable: call the sortModules() method.
    net.sortModules()
    # net = buildNetwork(params_len, hidden_size, 601, bias=True)
    return net

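# Usage sketch for _init_net (illustrative sizes, not from the original
# source; assumes the PyBrain imports used above are in scope, e.g.
# from pybrain.structure import FeedForwardNetwork, LinearLayer,
# SigmoidLayer, FullConnection):
demo_net = _init_net(params_len=4, output_layer_num=2, hidden_size=8)
print(demo_net.activate([0.1, 0.2, 0.3, 0.4]))  # length-2 output array
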
def Update(self, hiden, h):
    self.net = FeedForwardNetwork()
    self.inputlayer = LinearLayer(self.inputsize, "Input")
    self.net.addInputModule(self.inputlayer)
    self.outputlayer = LinearLayer(self.outputsize, "Output")
    self.net.addOutputModule(self.outputlayer)
    self.hidenlayers = []
    for i in range(len(hiden)):
        self.hidenlayers.append(SigmoidLayer(hiden[i], "hiden%s" % i))
        self.net.addModule(self.hidenlayers[-1])
    self.net.addConnection(
        FullConnection(self.inputlayer, self.outputlayer))
    for i in range(len(self.hidenlayers)):
        self.net.addConnection(
            FullConnection(self.inputlayer, self.hidenlayers[i]))
        self.net.addConnection(
            FullConnection(self.hidenlayers[i], self.outputlayer))
    for i in range(len(self.hidenlayers)):
        for j in range(i + 1, len(self.hidenlayers)):
            if i == h:
                # Layer h only feeds forward its first hiden[i] - 1 units;
                # every other pair gets a full connection. (The original
                # had identical branches for i < h and i > h; they are
                # merged here.)
                self.net.addConnection(
                    FullConnection(self.hidenlayers[i], self.hidenlayers[j],
                                   inSliceTo=hiden[i] - 1))
            else:
                self.net.addConnection(
                    FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
    # self.print_conections(self.net)
    self.net.sortModules()
    self.hiden = hiden

def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except Exception:
            # Skip malformed samples.
            continue
    print(str(len(ds)) + ' points successfully acquired')

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.1,
                              verbose=True)
    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)
    return net

def create(number_of_hidden_layers, activation_function, input_length,
           output_length, network_file, classify):
    n = FeedForwardNetwork()
    in_layer = LinearLayer(input_length)
    n.addInputModule(in_layer)

    layer_to_connect_to = in_layer
    for x in range(0, number_of_hidden_layers):
        if activation_function == 'sigmoid':
            hidden_layer = SigmoidLayer(input_length)
        else:
            hidden_layer = TanhLayer(input_length)
        n.addModule(hidden_layer)
        hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                 hidden_layer)
        n.addConnection(hidden_layer_connection)
        layer_to_connect_to = hidden_layer

    if classify:
        out_layer = SoftmaxLayer(output_length)
    else:
        out_layer = LinearLayer(output_length)
    n.addOutputModule(out_layer)
    hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
    n.addConnection(hidden_to_out)
    n.sortModules()
    save_network(n, network_file)

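# Usage sketch for create() (illustrative arguments; save_network and the
# PyBrain layer/connection imports are assumed to be defined elsewhere in
# the original module):
create(number_of_hidden_layers=2, activation_function='sigmoid',
       input_length=16, output_length=4, network_file='net.xml',
       classify=True)  # classify=True selects a SoftmaxLayer output
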
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600    # because at input we have a 40x40 picture
    NUMBER_OF_HIDDEN_LAYERS = 10    # size of the single hidden layer
                                    # (despite the name, this is a neuron
                                    # count, not a layer count)
    NUMBER_OF_OUTPUT_CLASSES = 8    # because at output we have 8 classes
    inLayer = LinearLayer(NUMBER_OF_INPUT_BYTES)
    hiddenLayer = SigmoidLayer(NUMBER_OF_HIDDEN_LAYERS)
    outLayer = LinearLayer(NUMBER_OF_OUTPUT_CLASSES)
    # Create connections between layers: a FullConnection links each neuron
    # of one layer to each neuron of the other
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Add layers to our network
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)
    # Add connections to the network
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    # Sort modules to make the multilayer perceptron usable
    network.sortModules()
    # Prepare an input array and activate the network
    d_letter_array = read_array("d")
    network.activate(d_letter_array)
    return network

def buildNonGravityNet(recurrent=False):
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    l1 = LinearLayer(2)
    l2 = LinearLayer(3)
    s1 = SigmoidLayer(2)
    l3 = LinearLayer(1)
    net.addInputModule(l1)
    net.addModule(l2)
    net.addModule(s1)
    net.addOutputModule(l3)
    net.addConnection(IdentityConnection(l1, l2, outSliceFrom=1))
    net.addConnection(IdentityConnection(l1, l2, outSliceTo=2))
    net.addConnection(IdentityConnection(l2, l3, inSliceFrom=2))
    net.addConnection(IdentityConnection(l2, l3, inSliceTo=1))
    net.addConnection(IdentityConnection(l1, s1))
    net.addConnection(IdentityConnection(l2, s1, inSliceFrom=1))
    net.addConnection(IdentityConnection(s1, l3, inSliceFrom=1))
    if recurrent:
        net.addRecurrentConnection(IdentityConnection(s1, l1))
        net.addRecurrentConnection(
            IdentityConnection(l2, l2, inSliceFrom=1, outSliceTo=2))
    net.sortModules()
    return net

def getNetwork(trndata):
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(trndata.indim, name='in'))
    n.addModule(SigmoidLayer(100, name='hidden'))
    n.addOutputModule(LinearLayer(trndata.outdim, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(
        FullConnection(n['hidden'], n['hidden'], name='c3'))
    n.sortModules()
    # fnn = buildNetwork(trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer)
    trainer = BackpropTrainer(n, dataset=trndata, momentum=0.1,
                              verbose=True, weightdecay=0.01)
    # TODO: return network and trainer here. Make another function for training.
    # for i in range(20):
    #     trainer.trainEpochs(1)
    # trainer.trainUntilConvergence(maxEpochs=100)
    # trnresult = percentError(trainer.testOnClassData(), trndata['class'])
    # tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
    # print("epoch: %4d" % trainer.totalepochs,
    #       " train error: %5.2f%%" % trnresult)
    # out = fnn.activateOnDataset(tstdata)
    # out = out.argmax(axis=1)  # the highest output activation gives the class
    return (n, trainer)

def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()
    # Declare the input, hidden and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    # Declare the connections between nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Wire the connections into the network's layers
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    # The network is ready to use
    n.sortModules()
    return n

def build_deep_network(linear_dimensions):
    neural_net = FeedForwardNetwork()
    inLayer = LinearLayer(linear_dimensions)
    hiddenLayer_1 = SigmoidLayer(100)
    hiddenLayer_2 = SigmoidLayer(100)
    hiddenLayer_3 = SigmoidLayer(50)
    outLayer = LinearLayer(1)

    neural_net.addInputModule(inLayer)
    neural_net.addModule(hiddenLayer_1)
    neural_net.addModule(hiddenLayer_2)
    neural_net.addModule(hiddenLayer_3)
    neural_net.addOutputModule(outLayer)

    in_to_hidden_1 = FullConnection(inLayer, hiddenLayer_1)
    hidden_1_to_hidden_2 = FullConnection(hiddenLayer_1, hiddenLayer_2)
    hidden_2_to_hidden_3 = FullConnection(hiddenLayer_2, hiddenLayer_3)
    hidden_3_to_output = FullConnection(hiddenLayer_3, outLayer)

    neural_net.addConnection(in_to_hidden_1)
    neural_net.addConnection(hidden_1_to_hidden_2)
    neural_net.addConnection(hidden_2_to_hidden_3)
    neural_net.addConnection(hidden_3_to_output)
    neural_net.sortModules()
    return neural_net

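# Usage sketch for build_deep_network (illustrative input width; assumes
# the PyBrain imports are in scope):
deep_net = build_deep_network(linear_dimensions=8)
print(deep_net.activate([0.5] * 8))  # single regression output
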
def initalize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    # Old code (regression)
    n = FeedForwardNetwork()
    # n = buildNetwork(2, 3, data.outdim, outclass=SoftmaxLayer)
    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
    n.sortModules()
    return n

def getMultiplayerFeedForwardNetwork(inputLayerLen, hiddenLayersLenList,
                                     outLayerLen=1):
    # create net
    net = FeedForwardNetwork()
    # create layers
    inLayer = LinearLayer(inputLayerLen, name='inLinearLayer')
    hiddenLayers = [SigmoidLayer(n, name='sigmoidLayer' + str(i))
                    for i, n in enumerate(hiddenLayersLenList)]
    outLayer = LinearLayer(outLayerLen, name='outLinearLayer')
    # add layers to net
    net.addInputModule(inLayer)
    for l in hiddenLayers:
        net.addModule(l)
    net.addOutputModule(outLayer)
    # create connections between consecutive layers
    layers = [inLayer] + hiddenLayers + [outLayer]
    connections = [FullConnection(layers[i], layers[i + 1],
                                  name='connection' + str(i))
                   for i in range(len(layers) - 1)]
    # add connections to net
    for c in connections:
        net.addConnection(c)
    # do some required initialization
    net.sortModules()
    return net

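# Usage sketch for getMultiplayerFeedForwardNetwork (illustrative sizes):
# a 6-input MLP with two sigmoid hidden layers of 8 and 4 units.
mlp = getMultiplayerFeedForwardNetwork(6, [8, 4])
print(mlp.activate([0.1] * 6))
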
def __init__(self, arg):
    self.inputsize = arg[0]
    self.outputsize = arg[-1]
    self.hiden = arg[1:-1]
    self.err = 1
    self.old_err = 1
    b = []
    b.append(self.inputsize)
    b += self.hiden
    b.append(self.outputsize)
    # print("%s, %s, %s, hiddenclass=TanhLayer"
    #       % (self.inputsize, self.hiden, self.outputsize))
    self.net = FeedForwardNetwork()
    self.inputlayer = LinearLayer(self.inputsize, "Input")
    self.net.addInputModule(self.inputlayer)
    self.outputlayer = LinearLayer(self.outputsize, "Output")
    self.net.addOutputModule(self.outputlayer)
    self.hidenlayers = []
    for i in range(len(self.hiden)):
        self.hidenlayers.append(SigmoidLayer(self.hiden[i], "hiden%s" % i))
        self.net.addModule(self.hidenlayers[-1])
    self.net.addConnection(
        FullConnection(self.inputlayer, self.outputlayer))
    for i in range(len(self.hidenlayers)):
        self.net.addConnection(
            FullConnection(self.inputlayer, self.hidenlayers[i]))
        self.net.addConnection(
            FullConnection(self.hidenlayers[i], self.outputlayer))
    for i in range(len(self.hidenlayers)):
        for j in range(i + 1, len(self.hidenlayers)):
            self.net.addConnection(
                FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
    # self.print_conections(self.net)
    self.net.sortModules()
    self.ds = SupervisedDataSet(self.inputsize, self.outputsize)

def classicNeuralNetwork(self, features, labels, autoencoder=False):
    dataSet = SupervisedDataSet(features.shape[1], 1)
    dataSet.setField('input', features)
    if autoencoder:
        labels = features
    dataSet.setField('target', labels)
    tstdata, trndata = dataSet.splitWithProportion(0.25)
    print(features.shape)
    simpleNeuralNetwork = _buildNetwork(
        (LinearLayer(features.shape[1], 'in'),),
        (SigmoidLayer(20, 'hidden0'),),
        (LinearLayer(labels.shape[1], 'out'),),
        bias=True)
    trainer = BackpropTrainer(simpleNeuralNetwork, dataset=trndata,
                              verbose=True)  # , momentum=0.1)
    trainer.trainUntilConvergence(maxEpochs=15)

    trnresult = percentError(trainer.testOnData(dataset=trndata),
                             trndata['target'])
    tstresult = percentError(trainer.testOnData(dataset=tstdata),
                             tstdata['target'])
    print("epoch: %4d" % trainer.totalepochs,
          " train error: %5.2f%%" % trnresult,
          " test error: %5.2f%%" % tstresult)
    self.neuralNetwork = simpleNeuralNetwork

def init_network(self, net):
    net.addInputModule(LinearLayer(2, 'in'))
    net.addModule(SigmoidLayer(3, 'hidden'))
    net.addOutputModule(LinearLayer(2, 'out'))
    # Note: the bias unit is added but never connected here.
    net.addModule(BiasUnit(name='bias'))
    net.addConnection(FullConnection(net['in'], net['hidden']))
    net.addConnection(FullConnection(net['hidden'], net['out']))
    net.sortModules()

def xor_network(self, net):
    net.addInputModule(LinearLayer(2, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addModule(LinearLayer(3, name='hidden'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden']))
    net.addConnection(FullConnection(net['bias'], net['hidden']))
    net.addConnection(FullConnection(net['hidden'], net['out']))

def fit_predict(xTrain, yTrain, xTest, epochs, neurons):
    # Check edge cases
    if (not len(xTrain) == len(yTrain) or len(xTrain) == 0
            or len(xTest) == 0 or epochs <= 0):
        return

    # Randomize the training data (probably not necessary, but pybrain
    # might not shuffle the data itself, so do it as a safety check)
    indices = np.arange(len(xTrain))
    np.random.shuffle(indices)
    trainSwapX = [xTrain[x] for x in indices]
    trainSwapY = [yTrain[x] for x in indices]
    supTrain = SupervisedDataSet(len(xTrain[0]), 1)
    for x in range(len(trainSwapX)):
        supTrain.addSample(trainSwapX[x], trainSwapY[x])

    # Construct the feed-forward neural network
    n = FeedForwardNetwork()
    inLayer = LinearLayer(len(xTrain[0]))
    hiddenLayer1 = SigmoidLayer(neurons)
    outLayer = LinearLayer(1)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()

    # Train the neural network on the training partition, validating
    # the training progress on the validation partition
    trainer = BackpropTrainer(n, dataset=supTrain, momentum=0.1,
                              learningrate=0.01, verbose=False,
                              weightdecay=0.01)
    trainer.trainUntilConvergence(dataset=supTrain, maxEpochs=epochs,
                                  validationProportion=0.30)
    outputs = []
    for x in xTest:
        outputs.append(n.activate(x))
    return outputs

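# Usage sketch for fit_predict on toy XOR-style data (illustrative only;
# assumes numpy is imported as np and the PyBrain imports are in scope):
xTrain = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
yTrain = [0.0, 1.0, 1.0, 0.0]
preds = fit_predict(xTrain, yTrain, xTest=[[0.5, 0.5]],
                    epochs=10, neurons=4)
print(preds)  # list with one network output per test point
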
def main(f_samples):
    f_reading = open(f_samples, 'r')
    global data
    data = []
    for line in f_reading:
        line = line.split()
        data.append((float(line[0]), float(line[-1])))

    # Partition the data into five interleaved subsets
    data_module = lambda x: [data[z] for z in range(len(data))
                             if z % 5 == x]
    global data1
    data1 = [data_module(0), data_module(1), data_module(2),
             data_module(3), data_module(4)]

    global data_transformed
    data_transformed = take(data, rate=60)
    global data_transformed_training
    data_transformed_training = [data_transformed[x]
                                 for x in range(len(data_transformed))
                                 if uniform(0, 1) > 0.3]

    # Learning process -------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    net.addInputModule(inLayer)
    # net.addModule(hiddenLayer0)
    # net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    # net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    # net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print(net)

    # Net with 3 inputs, 8 hidden neurons in one layer, 8 in another, and 1 output:
    # net = buildNetwork(3, 8, 8, 1)

    # Set with 3 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)
    for i in data_transformed_training:
        samples.addSample(i['past'], i['next'] - i['average'])

    trainer = BackpropTrainer(net, samples)
    print('Training')
    trainer.trainUntilConvergence(maxEpochs=10)
    print('Comparing')
    compare_net_samples(net, data_transformed)
    print("Number of samples %d for training."
          % len(data_transformed_training))

def rec_two_layer_network(self, net):
    inlayer = LinearLayer(2, 'in')
    outlayer = LinearLayer(2, 'out')
    con = IdentityConnection(inlayer, outlayer)
    rcon = IdentityConnection(inlayer, outlayer)
    net.addInputModule(inlayer)
    net.addOutputModule(outlayer)
    net.addConnection(con)
    net.addRecurrentConnection(rcon)

def testMdlstm(self):
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(1, name='in'))
    net.addModule(MDLSTMLayer(1, 1, name='hidden'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden']))
    net.addConnection(FullConnection(net['hidden'], net['out']))
    net.sortModules()
    self.equivalence_feed_forward(net, net.convertToFastNetwork())

def __init__(self, x, y, direction):
    self.age = 0
    # position
    self.x = x
    self.y = y
    # number of fruits peeled
    self.num_peeled = 0
    self.num_eaten = 0
    self.num_moved = 0
    # orientation (0 - 359 degrees)
    self.direction = direction
    # touching anything
    self.touching = None
    self.sees = None
    # hunger sensor
    self.hunger = 2000
    self.avg_hunger = 0

    ###
    # Neural Network
    #
    # Inputs:
    # 1. sees_peeled_orange
    # 2. sees_unpeeled_orange
    # 3. sees_peeled_banana
    # 4. sees_unpeeled_banana
    # 5. sees_animat
    # 6. sees_wall
    # 7. hunger
    # 8. touching_peeled_orange
    # 9. touching_unpeeled_orange
    # 10. touching_peeled_banana
    # 11. touching_unpeeled_banana
    # 12. touching_animat
    # 13. touching_wall
    ###
    self.net = FeedForwardNetwork()
    self.net.addInputModule(LinearLayer(13, name='in'))
    self.net.addModule(SigmoidLayer(14, name='hidden'))
    self.net.addOutputModule(LinearLayer(5, name='out'))
    self.net.addConnection(
        FullConnection(self.net['in'], self.net['hidden']))
    self.net.addConnection(
        FullConnection(self.net['hidden'], self.net['out']))
    self.net.sortModules()

    # thresholds for deciding an action
    self.move_threshold = 0
    self.peel_threshold = 0
    self.eat_threshold = 0

def getFitness(self, smMatrix):
    # Store the sm state into memory
    fit = 0
    # Fitness function (3) ****************************************************
    # Record the sm data for this loop and consider its properties
    # net = buildNetwork(3, 10, 1, bias=True)
    net = FeedForwardNetwork()
    inp = LinearLayer(3)
    h1 = SigmoidLayer(10)
    outp = LinearLayer(1)
    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    # create connections
    iToH = FullConnection(inp, h1)
    hToO = FullConnection(h1, outp)
    net.addConnection(iToH)
    net.addConnection(hToO)
    # finish up
    net.sortModules()

    # Each sample maps the current sensor triple to the next motor value
    ds = SupervisedDataSet(3, 1)
    for index_x, x in enumerate(smMatrix):
        if index_x > 0 and index_x < len(smMatrix) - 1:
            ds.addSample((smMatrix[index_x][0], smMatrix[index_x][1],
                          smMatrix[index_x][2]),
                         (smMatrix[index_x + 1][3]))
    trainer = BackpropTrainer(net, ds, weightdecay=0.01)
    trainer.trainUntilConvergence(maxEpochs=100)

    # Visualize the network performance and structure.
    # nn = NNregression(ds, epoinc=10)
    # nn.setupNN()
    # nn.runTraining()
    print("Input to hidden", iToH.params)
    n1 = iToH.params
    n2 = hToO.params
    # Fitness is the sum of the learned weights. (The original grouped n1
    # in threes with zip(*[iter(n1)] * 3) and summed the tuples, which
    # raises a TypeError; summing the parameter arrays directly is assumed
    # to be the intent.)
    fit = sum(n1) + sum(n2)
    print(fit)
    return fit

def lstm_cell(self, net):
    inpt = LinearLayer(4, 'inpt')
    forgetgate = GateLayer(1, 'forgetgate')
    ingate = GateLayer(1, 'ingate')
    outgate = GateLayer(1, 'outgate')
    state = LinearLayer(1, 'state')

    in_to_fg = IdentityConnection(inpt, forgetgate,
                                  inSliceFrom=0, inSliceTo=1,
                                  outSliceFrom=0, outSliceTo=1,
                                  name='in_to_fg')
    in_to_og = IdentityConnection(inpt, outgate,
                                  inSliceFrom=1, inSliceTo=2,
                                  outSliceFrom=1, outSliceTo=2,
                                  name='in_to_og')
    in_to_ig = IdentityConnection(inpt, ingate,
                                  inSliceFrom=2, inSliceTo=4,
                                  outSliceFrom=0, outSliceTo=2,
                                  name='in_to_ig')
    fg_to_st = IdentityConnection(forgetgate, state, name='fg_to_st')
    st_to_fg = IdentityConnection(state, forgetgate,
                                  outSliceFrom=1, outSliceTo=2,
                                  name='st_to_fg')
    st_to_og = IdentityConnection(state, outgate,
                                  outSliceFrom=1, outSliceTo=2,
                                  name='st_to_og')
    ig_to_st = IdentityConnection(ingate, state, name='ig_to_st')

    net.addInputModule(inpt)
    net.addModule(forgetgate)
    net.addModule(ingate)
    net.addModule(state)
    net.addOutputModule(outgate)
    net.addConnection(in_to_fg)
    net.addConnection(in_to_og)
    net.addConnection(in_to_ig)
    net.addConnection(fg_to_st)
    net.addRecurrentConnection(st_to_fg)
    net.addConnection(st_to_og)
    net.addConnection(ig_to_st)

def buildMinimalLSTMNetwork():
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(4, name='i')
    h = LSTMLayer(1, peepholes=True, name='lstm')
    o = LinearLayer(1, name='o')
    N.addInputModule(i)
    N.addModule(h)
    N.addOutputModule(o)
    N.addConnection(IdentityConnection(i, h))
    N.addConnection(IdentityConnection(h, o))
    N.sortModules()
    return N

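# Usage sketch: a RecurrentNetwork keeps state across activate() calls,
# so reset() starts a fresh sequence (illustrative inputs):
lstm_net = buildMinimalLSTMNetwork()
lstm_net.reset()
for step in ([1, 0, 0, 0], [0, 1, 0, 0]):
    print(lstm_net.activate(step))  # output evolves with the hidden state
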
def buildMinimalMDLSTMNetwork():
    N = RecurrentNetwork('simpleMdLstmNet')
    i = LinearLayer(4, name='i')
    h = MDLSTMLayer(1, peepholes=True, name='mdlstm')
    o = LinearLayer(1, name='o')
    N.addInputModule(i)
    N.addModule(h)
    N.addOutputModule(o)
    N.addConnection(IdentityConnection(i, h, outSliceTo=4))
    N.addRecurrentConnection(IdentityConnection(h, h, outSliceFrom=4,
                                                inSliceFrom=1))
    N.addConnection(IdentityConnection(h, o, inSliceTo=1))
    N.sortModules()
    return N

def lstm_network(self, net):
    i = LinearLayer(1, name='in')
    h = LSTMLayer(2, name='hidden')
    o = LinearLayer(1, name='out')
    b = BiasUnit(name='bias')
    net.addModule(b)
    net.addOutputModule(o)
    net.addInputModule(i)
    net.addModule(h)
    net.addConnection(FullConnection(i, h))
    net.addConnection(FullConnection(b, h))
    net.addRecurrentConnection(FullConnection(h, h))
    net.addConnection(FullConnection(h, o))

def rec_three_layer_network(self, net):
    inlayer = LinearLayer(1, name='in')
    hiddenlayer = LinearLayer(1, name='hidden')
    outlayer = LinearLayer(1, name='out')
    con1 = FullConnection(inlayer, hiddenlayer)
    con2 = FullConnection(hiddenlayer, outlayer)
    con3 = FullConnection(hiddenlayer, hiddenlayer)
    net.addInputModule(inlayer)
    net.addModule(hiddenlayer)
    net.addOutputModule(outlayer)
    net.addConnection(con1)
    net.addConnection(con2)
    net.addRecurrentConnection(con3)

def constructNet(self, input, hidden, output):
    inputLayer = LinearLayer(input)
    hiddenLayer = TanhLayer(hidden)
    outputLayer = LinearLayer(output)

    self.net.addInputModule(inputLayer)
    self.net.addModule(hiddenLayer)
    self.net.addOutputModule(outputLayer)

    conn1 = FullConnection(inputLayer, hiddenLayer)
    conn2 = FullConnection(hiddenLayer, outputLayer)

    self.net.addConnection(conn1)
    self.net.addConnection(conn2)

def sliced_connection_network(self, net):
    inlayer = LinearLayer(2, 'in')
    outlayer = LinearLayer(2, 'out')
    # The original bound both connections to the same name, so the first
    # was silently discarded; both cross-wiring slices are added here.
    con1 = IdentityConnection(inlayer, outlayer,
                              inSliceFrom=0, inSliceTo=1,
                              outSliceFrom=1, outSliceTo=2)
    con2 = IdentityConnection(inlayer, outlayer,
                              inSliceFrom=1, inSliceTo=2,
                              outSliceFrom=0, outSliceTo=1)
    net.addInputModule(inlayer)
    net.addOutputModule(outlayer)
    net.addConnection(con1)
    net.addConnection(con2)

def add_layers(self):
    self.inLayer = LinearLayer(784, name='in')
    self.outLayer = LinearLayer(784, name='out')
    if self.hidden_type == 'sigmoid':
        self.hiddenLayer = SigmoidLayer(self.hidden_neuron_num,
                                        name='hidden')
    else:
        # To get a linear hidden layer to work, the output layer has to be
        # overwritten with a sigmoid layer.
        self.hiddenLayer = LinearLayer(self.hidden_neuron_num,
                                       name='hidden')
        self.outLayer = SigmoidLayer(784, name='out')
    self.net.addInputModule(self.inLayer)
    self.net.addModule(self.hiddenLayer)
    self.net.addOutputModule(self.outLayer)

def generate_forecasters(data, dtt, alpha):
    # Learning process ---------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer0)
    # net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    # net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    # net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print(net)

    # Net with 3 inputs, 8 hidden neurons in one layer, 8 in another, and 1 output:
    # net = buildNetwork(3, 8, 8, 1)

    # Set with 3 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)
    for i in dtt:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)
    print('Training')
    # trainer.trainUntilConvergence(maxEpochs=1)

    # Making forecasters --------------------------------------------------------
    aux = [x[0] for x in data]

    def exp(self, a, x):
        self.exp = a * data[aux.index(x) - 1][1] + (1 - a) * self.exp
        return self.exp

    naive = Forecaster(name='Naive',
                       predict_function=lambda x: data[aux.index(x) - 1][1])
    exponential = Forecaster(name='Exponential')
    exponential.exp = data[0][1]
    exponential.predict = lambda x: exp(exponential, alpha, x)
    network = Forecaster(name='Network', predict_function=net.activate)
    return naive, exponential, network

def make_net(self):
    net = FeedForwardNetwork()

    vision = LinearLayer(5, name="Vision")
    vision.x = 0
    vision.y = 0
    vision.color = '#FF0000'
    vision.orient = "vertical"

    action = LinearLayer(5, name="Action")
    action.x = 28
    action.y = 0
    action.color = '#00FFFF'
    action.orient = "vertical"

    drive = DrivesSquashLayer(3, name="Drive")
    drive.x = 0
    drive.y = 23
    drive.color = '#0000FF'
    drive.orient = "horizontal"

    net.addInputModule(vision)
    net.addInputModule(drive)
    net.addOutputModule(action)

    stm = LinearLayer(5, name="STM")
    stm.x = 18
    stm.y = 20
    stm.orient = "horizontal"
    stm.color = '#FF00FF'
    net.addModule(stm)

    vision_action = FullConnection(vision, action, name="vision->action")
    drive_action = FullConnection(drive, action, name="drive->action")
    net.addConnection(vision_action)
    net.addConnection(drive_action)

    test_stm = MaxOnlyConnection(action, stm, name="test_stm")
    net.addConnection(test_stm)

    net.sortModules()
    return net
