def _createRBF(self):
    # choose random centers on map
    for i in range(self.numCenters):
        self.centers.append(self.env._randomInitPose())
    # create an RBF network
    params = FeedForwardNetwork()
    inLayer = LinearLayer(self.task.outdim)
    hiddenLayer = RBFLayer(self.numCenters, self.centers)
    # inLayer = RBFLayer(self.numCenters, self.centers)
    outLayer = LinearLayer(self.task.indim)
    params.addInputModule(inLayer)
    params.addModule(hiddenLayer)
    params.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    params.addConnection(in_to_hidden)
    params.addConnection(hidden_to_out)
    params.sortModules()
    return params
def encoderdecoder(outersize, innersize, indata, fname):
    # create network
    n = FeedForwardNetwork()
    inLayer = LinearLayer(outersize)
    hiddenLayer = SigmoidLayer(innersize)
    outLayer = LinearLayer(outersize)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    # create dataset: the autoencoder target is the input itself
    ds = SupervisedDataSet(outersize, outersize)
    for x, y in zip(indata, indata):
        ds.addSample(x, y)
    # train network
    trainer = BackpropTrainer(n, ds)
    trainer.trainUntilConvergence()
    n.saveNetwork(fname)
    return [[in_to_hidden, hidden_to_out], [inLayer, hiddenLayer, outLayer], n]
def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n], (data_set[i][1], data_set[i][2]))
        except Exception:
            continue
    print str(len(ds)) + ' points successfully acquired'

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.1, verbose=True)
    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)
    return net
def build_network(self, layers=None, end=1):
    layerobjects = []
    for item in layers:
        try:
            t, n = item
            if t == "sig":
                if n == 0:
                    continue
                layerobjects.append(SigmoidLayer(n))
        except TypeError:
            layerobjects.append(LinearLayer(item))

    n = FeedForwardNetwork()
    n.addInputModule(layerobjects[0])
    for i, layer in enumerate(layerobjects[1:-1]):
        n.addModule(layer)
        connection = FullConnection(layerobjects[i], layerobjects[i+1])
        n.addConnection(connection)
    n.addOutputModule(layerobjects[-1])
    connection = FullConnection(layerobjects[-2], layerobjects[-1])
    n.addConnection(connection)
    n.sortModules()
    return n
def create(number_of_hidden_layers, activation_function, input_length, output_length,
           network_file, classify):
    n = FeedForwardNetwork()
    in_layer = LinearLayer(input_length)
    n.addInputModule(in_layer)

    layer_to_connect_to = in_layer
    for x in range(0, number_of_hidden_layers):
        if activation_function == 'sigmoid':
            hidden_layer = SigmoidLayer(input_length)
        else:
            hidden_layer = TanhLayer(input_length)
        n.addModule(hidden_layer)
        hidden_layer_connection = FullConnection(layer_to_connect_to, hidden_layer)
        n.addConnection(hidden_layer_connection)
        layer_to_connect_to = hidden_layer

    if classify:
        out_layer = SoftmaxLayer(output_length)
    else:
        out_layer = LinearLayer(output_length)
    n.addOutputModule(out_layer)
    hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
    n.addConnection(hidden_to_out)
    n.sortModules()
    save_network(n, network_file)
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()
    # Declare the input, hidden and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    # Declare the connections between the nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Register the connections in the network
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    # The network is ready to use
    n.sortModules()
    return n
def BackupNetwork(genome=None):
    # Initialise a [12, 12, 4] network whose initial weights come from the baseline policy
    from pybrain.structure import FeedForwardNetwork, LinearLayer, TanhLayer, FullConnection
    network = FeedForwardNetwork()
    inLayer = LinearLayer(12)
    hiddenLayer = LinearLayer(12)
    outLayer = TanhLayer(4)
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)

    weights = []
    if genome is None:
        import pickle
        weights = pickle.load(open("seed"))
    else:
        weights = genome

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    for i in range(0, 144):
        in_to_hidden.params[i] = weights[i]
    for j in range(0, 48):
        hidden_to_out.params[j] = weights[j + 144]
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    network.sortModules()
    return network
def trainedANN():
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)
    return n
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print 'successfully converted to fast network'

    t = BackpropTrainer(n, d, learningrate=0.0001)  # , momentum=0.75)
    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
def main():
    n = FeedForwardNetwork()

    in_layer = LinearLayer(2)
    hidden_layer = SigmoidLayer(3)
    out_layer = LinearLayer(1)

    n.addInputModule(in_layer)
    n.addModule(hidden_layer)
    n.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    print(">>> print n")
    print(n)
    print(">>> n.activate([1, 2])")
    print(n.activate([1, 2]))
    print(">>> in_to_hidden.params")
    print(in_to_hidden.params)
    print(">>> hidden_to_out.params")
    print(hidden_to_out.params)
    print(">>> n.params")
    print(n.params)
def _constructNetwork(self, nIn, nOut, params):
    '''Construct the network.'''
    nHidden = params.setdefault('nHidden', 2)
    # use an integer array so layer sizes are valid dimensions
    hiddenSize = np.empty(nHidden, dtype=int)
    for i in range(nHidden):
        pstr = 'hiddenSize[' + str(i) + ']'
        hiddenSize[i] = params.setdefault(pstr, nIn + nOut)

    # Construct network
    ann = FeedForwardNetwork()

    # Add layers
    layers = []
    layers.append(LinearLayer(nIn))
    for nHid in hiddenSize:
        layers.append(SoftmaxLayer(nHid))
    layers.append(LinearLayer(nOut))

    ann.addOutputModule(layers[-1])
    ann.addInputModule(layers[0])
    for mod in layers[1:-1]:
        ann.addModule(mod)

    # Connections
    for i, mod in enumerate(layers):
        if i < len(layers) - 1:
            conn = FullConnection(mod, layers[i+1])
            ann.addConnection(conn)

    # Sort the modules
    ann.sortModules()
    return ann
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600      # because the input is a 40x40 picture
    NUMBER_OF_HIDDEN_LAYERS = 10      # actually the number of neurons in the single hidden layer
    NUMBER_OF_OUTPUT_CLASSES = 8      # because the output has 8 classes
    inLayer = LinearLayer(NUMBER_OF_INPUT_BYTES)
    hiddenLayer = SigmoidLayer(NUMBER_OF_HIDDEN_LAYERS)
    outLayer = LinearLayer(NUMBER_OF_OUTPUT_CLASSES)
    # Create connections between layers
    # We create FullConnection - each neuron of one layer is connected to each neuron of the other layer
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Add layers to our network
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)
    # Add connections to the network
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    # Sort modules to make the multilayer perceptron usable
    network.sortModules()
    # prepare an array to activate the network
    d_letter_array = read_array("d")
    # activate the network
    network.activate(d_letter_array)
    return network
def constructPerceptron(name, numNeurons):
    """Returns an untrained network.

    Arguments:
    name -- network name, a string
    numNeurons -- number of neurons in each layer, a list of integers
    """
    # Create the network
    net = FeedForwardNetwork(name)
    # Create the layers and add them to the network
    prevLayer = None
    newLayer = None
    for i, val in enumerate(numNeurons):
        # The input layer is linear
        if i == 0:
            newLayer = LinearLayer(val, 'input')
            net.addInputModule(newLayer)
            prevLayer = newLayer
        # The output layer is linear as well
        elif i == len(numNeurons) - 1:
            newLayer = LinearLayer(val, 'output')
            net.addOutputModule(newLayer)
        # Otherwise the layer is sigmoid
        else:
            newLayer = SigmoidLayer(val, 'hidden_' + str(i))
            net.addModule(newLayer)
        # For every layer after the first, connect it to the previous layer
        if i > 0:
            conn = FullConnection(prevLayer, newLayer, 'conn_' + str(i))
            net.addConnection(conn)
            prevLayer = newLayer
    # Prepare the network for activation by sorting its internal structure
    net.sortModules()
    # Done
    return net
def create_ff_network(options):
    """Create the feed-forward network.

    :param options: The input options.
    :return: the sorted network
    """
    # Create FF network
    net = FeedForwardNetwork()

    # Create each Layer instance
    in_layer = LinearLayer(options['inUnitCount'])
    hidden_layer = SigmoidLayer(options['hiddenUnitCount'])
    out_layer = LinearLayer(options['outUnitCount'])

    # Build network layer topology
    net.addInputModule(in_layer)
    net.addModule(hidden_layer)
    net.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_out)

    # Complete the network structure
    net.sortModules()

    return net
class NNet(FunctionApproximator):
    def __init__(self, num_features, num_hidden_neurons):
        super(NNet, self).__init__(num_features)

        self.ds = SupervisedDataSet(num_features, 1)

        self.net = FeedForwardNetwork()
        self.net.addInputModule(LinearLayer(num_features, name='in'))
        self.net.addModule(LinearLayer(num_hidden_neurons, name='hidden'))
        self.net.addOutputModule(LinearLayer(1, name='out'))
        self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
        self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
        self.net.sortModules()

    def getY(self, inpt):
        # giving NaN
        return self.net.activate(inpt)

    def update(self, inpt, target):
        # NOTE: the passed-in target is overwritten by the TD target below, which
        # references state/action/reward names that are not defined in this method
        q_old = self.qvalue(state, action)
        q_new = self.qvalue(new_state, new_action)
        target = q_old + self.alpha * (reward + (self.gamma * q_new) - q_old)

        self.ds.addSample(inpt, target)
        # print inpt.shape, target.shape
        # print inpt, target
        trainer = BackpropTrainer(self.net, self.ds)
        # try:
        #     trainer.trainUntilConvergence()
        # except:
        trainer.train()
class ANNApproximator(object):

    def __init__(self, alpha):
        self.name = "ANNApprox"
        self.network = FeedForwardNetwork()
        inLayer = LinearLayer(4)
        hiddenLayer = SigmoidLayer(12)
        outLayer = LinearLayer(1)
        self.network.addInputModule(inLayer)
        self.network.addModule(hiddenLayer)
        self.network.addOutputModule(outLayer)
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        self.network.addConnection(in_to_hidden)
        self.network.addConnection(hidden_to_out)
        # Last step to make sure everything works in the connections
        self.network.sortModules()

        self.dataset = SupervisedDataSet(4, 1)
        self.trainer = BackpropTrainer(self.network, self.dataset,
                                       learningrate=alpha, momentum=0.0, verbose=True)

    def computeOutput(self, state_features):
        return self.network.activate(state_features)[0]

    def updateWeights(self, features, desired_output):
        print("updateWeights: features: {0}".format(features))
        print("updateWeights: value: {0}".format(desired_output))
        self.dataset.addSample(features, desired_output)
        # self.trainer.train()
        self.trainer.trainEpochs(10)
        self.dataset.clear()
class MyNet:

    def __init__(self, file='config.xml'):
        self.net = FeedForwardNetwork()
        self.file = file

    def constructNet(self, input, hidden, output):
        inputLayer = LinearLayer(input)
        hiddenLayer = TanhLayer(hidden)
        outputLayer = LinearLayer(output)

        self.net.addInputModule(inputLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outputLayer)

        conn1 = FullConnection(inputLayer, hiddenLayer)
        conn2 = FullConnection(hiddenLayer, outputLayer)

        self.net.addConnection(conn1)
        self.net.addConnection(conn2)

    def setup(self):
        self.net.sortModules()

    def saveToFile(self, file='config.xml'):
        NetworkWriter.writeToFile(self.net, file)

    def loadFromFile(self, file='config.xml'):
        self.net = NetworkReader.readFrom(file)
def __init__(self, index, name, params):
    self.name = name
    self.index = index
    self.liste = []  # ClassificationDataSet(17, 1, nb_classes=4)
    self.status_good = True
    self.number_of_moves = 0
    self.number_of_sound_moves = 0

    n = FeedForwardNetwork()
    self.inLayer = LinearLayer(5)
    self.hiddenLayer1 = SigmoidLayer(15)
    self.hiddenLayer2 = SigmoidLayer(15)
    self.hiddenLayer3 = SigmoidLayer(15)
    self.outLayer = LinearLayer(4)
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer1)
    n.addModule(self.hiddenLayer2)
    n.addModule(self.hiddenLayer3)
    n.addOutputModule(self.outLayer)

    from pybrain.structure import FullConnection
    in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer1)
    hidden_to_hidden1 = FullConnection(self.hiddenLayer1, self.hiddenLayer2)
    hidden_to_hidden2 = FullConnection(self.hiddenLayer2, self.hiddenLayer3)
    hidden_to_out = FullConnection(self.hiddenLayer3, self.outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden1)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
    n.sortModules()
    self.n = n
def getMultiplayerFeedForwardNetwork(inputLayerLen, hiddenLayersLenList, outLayerLen=1):
    # create net
    net = FeedForwardNetwork()

    # create layers
    inLayer = LinearLayer(inputLayerLen, name='inLinearLayer')
    hiddenLayers = [SigmoidLayer(n, name='sigmoidLayer' + str(i))
                    for i, n in enumerate(hiddenLayersLenList)]
    outLayer = LinearLayer(outLayerLen, name='outLinearLayer')

    # add layers to net
    net.addInputModule(inLayer)
    for l in hiddenLayers:
        net.addModule(l)
    net.addOutputModule(outLayer)

    # create connections
    layers = [inLayer] + hiddenLayers + [outLayer]
    connections = [FullConnection(layers[i], layers[i + 1], name='connection' + str(i))
                   for i in range(len(layers) - 1)]

    # add connections to net
    for c in connections:
        net.addConnection(c)

    # do some required initialization
    net.sortModules()

    return net
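# A minimal usage sketch for getMultiplayerFeedForwardNetwork above. The layer
# sizes and the dummy input are arbitrary, illustrative values, not taken from
# the original code.
net = getMultiplayerFeedForwardNetwork(4, [8, 8], 1)
print(net.activate([0.1, 0.2, 0.3, 0.4]))  # one forward pass on a dummy 4-dimensional input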
def __init__(self, index, name, params):
    self.name = name
    self.index = index
    self.status_good = True

    n = FeedForwardNetwork()
    self.inLayer = LinearLayer(17)
    self.hiddenLayer = SigmoidLayer(5)
    self.outLayer = LinearLayer(4)
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer)
    n.addOutputModule(self.outLayer)

    from pybrain.structure import FullConnection
    in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
    hidden_to_out = FullConnection(self.hiddenLayer, self.outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()

    # overwrite the connection weights with the supplied parameters
    for j, i in enumerate(params[0]):
        n.connections[self.hiddenLayer][0].params[j] = i
    for j, i in enumerate(params[1]):
        n.connections[self.inLayer][0].params[j] = i

    self.n = n
def buildNN(self, net, functions, inp, out):
    layers = []
    inLayer = self.func[functions[0]](inp)
    layers.append(inLayer)
    outLayer = self.func[functions[-1]](out)
    for neural in range(1, len(net) - 1):
        layers.append(self.func[functions[neural]](1))
    layers.append(outLayer)

    connections, recConnections = self.fillConnections(net, [], [0], layers)
    if len(recConnections) == 0:
        n = FeedForwardNetwork()
    else:
        n = RecurrentNetwork()

    n.addInputModule(inLayer)
    for layer in range(1, len(layers) - 1):
        n.addModule(layers[layer])
    n.addOutputModule(outLayer)

    for con in connections:
        n.addConnection(con)
    for rcon in recConnections:
        n.addRecurrentConnection(rcon)

    n.sortModules()
    return n
class NeuralNetwork(BaseEstimator, RegressorMixin):

    def __init__(self, inp_neu=4, hid_neu=3, out_neu=1, learn_rate=0.1, nomentum=0.5,
                 weight_dec=0.0001, epochs=100, split_prop=0.25):
        self.inp_neu = inp_neu
        self.hid_neu = hid_neu
        self.out_neu = out_neu
        self.learn_rate = learn_rate
        self.nomentum = nomentum
        self.weight_dec = weight_dec
        self.epochs = epochs
        self.split_prop = split_prop

    def data(self, X, y=None):
        DS = SupervisedDataSet(self.inp_neu, self.out_neu)
        for i in range(0, len(X)):
            # NOTE: not optimised for all input sizes (the four features are hard-coded)
            DS.addSample((X[i][0], X[i][1], X[i][2], X[i][3]), y[i])
        return DS

    def fit(self, X, y):
        self.n = FeedForwardNetwork()

        self.n.addInputModule(SigmoidLayer(self.inp_neu, name="in"))
        self.n.addModule(SigmoidLayer(self.hid_neu, name="hidden"))
        self.n.addOutputModule(LinearLayer(self.out_neu, name="out"))
        self.n.addConnection(FullConnection(self.n["in"], self.n["hidden"], name="c1"))
        self.n.addConnection(FullConnection(self.n["hidden"], self.n["out"], name="c2"))

        self.n.sortModules()  # initialisation

        self.tstdata, trndata = self.data(X, y).splitWithProportion(self.split_prop)

        trainer = BackpropTrainer(self.n, trndata, learningrate=self.learn_rate,
                                  momentum=self.nomentum, weightdecay=self.weight_dec)
        trainer.trainUntilConvergence(verbose=True, maxEpochs=self.epochs)

        return self

    def predict(self, X):
        self.yhat = []
        for i in X:
            self.yhat.append(float(self.n.activate(i)))
        self.yhat = np.array(self.yhat)
        return self.yhat

    def score(self, y):
        vect_se = (self.yhat - y) ** 2
        mse = float(np.sum(vect_se)) / float(len(vect_se))
        return mse
def fit_predict(xTrain, yTrain, xTest, epochs, neurons):
    # Check edge cases
    if (not len(xTrain) == len(yTrain) or len(xTrain) == 0 or
            len(xTest) == 0 or epochs <= 0):
        return

    # Randomize the training data (probably not necessary but pybrain might
    # not shuffle the data itself, so perform as safety check)
    indices = np.arange(len(xTrain))
    np.random.shuffle(indices)
    trainSwapX = [xTrain[x] for x in indices]
    trainSwapY = [yTrain[x] for x in indices]

    supTrain = SupervisedDataSet(len(xTrain[0]), 1)
    for x in range(len(trainSwapX)):
        supTrain.addSample(trainSwapX[x], trainSwapY[x])

    # Construct the feed-forward neural network
    n = FeedForwardNetwork()

    inLayer = LinearLayer(len(xTrain[0]))
    hiddenLayer1 = SigmoidLayer(neurons)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()

    # Train the neural network on the training partition, validating
    # the training progress on the validation partition
    trainer = BackpropTrainer(n, dataset=supTrain, momentum=0.1, learningrate=0.01,
                              verbose=False, weightdecay=0.01)
    trainer.trainUntilConvergence(dataset=supTrain, maxEpochs=epochs,
                                  validationProportion=0.30)

    outputs = []
    for x in xTest:
        outputs.append(n.activate(x))
    return outputs
def testMdlstm(self):
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(1, name='in'))
    net.addModule(MDLSTMLayer(1, 1, name='hidden'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden']))
    net.addConnection(FullConnection(net['hidden'], net['out']))
    net.sortModules()
    self.equivalence_feed_forward(net, net.convertToFastNetwork())
def main(f_samples):
    f_reading = open(f_samples, 'r')

    global data
    data = []
    for line in f_reading:
        line = line.split()
        data.append((float(line[0]), float(line[-1])))

    # function that picks every 5th sample, offset by x
    data_module = lambda x: map(lambda z: data[z],
                                filter(lambda y: y % 5 == x, xrange(len(data))))

    global data1
    data1 = [data_module(0), data_module(1), data_module(2), data_module(3), data_module(4)]

    global data_transformed
    data_transformed = take(data, rate=60)

    global data_transformed_training
    data_transformed_training = map(lambda x: data_transformed[x],
                                    filter(lambda x: uniform(0, 1) > 0.3,
                                           xrange(len(data_transformed))))

    # Learning process -----------------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
    # net.addModule(hiddenLayer0)
    # net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    # net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    # net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print net

    # Net with 3 inputs, 8 hidden neurons in one layer, 8 in another, and 1 output.
    # net = buildNetwork(3, 8, 8, 1)

    # Set with 3 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)
    for i in data_transformed_training:
        samples.addSample(i['past'], i['next'] - i['average'])

    trainer = BackpropTrainer(net, samples)

    print 'Training'
    trainer.trainUntilConvergence(maxEpochs=10)

    print 'Comparing'
    compare_net_samples(net, data_transformed)
    print "Number of samples %d for training." % len(data_transformed_training)
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name='i'))
    net.addModule(TanhLayer(hiddim, name='h'))
    net.addOutputModule(ThresholdLayer(outdim, name='o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
def fromModules(cls, visible, hidden, bias, con, biascon):
    net = FeedForwardNetwork()
    net.addInputModule(visible)
    net.addModule(bias)
    net.addOutputModule(hidden)
    net.addConnection(con)
    net.addConnection(biascon)
    net.sortModules()
    return cls(net)
def initMaxentNetwork():
    """Builds a network with just a sigmoid output layer, i.e. a multi-class
    maximum entropy model."""
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(numFeatures)
    fnn.addInputModule(inLayer)
    outLayer = SigmoidLayer(3)
    fnn.addOutputModule(outLayer)
    fnn.addConnection(FullConnection(inLayer, outLayer))
    fnn.sortModules()
    return fnn
def mlpClassifier(X, y, train_indices, test_indices, mom=0.1, weightd=0.01, epo=5):
    X_train, y_train, X_test, y_test = X[train_indices], y[train_indices], X[test_indices], y[test_indices]

    # Converting the data into a dataset which is easily understood by PyBrain.
    tstdata = ClassificationDataSet(X.shape[1], target=1, nb_classes=8)
    trndata = ClassificationDataSet(X.shape[1], target=1, nb_classes=8)
    # print "shape of X_train & y_train: " + str(X_train.shape) + str(y_train.shape)
    for i in range(y_train.shape[0]):
        trndata.addSample(X_train[i, :], y_train[i])
    for i in range(y_test.shape[0]):
        tstdata.addSample(X_test[i, :], y_test[i])
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()

    # printing the specs of the data
    # print "Number of training patterns: ", len(trndata)
    # print "Input and output dimensions: ", trndata.indim, trndata.outdim
    # print "First sample (input, target, class):"
    # print trndata['input'][0], trndata['target'][0], trndata['class'][0]

    # The neural network used
    # print "Building Network..."
    # input layer, two hidden layers of size 100, output layer
    ANNc = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name="ip")
    hLayer1 = TanhLayer(100, name="h1")
    hLayer2 = SigmoidLayer(100, name="h2")
    outLayer = SoftmaxLayer(trndata.outdim, name="op")
    ANNc.addInputModule(inLayer)
    ANNc.addModule(hLayer1)
    ANNc.addModule(hLayer2)
    ANNc.addOutputModule(outLayer)

    ip_to_h1 = FullConnection(inLayer, hLayer1, name="ip->h1")
    h1_to_h2 = FullConnection(hLayer1, hLayer2, name="h1->h2")
    h2_to_op = FullConnection(hLayer2, outLayer, name="h2->op")
    ANNc.addConnection(ip_to_h1)
    ANNc.addConnection(h1_to_h2)
    ANNc.addConnection(h2_to_op)
    ANNc.sortModules()

    # print "Done. Training the network."
    # The trainer used, in our case a back-propagation trainer
    trainer = BackpropTrainer(ANNc, dataset=trndata, momentum=mom,
                              verbose=True, weightdecay=weightd)
    trainer.trainEpochs(epo)

    # The error
    trnresult = percentError(trainer.testOnClassData(dataset=trndata), trndata['class'])
    tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
    # print "Done."
    return ANNc, trainer.totalepochs, (100 - trnresult), (100 - tstresult)
def getFitness(self, smMatrix):
    # Store the sm state into memory
    fit = 0

    # Fitness function (3) *************************************************************
    # Record the sm data for this loop and consider its properties
    # print(smMatrix)
    # print(len(smMatrix))

    # net = buildNetwork(3, 10, 1, bias=True)
    net = FeedForwardNetwork()
    inp = LinearLayer(3)
    h1 = SigmoidLayer(10)
    outp = LinearLayer(1)
    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    # create connections
    iToH = FullConnection(inp, h1)
    hToO = FullConnection(h1, outp)
    net.addConnection(iToH)
    net.addConnection(hToO)
    # finish up
    net.sortModules()

    ds = SupervisedDataSet(3, 1)
    trainSet = []
    for index_x, x in enumerate(smMatrix):
        if index_x > 0 and index_x < len(smMatrix) - 1:
            # trainSet.append([smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2], smMatrix[index_x+1][3]])
            ds.addSample(([smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2]]),
                         (smMatrix[index_x + 1][3]))
    # print(trainSet)
    # print(ds)

    trainer = BackpropTrainer(net, ds, weightdecay=0.01)
    err = trainer.trainUntilConvergence(maxEpochs=100)

    # Visualize the network performance and structure.
    # nn = NNregression(ds, epoinc=10)
    # nn.setupNN()
    # nn.runTraining()
    # self.pesos_conexiones(net)
    print("Input to hidden", iToH.params)
    # print("H to output", hToO.params)
    # print(iToH.params)

    # Fitness: sum of all learned weights (input-to-hidden, grouped per hidden unit,
    # plus hidden-to-output)
    n1 = iToH.params
    n1a = zip(*[iter(n1)] * 3)
    n2 = hToO.params
    fit = sum(sum(t) for t in n1a) + sum(n2)
    print fit
    return fit
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()

    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
def neuralNet(info, test_data):
    ann = FeedForwardNetwork()

    # Initiate the input nodes, hidden layer nodes, and the output layer nodes.
    inputLayer = LinearLayer(5)
    hiddenLayer = SigmoidLayer(20)
    outputLayer = LinearLayer(1)

    # Add the nodes to the corresponding layer
    ann.addInputModule(inputLayer)
    ann.addModule(hiddenLayer)
    ann.addOutputModule(outputLayer)

    # Connect the input layer to the hidden layer, then connect the hidden
    # layer to the output layer
    in_to_hidden = FullConnection(inputLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outputLayer)
    ann.addConnection(in_to_hidden)
    ann.addConnection(hidden_to_out)
    ann.sortModules()

    data_set = SupervisedDataSet(5, 1)
    for data in info:
        data_set.addSample(data[:-1], data[-1])

    trainer = BackpropTrainer(ann, data_set, verbose=False)
    # test_data, train_data = data_set.splitWithProportion(0.2)
    train_data = data_set
    test_data = test_data

    # Train the network until convergence, capped at maxEpochs (kept small for testing purposes)
    train = trainer.trainUntilConvergence(dataset=train_data, maxEpochs=10)
    NetworkWriter.writeToFile(ann, 'filename5.xml')

    for d in test_data:
        out = ann.activate(d)
        # print (train)
        print(out)
def _new_1h_net(window):
    net = FeedForwardNetwork()
    inl = SigmoidLayer(window * window * 2 + 1)
    hidden1 = SigmoidLayer(window * window * 2)
    outl = SigmoidLayer(1)
    net.addInputModule(inl)
    net.addModule(hidden1)
    net.addOutputModule(outl)
    c1 = FullConnection(inl, hidden1)
    c2 = FullConnection(hidden1, outl)
    net.addConnection(c1)
    net.addConnection(c2)
    # note: the caller still needs to run net.sortModules() before activation
    return net
def createNNLong(trndata):
    nn = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = TanhLayer(trndata.outdim, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
def buildNestedNetwork():
    """ build a nested network. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
def trainSupervised(self, num, ds, initialLearningrate=0.002, decay=0.9999,
                    myWeightdecay=0.8, momentum=0):
    n = FeedForwardNetwork()
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer)
    n.addModule(self.b)
    n.addOutputModule(self.outLayer)
    n.addConnection(self.in_to_hidden)
    n.addConnection(self.hidden_to_out)
    n.addConnection(self.b_to_hidden)
    n.addConnection(self.b_to_out)
    n.sortModules()
    self.supervisedNet = n
    self.supervisedTrainer = BackpropTrainer(n, ds,
                                             learningrate=initialLearningrate,
                                             lrdecay=decay,
                                             verbose=True,
                                             weightdecay=myWeightdecay,
                                             batchlearning=True,
                                             momentum=momentum)
    self.supervisedTrainer.trainEpochs(num)
class RNA:

    def __init__(self):
        self.red = FeedForwardNetwork()
        self.error_minimo = 0.01
        self.inputs = 0
        self.outputs = 0

    def configurar_RNA(self):
        self.inLayer = SigmoidLayer(42)
        self.hiddenLayer = SigmoidLayer(10)
        self.outLayer = SigmoidLayer(6)

        self.red.addInputModule(self.inLayer)
        self.red.addModule(self.hiddenLayer)
        self.red.addOutputModule(self.outLayer)

        self.in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        self.hidden_to_out = FullConnection(self.hiddenLayer, self.outLayer)
        self.red.addConnection(self.in_to_hidden)
        self.red.addConnection(self.hidden_to_out)
        self.red.sortModules()

    def ver_confi(self):
        print self.red

    def ver_pesos(self):
        print self.in_to_hidden.params
        print self.hidden_to_out.params

    def leer_archivo(self, nombre):
        f = open(nombre)
        for linea in f:
            linea = np.array(linea.split()).astype(int)
            self.inputs = np.array(linea[:-6])
            self.outputs = np.array(linea[-6:])

    def entrenamiento(self):
        ds = SupervisedDataSet(42, 6)
        ds.addSample(self.inputs, self.outputs)
        trainer = BackpropTrainer(self.red, ds)
        error = trainer.train()
        print error
        while error > self.error_minimo:
            error = trainer.train()
            print error
def generate_forecasters(data, dtt, alpha):
    # Learning process -----------------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer0)
    # net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    # net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, outLayer))
    # net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    # net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print net

    # Net with 3 inputs, 8 hidden neurons in one layer, 8 in another, and 1 output.
    # net = buildNetwork(3, 8, 8, 1)

    # Set with 3 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)
    for i in dtt:
        samples.addSample(i['past'], i['next'] - i['average'])

    trainer = BackpropTrainer(net, samples)
    print 'Training'
    # trainer.trainUntilConvergence(maxEpochs=1)

    # Making Forecasters ---------------------------------------------------------------
    aux = map(lambda x: x[0], data)

    def exp(self, a, x):
        self.exp = a * data[aux.index(x) - 1][1] + (1 - a) * self.exp
        return self.exp

    naive = Forecaster(name='Naive',
                       predict_function=lambda x: data[aux.index(x) - 1][1])
    exponential = Forecaster(name='Exponential')
    exponential.exp = data[0][1]
    exponential.predict = lambda x: exp(exponential, alpha, x)
    network = Forecaster(name='Network', predict_function=net.activate)

    return naive, exponential, network
def classify(imSize, dataset, hidden_neurons, initial_error):
    # splitWithProportion gives 75% training data and 25% test data
    tstdata, trndata = dataset.splitWithProportion(0.25)

    # imSize is the size of the input layer
    # define layer structures
    inLayer = LinearLayer(imSize)
    hiddenLayer = SigmoidLayer(imSize / 3)
    outLayer = SoftmaxLayer(1)

    # add layers to network
    net = FeedForwardNetwork()
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)

    # define connections for the network
    theta1 = FullConnection(inLayer, hiddenLayer)
    theta2 = FullConnection(hiddenLayer, outLayer)

    # add connections to the network
    net.addConnection(theta1)
    net.addConnection(theta2)

    # sort modules
    net.sortModules()

    dataset._convertToOneOfMany()
    # note: the hand-built net above is not actually trained; the buildNetwork
    # shortcut below is what gets used
    fnn = buildNetwork(dataset.indim, imSize / 3, dataset.outdim, outclass=SoftmaxLayer)

    # create a backpropagation trainer using the dataset and the network
    trainer = BackpropTrainer(fnn, dataset)

    error = initial_error
    iteration = 0
    # iterate until the error falls below 0.01
    while error > 0.01:
        error = trainer.train()
        iteration += 1
        # print "Iteration: {0} Error {1}".format(iteration, error)

    print "Finished after: ", iteration, " iterations"
    print "With an error of: ", error
    return fnn
def gen_nn(in_size, hidden_size, out_size):
    nn = FeedForwardNetwork()
    inLayer = LinearLayer(in_size)
    hiddenLayer = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(out_size)
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    nn.addConnection(FullConnection(inLayer, hiddenLayer))
    nn.addConnection(FullConnection(hiddenLayer, outLayer))
    nn.sortModules()
    return nn
def train(self):
    # We will build up a network piecewise in order to create a new dataset
    # for each layer.
    dataset = self.dataset
    piecenet = FeedForwardNetwork()
    piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
    # Add a bias
    bias = BiasUnit()
    piecenet.addModule(bias)
    # Add the first visible layer
    firstRbm = self.iterRbms().next()
    visible = copy.deepcopy(firstRbm.visible)
    piecenet.addModule(visible)
    # For saving the rbms and their inverses
    self.invRbms = []
    self.rbms = []
    for rbm in self.iterRbms():
        self.net.sortModules()
        # Train the first layer with an rbm trainer for `epoch` epochs.
        trainer = self.trainerKlass(rbm, dataset, self.cfg)
        for _ in xrange(self.epochs):
            trainer.train()
        self.invRbms.append(trainer.invRbm)
        self.rbms.append(rbm)
        # Add the connections and the hidden layer of the rbm to the net.
        hidden = copy.deepcopy(rbm.hidden)
        biascon = FullConnection(bias, hidden)
        biascon.params[:] = rbm.biasWeights
        con = FullConnection(visible, hidden)
        con.params[:] = rbm.weights

        piecenet.addConnection(biascon)
        piecenet.addConnection(con)
        piecenet.addModule(hidden)
        # Overwrite old outputs
        piecenet.outmodules = [hidden]
        piecenet.outdim = rbm.hiddenDim
        piecenet.sortModules()

        dataset = UnsupervisedDataSet(rbm.hiddenDim)
        for sample, in self.dataset:
            new_sample = piecenet.activate(sample)
            dataset.addSample(new_sample)
        visible = hidden
class MLPNetwork(object):

    def __init__(self, hidden_layers, data_index_size):
        self.network = FeedForwardNetwork()
        connect_queue = Queue.Queue()

        for layer in xrange(0, hidden_layers):
            connect_queue.put(TanhLayer(data_index_size,
                                        name='hidden_layer_{}'.format(layer)))
        connect_queue.put(SigmoidLayer(1, name='output_layer'))

        prev_layer = LinearLayer(data_index_size, name='input_layer')
        self.network.addInputModule(prev_layer)

        while not connect_queue.empty():
            print 'layer'
            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)
                bias = BiasUnit()
                bias_connection = FullConnection(
                    bias, current_layer,
                    name="bias_to_{}_connection".format(current_layer.name))
                self.network.addModule(bias)
                self.network.addConnection(bias_connection)

            connection = FullConnection(prev_layer, current_layer,
                                        name="{}_to_{}_connection".format(
                                            prev_layer.name, current_layer.name))
            self.network.addConnection(connection)
            prev_layer = current_layer

        print 'sorting....'
        self.network.sortModules()
def build_network(input_dim, layers, output_dim):
    n = FeedForwardNetwork()
    inLayer = LinearLayer(input_dim)
    hiddenLayer = SigmoidLayer(layers)
    outLayer = LinearLayer(output_dim)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    return n
class FeedFoward:

    def __init__(self, network, camada_entrada, camada_oculta, camada_saida):
        self.network = network
        self.network = FeedForwardNetwork()
        self.camada_entrada = camada_entrada
        self.camada_oculta = camada_oculta
        self.camada_saida = camada_saida
        self.ligacao_entrada_oculta = None
        self.ligacao_oculta_saida = None
        self.defineArquitetura()

    def defineArquitetura(self):
        self.camada_entrada = LinearLayer(self.camada_entrada, name="entrada")
        self.camada_oculta = SigmoidLayer(self.camada_oculta, name="oculta")
        self.camada_saida = LinearLayer(self.camada_saida, name="saida")
        self.adicionaEstrutura()

    def adicionaEstrutura(self):
        self.network.addInputModule(self.camada_entrada)
        self.network.addModule(self.camada_oculta)
        self.network.addOutputModule(self.camada_saida)
        self.adicionaConexoes()

    def adicionaConexoes(self):
        self.ligacao_entrada_oculta = FullConnection(self.camada_entrada, self.camada_oculta)
        self.ligacao_oculta_saida = FullConnection(self.camada_oculta, self.camada_saida)
        self.network.addConnection(self.ligacao_oculta_saida)
        self.network.addConnection(self.ligacao_entrada_oculta)
        self.iniciaRede()

    def visualizaPesosSinapticos(self):
        # shows the weights of the input-to-hidden connections, all fully
        # interconnected: 3x4 = 12 synaptic weights
        print('peso camada_entrada_oculta', self.ligacao_entrada_oculta.params)
        # shows the weights of the hidden-to-output connections, all fully
        # interconnected: 3x1 = 3 synaptic weights
        print('peso camada_oculta_saida', self.ligacao_oculta_saida.params)
        print('pesos rede', self.network.params)

    def iniciaRede(self):
        self.network.sortModules()
def logicTest():
    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(6)
    outLayer = LinearLayer(4)  # OR, AND, NOT, XOR

    n = FeedForwardNetwork()
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    inToHidden = FullConnection(inLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outLayer)
    n.addConnection(inToHidden)
    n.addConnection(hiddenToOut)
    n.sortModules()

    print n.activate([0, 1])
def createNetwork(in_layer_size, hidden_layer_size, out_layer_size):
    network = FeedForwardNetwork()
    in_layer = LinearLayer(in_layer_size)
    hidden_layer = SigmoidLayer(hidden_layer_size)
    out_layer = LinearLayer(out_layer_size)
    network.addInputModule(in_layer)
    network.addModule(hidden_layer)
    network.addOutputModule(out_layer)
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    network.sortModules()
    return network
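# A minimal training sketch for createNetwork above, assuming toy XOR-style data;
# the 2-3-1 layer sizes and the epoch count are illustrative, not from the original code.
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = createNetwork(2, 3, 1)
ds = SupervisedDataSet(2, 1)
for inp, target in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(inp, target)
trainer = BackpropTrainer(net, ds)
for _ in range(100):  # a fixed number of epochs, as several snippets above do
    trainer.train()
print(net.activate((0, 1)))  # forward pass after training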
def build_network():
    n = FeedForwardNetwork()
    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(5)
    outLayer = LinearLayer(1)
    # outLayer = SigmoidLayer(1)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in2hidden)
    n.addConnection(hidden2out)
    n.sortModules()
    return n