Example #1
    def build_network(self, layers=None, end=1):
        # NOTE: layers must be an iterable of layer specs (the None default
        # would fail in the loop below); end is currently unused
        layerobjects = []
        for item in layers:
            try:
                t, n = item
                if t == "sig":
                    if n == 0:
                        continue
                    layerobjects.append(SigmoidLayer(n))
            except TypeError:
                layerobjects.append(LinearLayer(item))

        n = FeedForwardNetwork()
        n.addInputModule(layerobjects[0])

        for i, layer in enumerate(layerobjects[1:-1]):
            n.addModule(layer)
            connection = FullConnection(layerobjects[i], layerobjects[i+1])
            n.addConnection(connection)

        n.addOutputModule(layerobjects[-1])
        connection = FullConnection(layerobjects[-2], layerobjects[-1])
        n.addConnection(connection)

        n.sortModules()
        return n
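For orientation, a minimal usage sketch (hypothetical caller; `builder` stands for whatever class defines this method): a plain integer entry falls through the TypeError branch and becomes a LinearLayer, while a ("sig", n) tuple becomes a SigmoidLayer.

# Hypothetical usage: LinearLayer(4) -> SigmoidLayer(6) -> LinearLayer(2)
net = builder.build_network(layers=[4, ("sig", 6), 2])
print(net.activate([0.1, 0.2, 0.3, 0.4]))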
Example #2
def constructPerceptron(name, numNeurons):
    """Return an untrained network.
    Arguments:
    name -- network name, a string
    numNeurons -- number of neurons per layer, a list of integers
    """
    # Create the network
    net = FeedForwardNetwork(name)
    # Create the layers and add them to the network
    prevLayer = None
    newLayer = None
    for i, val in enumerate(numNeurons):
        # The input layer is linear
        if i == 0:
            newLayer = LinearLayer(val, 'input')
            net.addInputModule(newLayer)
            prevLayer = newLayer
        # The output layer is linear as well
        elif i == len(numNeurons) - 1:
            newLayer = LinearLayer(val, 'output')
            net.addOutputModule(newLayer)
        # All other layers are sigmoid
        else:
            newLayer = SigmoidLayer(val, 'hidden_' + str(i))
            net.addModule(newLayer)
        # For every layer after the input, connect it to the previous layer
        if i > 0:
            conn = FullConnection(prevLayer, newLayer, 'conn_' + str(i))
            net.addConnection(conn)
            prevLayer = newLayer
    # Prepare the network for activation by sorting its internal structure
    net.sortModules()
    # Done
    return net
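A brief usage sketch with illustrative sizes: numNeurons lists the layer widths from input to output.

net = constructPerceptron('mlp', [2, 3, 1])
print(net.activate([0.5, -0.5]))  # one output from the untrained network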
class NNet(FunctionApproximator):
	def __init__(self, num_features, num_hidden_neurons):
		super(NNet,self).__init__(num_features)

		self.ds = SupervisedDataSet(num_features, 1)

		self.net = FeedForwardNetwork()
		self.net.addInputModule(LinearLayer(num_features, name='in'))
		self.net.addModule(LinearLayer(num_hidden_neurons, name='hidden'))
		self.net.addOutputModule(LinearLayer(1, name='out'))
		self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
		self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
		self.net.sortModules()

	def getY(self, inpt):
		# giving NaN
		return self.net.activate(inpt)

	def update(self, inpt, target):
		# NOTE: state, action, reward, new_state and new_action are not defined
		# in this scope (they come from the surrounding RL agent), and the
		# recomputed TD target overwrites the target argument
		q_old = self.qvalue(state, action)
		q_new = self.qvalue(new_state, new_action)
		target = q_old + self.alpha*(reward + (self.gamma*q_new) - q_old)

		self.ds.addSample(inpt, target)
		# print inpt.shape, target.shape
		# print inpt, target
		trainer = BackpropTrainer(self.net, self.ds)
		# try:
		# 	trainer.trainUntilConvergence()
		# except:
		trainer.train()
Example #4
def initialize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    
    # Old code (regression)        
    n = FeedForwardNetwork()
    # n = buildNetwork( 2, 3, data.outdim, outclass=SoftmaxLayer )

    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
        
        
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
        
    n.sortModules()
    return n
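Because the connections are stored in globals, their weights can be inspected after the network is built or trained; a minimal sketch:

n = initialize_nn()
print(in_to_hidden.params)      # weights of the input -> first hidden connection
print(hidden_to_out.params)     # weights of the second hidden -> output connection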
Example #5
class ANNApproximator(object):
    def __init__(self, alpha):
        self.name = "ANNApprox"
        self.network = FeedForwardNetwork()
        inLayer = LinearLayer(4)
        hiddenLayer = SigmoidLayer(12)
        outLayer = LinearLayer(1)
        self.network.addInputModule(inLayer)
        self.network.addModule(hiddenLayer)
        self.network.addOutputModule(outLayer)
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        self.network.addConnection(in_to_hidden)
        self.network.addConnection(hidden_to_out)

        # Last step to make sure everything works in the connections
        self.network.sortModules()

        self.dataset = SupervisedDataSet(4, 1)
        self.trainer = BackpropTrainer(self.network,
                                       self.dataset,
                                       learningrate=alpha,
                                       momentum=0.0,
                                       verbose=True)

    def computeOutput(self, state_features):
        return self.network.activate(state_features)[0]

    def updateWeights(self, features, desired_output):
        print("updateWeights: features: {0}".format(features))
        print("updateWeights: value: {0}".format(desired_output))
        self.dataset.addSample(features, desired_output)
        # self.trainer.train()
        self.trainer.trainEpochs(10)
        self.dataset.clear()
Example #6
def training(d):
    # net = buildNetwork(d.indim, 55, d.outdim, bias=True,recurrent=False, hiddenclass =SigmoidLayer , outclass = SoftmaxLayer)
    net = FeedForwardNetwork()
    inLayer = SigmoidLayer(d.indim)
    hiddenLayer1 = SigmoidLayer(d.outdim)
    hiddenLayer2 = SigmoidLayer(d.outdim)
    outLayer = SigmoidLayer(d.outdim)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer1)
    net.addModule(hiddenLayer2)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_hidden = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()
    print(net)

    t = BackpropTrainer(net, d, learningrate = 0.9,momentum=0.9, weightdecay=0.01, verbose = True)
    t.trainUntilConvergence(continueEpochs=1200, maxEpochs=1000)
    NetworkWriter.writeToFile(net, 'myNetwork'+str(time.time())+'.xml')
    return t
Example #7
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()

    # Declare the input, hidden and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    # Declare the connections between the nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)

    # Register the connections in the network
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    # The network is ready to use
    n.sortModules()

    return n
def main():
    n = FeedForwardNetwork()

    in_layer = LinearLayer(2)
    hidden_layer = SigmoidLayer(3)
    out_layer = LinearLayer(1)

    n.addInputModule(in_layer)
    n.addModule(hidden_layer)
    n.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    print(">>> print n")
    print(n)

    print(">>> n.activate([1, 2])")
    print(n.activate([1, 2]))

    print(">>> in_to_hidden.params")
    print(in_to_hidden.params)

    print(">>> hidden_to_out.params")
    print(hidden_to_out.params)

    print(">>> n.params")
    print(n.params)
def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except:
            continue
    print(str(len(ds)) + ' points successfully acquired')

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net,
                              learningrate=0.01,
                              momentum=0.1,
                              verbose=True)

    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)

    return net
Example #10
def trainSupervised(self,
        num,
        ds,
        initialLearningrate=0.002,
        decay=0.9999,
        myWeightdecay=0.8,
        momentum=0):
    n = FeedForwardNetwork()
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer)
    n.addModule(self.b)
    n.addOutputModule(self.outLayer)
    n.addConnection(self.in_to_hidden)
    n.addConnection(self.hidden_to_out)
    n.addConnection(self.b_to_hidden)
    n.addConnection(self.b_to_out)
    n.sortModules()
    self.supervisedNet = n
    self.supervisedTrainer = BackpropTrainer(n, ds,
        learningrate=initialLearningrate,
        lrdecay=decay,
        verbose=True,
        weightdecay=myWeightdecay,
        batchlearning=True,
        momentum=momentum)
    self.supervisedTrainer.trainEpochs(num)
def buildMLP(dataSet, num_hidden):
    '''
    Builds a feed-forward network based on the given dataset.
    The hidden layer has num_hidden nodes.
    '''
    #make the network
    network = FeedForwardNetwork()
    #make network layers
    inputLayer = LinearLayer(dataSet.indim)
    hiddenLayer = SigmoidLayer(num_hidden)
    outputLayer = LinearLayer(dataSet.outdim)

    #add the layers to the network
    network.addInputModule(inputLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outputLayer)

    #add bias
    network.addModule(BiasUnit(name='bias'))

    #create connections between layers
    inToHidden = FullConnection(inputLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outputLayer)

    #connect bias
    network.addConnection(FullConnection(network['bias'], outputLayer))
    network.addConnection(FullConnection(network['bias'], hiddenLayer))

    #add connections to the network
    network.addConnection(inToHidden)
    network.addConnection(hiddenToOut)

    network.sortModules()
    return network
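An illustrative way to drive this helper end to end (XOR is just a stand-in dataset):

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)
for a, b, target in [(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)]:
    ds.addSample((a, b), (target,))
net = buildMLP(ds, 4)
BackpropTrainer(net, ds).trainEpochs(50)
print(net.activate((0, 1)))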
Example #12
    def create(number_of_hidden_layers, activation_function, input_length,
               output_length, network_file, classify):
        n = FeedForwardNetwork()
        in_layer = LinearLayer(input_length)
        n.addInputModule(in_layer)

        layer_to_connect_to = in_layer
        for x in range(0, number_of_hidden_layers):
            if activation_function == 'sigmoid':
                hidden_layer = SigmoidLayer(input_length)
            else:
                hidden_layer = TanhLayer(input_length)

            n.addModule(hidden_layer)
            hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                     hidden_layer)
            n.addConnection(hidden_layer_connection)
            layer_to_connect_to = hidden_layer

        if classify:
            out_layer = SoftmaxLayer(output_length)
        else:
            out_layer = LinearLayer(output_length)
        n.addOutputModule(out_layer)

        hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
        n.addConnection(hidden_to_out)
        n.sortModules()
        save_network(n, network_file)
def BackupNetwork(genome=None):
	# initialise a [12, 12, 4] network; the initial weights are the baseline policy's
	
	from pybrain.structure import FeedForwardNetwork,LinearLayer,TanhLayer,FullConnection
	network = FeedForwardNetwork()
	inLayer= LinearLayer(12)
	hiddenLayer = LinearLayer(12)
	outLayer = TanhLayer(4)
	network.addInputModule(inLayer)
	network.addModule(hiddenLayer)
	network.addOutputModule(outLayer)
	
	weights = [] 	
	if genome is None:
		import pickle
		weights = pickle.load(open("seed"))
	else:
		weights = genome
	 
	in_to_hidden = FullConnection(inLayer,hiddenLayer)   
	hidden_to_out = FullConnection(hiddenLayer,outLayer)
	for i in range(0,144):
		in_to_hidden.params[i] = weights[i]
	for j in range(0,48):
		hidden_to_out.params[j] = weights[j+144] 		
	network.addConnection(in_to_hidden)
	network.addConnection(hidden_to_out)
	network.sortModules()
	return network 		
def buildNet(input_size, hidden_size):
    n = FeedForwardNetwork()
    in1Layer = LinearLayer(input_size)
    in2Layer = LinearLayer(input_size)
    hidden1Layer = SigmoidLayer(hidden_size)
    hidden2Layer = SigmoidLayer(hidden_size)
    hidden3Layer = SigmoidLayer(2)
    outLayer = LinearLayer(1)
    
    n.addInputModule(in1Layer)
    n.addInputModule(in2Layer)
    n.addModule(hidden1Layer)
    n.addModule(hidden2Layer)
    n.addModule(hidden3Layer)
    n.addOutputModule(outLayer)
    
    in1_to_hidden1 = FullConnection(in1Layer, hidden1Layer)
    in2_to_hidden2 = FullConnection(in2Layer, hidden2Layer)
    hidden1_to_hidden3 = FullConnection(hidden1Layer, hidden3Layer)
    hidden2_to_hidden3 = FullConnection(hidden2Layer, hidden3Layer)
    hidden3_to_out = FullConnection(hidden3Layer, outLayer)
    
    n.addConnection(in1_to_hidden1)
    n.addConnection(in2_to_hidden2)
    n.addConnection(hidden1_to_hidden3)
    n.addConnection(hidden2_to_hidden3)
    n.addConnection(hidden3_to_out)
    n.sortModules()
    
    return n
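Since the network has two input modules, activate() takes both inputs concatenated into a single vector of length 2 * input_size (the slice order follows the network's input-module ordering, so it is worth verifying against n.indim); a small sketch:

n = buildNet(3, 5)
print(n.indim)                                     # expected: 6
print(n.activate([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]))  # first half -> in1Layer, second half -> in2Layer (assumed order)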
Example #16
    def buildNN(self, net, functions, inp, out):
        layers = []

        inLayer = self.func[functions[0]](inp)
        layers.append(inLayer)
        outLayer = self.func[functions[-1]](out)

        for neural in range(1, len(net) - 1):
            layers.append(self.func[functions[neural]](1))
        layers.append(outLayer)

        connections, recConnections = self.fillConnections(net, [], [0], layers)
        if len(recConnections) == 0:
            n = FeedForwardNetwork()
        else:
            n = RecurrentNetwork()
        n.addInputModule(inLayer)
        for layer in range(1, len(layers) - 1):
            n.addModule(layers[layer])
        n.addOutputModule(outLayer)

        for con in connections:
            n.addConnection(con)
        for rcon in recConnections:
            n.addRecurrentConnection(rcon)
        n.sortModules()
        return n
Example #17
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size+1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)#, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break


    exportCatDogANN(n)
    return n
Example #18
class MyNet:

	def __init__(self, file='config.xml'):
		self.net = FeedForwardNetwork()
		self.file = file


	def constructNet(self, input, hidden, output): 
		inputLayer = LinearLayer(input)
		hiddenLayer = TanhLayer(hidden)
		outputLayer = LinearLayer(output)

		self.net.addInputModule(inputLayer)
		self.net.addModule(hiddenLayer)
		self.net.addOutputModule(outputLayer)

		conn1 = FullConnection(inputLayer, hiddenLayer)
		conn2 = FullConnection(hiddenLayer, outputLayer)

		self.net.addConnection(conn1)
		self.net.addConnection(conn2)

	
	def setup(self):
		self.net.sortModules()

	
	def saveToFile(self,file='config.xml'):
		NetworkWriter.writeToFile(self.net, file)


	def loadFromFile(self, file='config.xml'):
		self.net = NetworkReader.readFrom(file)
Example #19
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #22
def getMultiplayerFeedForwardNetwork(inputLayerLen,
                                     hiddenLayersLenList,
                                     outLayerLen=1):
    #create net
    net = FeedForwardNetwork()
    #create layers
    inLayer = LinearLayer(inputLayerLen, name='inLinearLayer')
    hiddenLayers = [
        SigmoidLayer(n, name='sigmoidLayer' + str(i))
        for i, n in enumerate(hiddenLayersLenList)
    ]
    outLayer = LinearLayer(outLayerLen, name='outLinearLayer')
    #add layers to net
    net.addInputModule(inLayer)
    for l in hiddenLayers:
        net.addModule(l)
    net.addOutputModule(outLayer)
    #create connections
    layers = [inLayer] + hiddenLayers + [outLayer]
    connections = [
        FullConnection(layers[i], layers[i + 1], name='connection' + str(i))
        for i in range(len(layers) - 1)
    ]
    #add connections to net
    for c in connections:
        net.addConnection(c)
    #do some required initialization
    net.sortModules()

    return net
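For example, a 3-8-8-1 network built with this helper (illustrative sizes):

net = getMultiplayerFeedForwardNetwork(3, [8, 8], 1)
print(net['sigmoidLayer0'], net['sigmoidLayer1'])  # the two named hidden layers
print(net.activate([0.1, 0.2, 0.3]))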
def encoderdecoder(outersize,innersize,indata,
                   fname):
    # create network
    n = FeedForwardNetwork()

    inLayer = LinearLayer(outersize)
    hiddenLayer = SigmoidLayer(innersize)
    outLayer = LinearLayer(outersize)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()
    
    # create dataset (autoencoder: each sample is its own target)
    ds = SupervisedDataSet(outersize, outersize)
    for x in indata:
        ds.addSample(x, x)

    # train network
    trainer = BackpropTrainer(n, ds)
    trainer.trainUntilConvergence()

    # FeedForwardNetwork has no saveNetwork method; write it out with NetworkWriter
    NetworkWriter.writeToFile(n, fname)
    
    return [[in_to_hidden,hidden_to_out],
            [inLayer,hiddenLayer,outLayer],
            n]
def setupNetwork(numHiddenNodes, numHiddenLayers, numFeatures, numSpeakers):

    nn = FeedForwardNetwork()
    inputLayer = LinearLayer(numFeatures)
    nn.addInputModule(inputLayer)

    hiddenLayers = []
    for x in range(numHiddenLayers):
        hiddenLayer = TanhLayer(numHiddenNodes)
        nn.addModule(hiddenLayer)
        hiddenLayers.append(hiddenLayer)
    outputLayer = SoftmaxLayer(numSpeakers)
    nn.addOutputModule(outputLayer)

    inputConnection = FullConnection(inputLayer, hiddenLayers[0])
    nn.addConnection(inputConnection)

    # chain consecutive hidden layers; the original connected x to x - 1,
    # which wires the first hidden layer back to the last one
    for x in range(numHiddenLayers - 1):
        connect = FullConnection(hiddenLayers[x], hiddenLayers[x + 1])
        nn.addConnection(connect)

    outputConnection = FullConnection(hiddenLayers[numHiddenLayers - 1],
                                      outputLayer)
    nn.addConnection(outputConnection)
    nn.sortModules()

    return nn
Example #25
def _constructNetwork(self, nIn, nOut, params):
    ''' Construct the network '''
    nHidden = params.setdefault('nHidden', 2)
    hiddenSize = np.empty(nHidden, dtype=int)  # layer sizes must be integers
    for i in range(nHidden):
        pstr = 'hiddenSize[' + str(i) + ']'
        hiddenSize[i] = params.setdefault(pstr, nIn + nOut)
    # Construct network
    ann = FeedForwardNetwork()

    # Add layers
    layers = []
    layers.append(LinearLayer(nIn))
    for nHid in hiddenSize:
        layers.append(SoftmaxLayer(nHid))
    layers.append(LinearLayer(nOut))
    ann.addOutputModule(layers[-1])
    ann.addInputModule(layers[0])
    for mod in layers[1:-1]:
        ann.addModule(mod)

    # Connections
    for i, mod in enumerate(layers):
        if i < len(layers) - 1:
            conn = FullConnection(mod, layers[i+1])
            ann.addConnection(conn)

    # Sort the modules
    ann.sortModules()
    return ann
def __init__(self, index, name, params):
    self.name = name
    self.index = index

    self.status_good = True

    n = FeedForwardNetwork()

    self.inLayer = LinearLayer(17)
    self.hiddenLayer = SigmoidLayer(5)
    self.outLayer = LinearLayer(4)

    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer)
    n.addOutputModule(self.outLayer)

    from pybrain.structure import FullConnection
    in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
    hidden_to_out = FullConnection(self.hiddenLayer, self.outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    for j, i in enumerate(params[0]):
        n.connections[self.hiddenLayer][0].params[j] = i

    for j, i in enumerate(params[1]):
        n.connections[self.inLayer][0].params[j] = i

    self.n = n
Example #27
def _init_net(params_len, output_layer_num, hidden_size):
    # init and train
    net = FeedForwardNetwork()
    """ Next, we're constructing the input, hidden and output layers. """
    inLayer = LinearLayer(params_len)
    hiddenLayer = SigmoidLayer(hidden_size)
    hiddenLayer1 = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(output_layer_num)
    """ (Note that we could also have used a hidden layer of type TanhLayer, LinearLayer, etc.)
    Let's add them to the network: """
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    """ We still need to explicitly determine how they should be connected. For this we use the most
    common connection type, which produces a full connectivity between two layers (or Modules, in general):
    the 'FullConnection'. """

    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden2out = FullConnection(hiddenLayer1, outLayer)

    net.addConnection(in2hidden)
    net.addConnection(hidden2hidden)
    net.addConnection(hidden2out)
    """ All the elements are in place now, so we can do the final step that makes our MLP usable,
    which is to call the 'sortModules()' method. """

    net.sortModules()

    # net = buildNetwork( params_len, hidden_size, 601, bias = True )
    return net
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600 # because the input is a 40x40 picture
    NUMBER_OF_HIDDEN_LAYERS = 10  # despite the name, this is the number of neurons in the single hidden layer
    NUMBER_OF_OUTPUT_CLASSES = 8 # because the output has 8 classes
    inLayer = LinearLayer( NUMBER_OF_INPUT_BYTES )
    hiddenLayer = SigmoidLayer( NUMBER_OF_HIDDEN_LAYERS )
    outLayer = LinearLayer( NUMBER_OF_OUTPUT_CLASSES )
    # Create connections between layers
    # We create FullConnection - each neuron of one layer is connected to each neuron of other layer
    in_to_hidden = FullConnection( inLayer, hiddenLayer )
    hidden_to_out = FullConnection( hiddenLayer, outLayer )
    # Add layers to our network
    network.addInputModule( inLayer )
    network.addModule( hiddenLayer )
    network.addOutputModule( outLayer )
    # Add connections to network
    network.addConnection( in_to_hidden )
    network.addConnection( hidden_to_out )
    # Sort modules to make multilayer perceptron usable
    network.sortModules()
    # prepare array to activate network
    d_letter_array = read_array( "d" )
    # activate network
    network.activate( d_letter_array )
    return network
Example #30
def create_ff_network(options):
    """Create the FeedForware network
    :param options: The input options.
    :return:
    """

    # Create FF network
    net = FeedForwardNetwork()

    # Create each Layer instance
    in_layer = LinearLayer(options['inUnitCount'])
    hidden_layer = SigmoidLayer(options['hiddenUnitCount'])
    out_layer = LinearLayer(options['outUnitCount'])

    # Build network layer topology
    net.addInputModule(in_layer)
    net.addModule(hidden_layer)
    net.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_out)

    # Complete structure network
    net.sortModules()

    return net
Example #31
def ann_network():
    nn = FeedForwardNetwork()

    # define the activation function and # of nodes per layer
    in_layer = LinearLayer(13)
    hidden_layer = SigmoidLayer(5)
    bias_unit = BiasUnit(name='bias')
    out_layer = LinearLayer(1)

    # add modules to the network
    nn.addInputModule(in_layer)
    nn.addModule(hidden_layer)
    nn.addModule(bias_unit)
    nn.addOutputModule(out_layer)

    # define connections between the nodes; the bias must feed *into* the
    # hidden layer (the original connected hidden -> bias, which is backwards)
    hidden_with_bias = FullConnection(bias_unit, hidden_layer)
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    # add connections to the network
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_with_bias)
    nn.addConnection(hidden_to_out)

    # perform internal network initialization
    nn.sortModules()

    return nn
Example #32
	def _createRBF(self):

		# choose random centers on map
		for i in range(self.numCenters):
			self.centers.append(self.env._randomInitPose())

		# create an RBF network
		params = FeedForwardNetwork()
		
		inLayer = LinearLayer(self.task.outdim)
		hiddenLayer = RBFLayer(self.numCenters, self.centers)
		#inLayer = RBFLayer(self.numCenters, self.centers)
		outLayer = LinearLayer(self.task.indim)

		params.addInputModule(inLayer)
		params.addModule(hiddenLayer)
		params.addOutputModule(outLayer)

		in_to_hidden = FullConnection(inLayer,hiddenLayer)
		hidden_to_out = FullConnection(hiddenLayer,outLayer)
		params.addConnection(in_to_hidden)
		params.addConnection(hidden_to_out)

		params.sortModules()

		return params
Example #33
def buildNN2HiddenLayer(trnData, netNo):
    from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
    from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
    from pybrain.structure import FullConnection

    n = FeedForwardNetwork()
    inLayer = LinearLayer(trnData.indim)  # Define Layer Types
    if netNo == 1 or netNo == 3:
        hiddenLayer0 = TanhLayer(hiddenLayer0neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer1neurons)  # Sigmoid
    elif netNo == 2:
        hiddenLayer0 = TanhLayer(hiddenLayer1neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer0neurons)  # Sigmoid

    outLayer = SoftmaxLayer(trnData.outdim)  # SoftmaxLayer

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer0)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)  # Define connections
    hidden0_to_hidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden0)
    n.addConnection(hidden0_to_hidden1)
    n.addConnection(hidden1_to_out)
    n.sortModules()
    return n
Example #34
def createNLayerFFNet(historySize, n, k):
	net = FeedForwardNetwork()

	# Create and add layers
	net.addInputModule(LinearLayer(historySize * 2, name='in'))
	net.addOutputModule(LinearLayer(1, name='out'))

	# Create and add connections between the layers
	baseLayerName = 'hidden%i'
	connectionName = 'c%i'

	net.addModule(SigmoidLayer(k, name=baseLayerName % 0))
	net.addConnection(FullConnection(net['in'], net[baseLayerName % 0], name=connectionName % 0))

	for i in range(1, n):
		layerName = baseLayerName % i
		inLayerName = baseLayerName % (i-1)

		net.addModule(SigmoidLayer(k, name=layerName))
		# name this connection c<i>; the original reused c<i-1>, which collides
		# with the names already assigned above
		net.addConnection(FullConnection(net[inLayerName], net[layerName], name=connectionName % i))

	net.addConnection(FullConnection(net[baseLayerName % (n-1)], net['out'], name=connectionName % n))

	# Preps the net for use
	net.sortModules()

	return net
def build_deep_network(linear_dimensions):
    neural_net = FeedForwardNetwork()

    inLayer = LinearLayer(linear_dimensions)
    hiddenLayer_1 = SigmoidLayer(100)
    hiddenLayer_2 = SigmoidLayer(100)
    hiddenLayer_3 = SigmoidLayer(50)
    outLayer = LinearLayer(1)

    neural_net.addInputModule(inLayer)
    neural_net.addModule(hiddenLayer_1)
    neural_net.addModule(hiddenLayer_2)
    neural_net.addModule(hiddenLayer_3)
    neural_net.addOutputModule(outLayer)

    in_to_hidden_1 = FullConnection(inLayer, hiddenLayer_1)
    hidden_1_to_hidden_2 = FullConnection(hiddenLayer_1, hiddenLayer_2)
    hidden_2_to_hidden_3 = FullConnection(hiddenLayer_2, hiddenLayer_3)
    hidden_3_to_output = FullConnection(hiddenLayer_3, outLayer)

    neural_net.addConnection(in_to_hidden_1)
    neural_net.addConnection(hidden_1_to_hidden_2)
    neural_net.addConnection(hidden_2_to_hidden_3)
    neural_net.addConnection(hidden_3_to_output)

    neural_net.sortModules()
    return neural_net
Example #36
def construct_neural_network(number_of_hidden_nodes, number_of_hidden_layers, inputdim, outputdim):
    """
    Constructs a neural network with a given amount of hidden layers and nodes per hidden layer
    """
    input_layer = LinearLayer(inputdim)
    hidden_layers = []
    output_layer = SoftmaxLayer(outputdim)
    # Nodes of the neural network
    fnn = FeedForwardNetwork()
    fnn.addInputModule(input_layer)
    for i in range(number_of_hidden_layers):
        sigm = SigmoidLayer(number_of_hidden_nodes)
        hidden_layers.append(sigm)
        fnn.addModule(sigm)
    fnn.addOutputModule(output_layer)
    bias = BiasUnit()
    fnn.addModule(bias)
    # Connections of the neural network
    input_connection = FullConnection(input_layer, hidden_layers[0])
    fnn.addConnection(input_connection)
    fnn.addConnection(FullConnection(bias, hidden_layers[0]))
    for i in range(len(hidden_layers) - 1):
        full = FullConnection(hidden_layers[i], hidden_layers[i+1])
        fnn.addConnection(full)
        fnn.addConnection(FullConnection(bias, hidden_layers[i+1]))
    output_connection = FullConnection(hidden_layers[-1], output_layer)
    fnn.addConnection(output_connection)
    # presumably meant to bias the output layer; bias -> hidden_layers[0]
    # was already connected above
    fnn.addConnection(FullConnection(bias, output_layer))
    fnn.sortModules()
    return fnn
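An illustrative call with small dimensions:

fnn = construct_neural_network(number_of_hidden_nodes=6, number_of_hidden_layers=2,
                               inputdim=4, outputdim=3)
print(fnn.activate([0.1, 0.2, 0.3, 0.4]))  # softmax scores over the 3 classes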
def evalFunc(ds):
    trains = []
    tests = []
    epochsNums = []
    parameters = range(1, 40)
    testAmount = 10
    for i in parameters:
        trainError = 0
        testError = 0
        for testNum in range(testAmount):
            tstdata, trndata = ds.splitWithProportion(0.25)
            hidden_size = i
            epochsNum = 10
            """
            n = buildNetwork( 1, hidden_size, 1, bias = True )
            
            
            """

            inLayer = LinearLayer(len(ds.getSample(0)[0]))
            hiddenLayer = SigmoidLayer(hidden_size)
            outLayer = LinearLayer(len(ds.getSample(0)[1]))
            n = FeedForwardNetwork()
            n.addInputModule(inLayer)
            n.addModule(hiddenLayer)
            b = BiasUnit()
            n.addModule(b)
            n.addOutputModule(outLayer)
            in_to_hidden = FullConnection(inLayer, hiddenLayer)
            hidden_to_out = FullConnection(hiddenLayer, outLayer)
            b_to_hidden = FullConnection(b, hiddenLayer)
            b_to_out = FullConnection(b, outLayer)

            n.addConnection(in_to_hidden)
            n.addConnection(hidden_to_out)
            n.addConnection(b_to_hidden)
            n.addConnection(b_to_out)

            n.sortModules()
            # print n.activate([1, 2])

            trainer = BackpropTrainer(n, trndata)  # , verbose=True, weightdecay=0)
            trainer.trainUntilConvergence(
                verbose=True, validationProportion=0.15, maxEpochs=epochsNum, continueEpochs=10
            )
            trainError += printError(n, trndata, "trndata", ds.outputMax, ds.outputMin)
            testError += printError(n, tstdata, "tstdata", ds.outputMax, ds.outputMin)
            epochsNums.append(epochsNum)
            # print n.activateOnDataset(tstdata)
        trains.append(trainError / testAmount)
        tests.append(testError / testAmount)

    plt.plot(parameters, trains, label="train " + ds.label)
    plt.plot(parameters, tests, label="test " + ds.label)
    plt.legend().draggable()
    plt.title("Hidden layer size influence (" + str(epochsNum) + " epochs)")
    plt.xlabel("Hidden layer size")
    plt.ylabel("Normalized RMSE")
    plt.grid()
Example #39
class NeuralNetwork(BaseEstimator, RegressorMixin):
    def __init__(
        self,
        inp_neu=4,
        hid_neu=3,
        out_neu=1,
        learn_rate=0.1,
        nomentum=0.5,
        weight_dec=0.0001,
        epochs=100,
        split_prop=0.25,
    ):
        self.inp_neu = inp_neu
        self.hid_neu = hid_neu
        self.out_neu = out_neu
        self.learn_rate = learn_rate
        self.nomentum = nomentum
        self.weight_dec = weight_dec
        self.epochs = epochs
        self.split_prop = split_prop

    def data(self, X, y=None):
        DS = SupervisedDataSet(self.inp_neu, self.out_neu)
        for i in range(0, len(X)):
            DS.addSample((X[i][0], X[i][1], X[i][2], X[i][3]), y[i])  # NOTE: hard-coded for four input features
        return DS

    def fit(self, X, y):
        self.n = FeedForwardNetwork()

        self.n.addInputModule(SigmoidLayer(self.inp_neu, name="in"))
        self.n.addModule(SigmoidLayer(self.hid_neu, name="hidden"))
        self.n.addOutputModule(LinearLayer(self.out_neu, name="out"))
        self.n.addConnection(FullConnection(self.n["in"], self.n["hidden"], name="c1"))
        self.n.addConnection(FullConnection(self.n["hidden"], self.n["out"], name="c2"))

        self.n.sortModules()  # initialisation

        self.tstdata, trndata = self.data(X, y).splitWithProportion(self.split_prop)

        trainer = BackpropTrainer(
            self.n, trndata, learningrate=self.learn_rate, momentum=self.nomentum, weightdecay=self.weight_dec
        )
        trainer.trainUntilConvergence(verbose=True, maxEpochs=self.epochs)

        return self

    def predict(self, X):
        self.yhat = []
        for i in X:
            self.yhat.append(float(self.n.activate(i)))
        self.yhat = np.array(self.yhat)
        return self.yhat

    def score(self, y):
        vect_se = (self.yhat - y) ** 2
        mse = float(np.sum(vect_se)) / float(len(vect_se))
        return mse
def fit_predict(xTrain, yTrain, xTest, epochs, neurons):

    # Check edge cases
    if (not len(xTrain) == len(yTrain) or len(xTrain) == 0 or len(xTest) == 0
            or epochs <= 0):
        return

    # Randomize the training data (probably not necessary but pybrain might
    # not shuffle the data itself, so perform as safety check)
    indices = np.arange(len(xTrain))
    np.random.shuffle(indices)

    trainSwapX = [xTrain[x] for x in indices]
    trainSwapY = [yTrain[x] for x in indices]

    supTrain = SupervisedDataSet(len(xTrain[0]), 1)
    for x in range(len(trainSwapX)):
        supTrain.addSample(trainSwapX[x], trainSwapY[x])

    # Construct the feed-forward neural network

    n = FeedForwardNetwork()

    inLayer = LinearLayer(len(xTrain[0]))
    hiddenLayer1 = SigmoidLayer(neurons)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    # Train the neural network on the training partition, validating
    # the training progress on the validation partition

    trainer = BackpropTrainer(n,
                              dataset=supTrain,
                              momentum=0.1,
                              learningrate=0.01,
                              verbose=False,
                              weightdecay=0.01)

    trainer.trainUntilConvergence(dataset=supTrain,
                                  maxEpochs=epochs,
                                  validationProportion=0.30)

    outputs = []
    for x in xTest:
        outputs.append(n.activate(x))

    return outputs
Example #41
def main(f_samples):
    f_reading = open(f_samples, 'r')
    global data
    data = []

    for line in f_reading:
        line = line.split()
        data.append( (float(line[0]), float(line[-1])) )

    #function
    data_module = lambda x: [data[z] for z in range(len(data)) if z % 5 == x]

    global data1
    data1 = [data_module(0), data_module(1), data_module(2), data_module(3), data_module(4)]

    global data_transformed
    data_transformed = take(data, rate = 60)

    global data_transformed_training
    data_transformed_training = [data_transformed[x] for x in range(len(data_transformed)) if uniform(0, 1) > 0.3]

    #Learning process-----------------------------------------------------------------

    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
#    net.addModule(hiddenLayer0)
#    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

#    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
#    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print(net)
    ## Net with 3 inputs, 8 hidden neurons in one layer and 8 in another, and 1 out.
    #net = buildNetwork(3,8,8,1)
    ##Set with 2 inputs and one output for each sample
    samples = SupervisedDataSet(3,1)

    for i in data_transformed_training:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)

    print('Training')
    trainer.trainUntilConvergence(maxEpochs=10)

    print('Comparing')
    compare_net_samples(net, data_transformed)
    print("Number of samples %d for training." % len(data_transformed_training))
Example #42
def testMdlstm(self):
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(1, name='in'))
    net.addModule(MDLSTMLayer(1, 1, name='hidden'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['hidden']))
    net.addConnection(FullConnection(net['hidden'], net['out']))
    net.sortModules()
    self.equivalence_feed_forward(net, net.convertToFastNetwork())
Example #44
def fromModules(cls, visible, hidden, bias, con, biascon):
    net = FeedForwardNetwork()
    net.addInputModule(visible)
    net.addModule(bias)
    net.addOutputModule(hidden)
    net.addConnection(con)
    net.addConnection(biascon)
    net.sortModules()
    return cls(net)
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name='i'))
    net.addModule(TanhLayer(hiddim, name='h'))
    net.addOutputModule(ThresholdLayer(outdim, name='o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
def initMaxentNetwork():
  """Builds a network with just a sigmoid output layer, i.e. a multi-class maximum entropy model."""
  fnn = FeedForwardNetwork()
  inLayer = LinearLayer(numFeatures)
  fnn.addInputModule(inLayer)
  outLayer = SigmoidLayer(3)
  fnn.addOutputModule(outLayer)
  fnn.addConnection(FullConnection(inLayer, outLayer))
  fnn.sortModules()
  return fnn
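numFeatures is assumed to be a module-level global here; a minimal sketch under that assumption:

numFeatures = 10                          # assumed global read by the builder
fnn = initMaxentNetwork()
print(fnn.activate([0.0] * numFeatures))  # three per-class sigmoid scores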
    def getFitness(self, smMatrix):  #Store the sm state into memory
        fit = 0

        #Fitness function (3) *************************************************************
        #Record the sm data for this loop and consider its properties
        #print(smMatrix)
        #print(len(smMatrix))

        #net = buildNetwork(3,10,1, bias = True)
        net = FeedForwardNetwork()
        inp = LinearLayer(3)
        h1 = SigmoidLayer(10)
        outp = LinearLayer(1)
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        # create connections
        iToH = FullConnection(inp, h1)
        hToO = FullConnection(h1, outp)
        net.addConnection(iToH)
        net.addConnection(hToO)
        # finish up
        net.sortModules()

        ds = SupervisedDataSet(3, 1)

        trainSet = []
        for index_x, x in enumerate(smMatrix):
            if index_x > 0 and index_x < len(smMatrix) - 1:
                #trainSet.append( [smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2], smMatrix[index_x+1][3] ] )
                ds.addSample(([
                    smMatrix[index_x][0], smMatrix[index_x][1],
                    smMatrix[index_x][2]
                ]), (smMatrix[index_x + 1][3]))
        #print(trainSet)
        #print(ds)
        trainer = BackpropTrainer(net, ds, weightdecay=0.01)
        err = trainer.trainUntilConvergence(maxEpochs=100)
        #Visualize the network performance and structure.

        #nn = NNregression(ds, epoinc = 10)
        #nn.setupNN()
        #nn.runTraining()
        #self.pesos_conexiones(net)
        print("Input to hidden", iToH.params)
        #print("H to output", hToO.params)
        #print(iToH.params)
        # fitness: sum of all connection weights (the original zipped n1 into
        # triples and summed the tuples, which raises a TypeError)
        n1 = iToH.params
        n2 = hToO.params
        fit = sum(n1) + sum(n2)
        print(fit)
        return fit
def mlpClassifier(X,y,train_indices, test_indices, mom=0.1,weightd=0.01, epo=5):
    X_train, y_train, X_test, y_test = X[train_indices],y[train_indices], X[test_indices], y[test_indices]

    #Converting the data into a dataset which is easily understood by PyBrain. 
    tstdata = ClassificationDataSet(X.shape[1],target=1,nb_classes=8)
    trndata = ClassificationDataSet(X.shape[1],target=1,nb_classes=8)
 #   print "shape of X_train & y_train: " + str(X_train.shape) + str(y_train.shape)
    for i in range(y_train.shape[0]):
        trndata.addSample(X_train[i,:], y_train[i])
    for i in range(y_test.shape[0]):
        tstdata.addSample(X_test[i,:], y_test[i])
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()

    #printing the specs of data
#    print "Number of training patterns: ", len(trndata)
#    print "Input and output dimensions: ", trndata.indim, trndata.outdim
#    print "First sample (input, target, class):"
#    print trndata['input'][0], trndata['target'][0], trndata['class'][0]

    #The neural-network used
 #   print "Building Network..."
    #input layer, hidden layer of size 10(very small), output layer
    ANNc = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name="ip")
    hLayer1 = TanhLayer(100, name = "h1")
    hLayer2 = SigmoidLayer(100, name = "h2")
    outLayer = SoftmaxLayer(trndata.outdim, name = "op")

    ANNc.addInputModule(inLayer)
    ANNc.addModule(hLayer1)
    ANNc.addModule(hLayer2)
    ANNc.addOutputModule(outLayer)

    ip_to_h1 = FullConnection(inLayer, hLayer1, name = "ip->h1")
    h1_to_h2 = FullConnection(hLayer1, hLayer2, name = "h1->h2")
    h2_to_op = FullConnection(hLayer2, outLayer, name = "h2->op")

    ANNc.addConnection(ip_to_h1)
    ANNc.addConnection(h1_to_h2)
    ANNc.addConnection(h2_to_op)
    ANNc.sortModules()

#    print "Done. Training the network."

    #The trainer used, in our case Back-propagation trainer
    trainer = BackpropTrainer( ANNc, dataset=trndata, momentum=mom, verbose=True, weightdecay=weightd)
    trainer.trainEpochs( epo )

    #The error
    trnresult = percentError( trainer.testOnClassData(dataset=trndata), trndata['class'] )
    tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )
 #   print "Done."
    return ANNc, trainer.totalepochs, (100 - trnresult), (100 - tstresult) 
class UnmannedNet:
    def __init__(self, n_in, n_hidden, n_out):
        self.net = FeedForwardNetwork()
        inLayer = LinearLayer(n_in)
        hiddenLayer1 = SigmoidLayer(n_hidden)
        hiddenLayer2 = SigmoidLayer(n_hidden)
        outLayer = LinearLayer(n_out)
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer1)
        self.net.addModule(hiddenLayer2)
        self.net.addOutputModule(outLayer)
        in_to_hidden = FullConnection(inLayer, hiddenLayer1)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)
        hidden_to_hidden = FullConnection(hiddenLayer1, hiddenLayer2)
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()
        #self.net.params
        self.ds = SupervisedDataSet(n_in, n_out)

    def load_network(self, fName='./data/mynetwork.xml'):
        self.net = NetworkReader.readFrom(fName)

    def save_network(self, fName='./data/mynetwork.xml'):
        NetworkWriter.writeToFile(self.net, fName)

    def train(self, number):
        self.trainer = BackpropTrainer(self.net, self.ds)
        self.trainer.trainEpochs(number)

    def add_data(self, image, control):
        self.ds.addSample(image, control)

    def save_data(self, fName="./data/mydata"):
        SupervisedDataSet.saveToFile(self.ds, fName)

    def read_data(self, fName="./data/mydata"):
        self.ds = SupervisedDataSet.loadFromFile(fName)

    def prediction(self, image):
        return self.net.activate(image)

    def evaluate(self, valueFaultTolerant):
        target = self.ds.data.get('target')
        inputvalue = self.ds.data.get('input')
        numberOfSample = target.shape[0]
        numberOfCorrect = 0
        for i in range(0, numberOfSample):
            if (abs(target[i] - self.prediction(inputvalue[i])) <=
                    valueFaultTolerant):
                numberOfCorrect += 1
        print "Correct rate is" + str(
            float(numberOfCorrect) / float(numberOfSample))
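A hedged end-to-end sketch of the class (sizes and the image/control vectors are illustrative):

net = UnmannedNet(n_in=1600, n_hidden=32, n_out=3)
net.add_data(image_vector, control_vector)  # hypothetical 1600- and 3-dim samples
net.train(5)
print(net.prediction(image_vector))
net.save_network('./data/mynetwork.xml')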
Example #52
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
def createNNLong(trndata):
    nn = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = TanhLayer(trndata.outdim, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
Example #54
def buildNestedNetwork():
    """ build a nested network. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
Example #55
def generate_forecasters(data, dtt, alpha):
    #Learning process-----------------------------------------------------------------
    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer0)
    #    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    #    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
    #    net.addConnection(FullConnection(hiddenLayer0, outLayer))
    #    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    #    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print(net)
    ## Net with 3 inputs, 8 hidden neurons in one layer and 8 in another, and 1 out.
    #net = buildNetwork(3,8,8,1)
    ##Set with 2 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)

    for i in dtt:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)

    print('Training')
    #trainer.trainUntilConvergence(maxEpochs= 1)

    #Making Forecasters---------------------------------------------------------------
    aux = map(lambda x: x[0], data)

    def exp(self, a, x):
        self.exp = a * data[aux.index(x) - 1][1] + (1 - a) * self.exp
        return self.exp

    naive = Forecaster(name='Naive',
                       predict_function=lambda x: data[aux.index(x) - 1][1])
    exponential = Forecaster(name='Exponential')
    exponential.exp = data[0][1]
    exponential.predict = lambda x: exp(exponential, alpha, x)
    network = Forecaster(name='Network', predict_function=net.activate)

    return naive, exponential, network
Example #56
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(
        FullConnection(inp,
                       hid,
                       inSliceFrom=100,
                       inSliceTo=5150,
                       outSliceFrom=1,
                       outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.01:
            break
        count = count + 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #57
def classify(imSize, dataset, hidden_neurons, initial_error):

    tstdata, trndata = dataset.splitWithProportion(0.25)
    # gives a 0.75 training / 0.25 test split of the data

    # imSize is the size of the input layer
    # define layer structures
    inLayer = LinearLayer(imSize)
    hiddenLayer = SigmoidLayer(imSize // 3)  # integer division keeps the layer size an int
    outLayer = SoftmaxLayer(1)

    # add layers to network
    net = FeedForwardNetwork()
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)

    # define conncections for network
    theta1 = FullConnection(inLayer, hiddenLayer)
    theta2 = FullConnection(hiddenLayer, outLayer)

    # add connections to network
    net.addConnection(theta1)
    net.addConnection(theta2)

    # sort module
    net.sortModules()

    dataset._convertToOneOfMany()

    # NOTE: training below uses this buildNetwork shortcut; the hand-built net
    # above is left unused
    fnn = buildNetwork(dataset.indim,
                       imSize // 3,
                       dataset.outdim,
                       outclass=SoftmaxLayer)

    # Create a backpropagation trainer using the dataset and the network
    trainer = BackpropTrainer(fnn, dataset)

    error = initial_error
    iteration = 0
    # iterate until the error drops below 0.01
    while error > 0.01:
        error = trainer.train()
        iteration += 1
        # print("Iteration: {0} Error {1}".format(iteration, error))
    print("Finished after", iteration, "iterations")
    print("Final error:", error)
    return fnn
Example #58
def gen_nn(in_size, hidden_size, out_size):
    nn = FeedForwardNetwork()

    inLayer = LinearLayer(in_size)
    hiddenLayer = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(out_size)

    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)

    nn.addConnection(FullConnection(inLayer, hiddenLayer))
    nn.addConnection(FullConnection(hiddenLayer, outLayer))
    nn.sortModules()

    return nn
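To close the section, a minimal training loop around gen_nn (toy XOR data, illustrative hyperparameters):

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

nn = gen_nn(2, 4, 1)
ds = SupervisedDataSet(2, 1)
for sample, target in [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]:
    ds.addSample(sample, (target,))
trainer = BackpropTrainer(nn, ds, learningrate=0.05)
for _ in range(200):
    trainer.train()
print(nn.activate((0, 1)))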