Example no. 1
 def _constructNetwork(self, nIn, nOut, params):
     ''' Construct the network '''
     nHidden = params.setdefault('nHidden', 2)
     hiddenSize = []
     for i in range(nHidden):
         pstr = 'hiddenSize[' + str(i) + ']'
         # collect sizes as ints; np.empty() yields floats, which the
         # layer constructors below do not accept
         hiddenSize.append(int(params.setdefault(pstr, nIn + nOut)))
     # Construct network
     ann = FeedForwardNetwork()
     
     # Add layers
     layers = []
     layers.append(LinearLayer(nIn))
     for nHid in hiddenSize:
         layers.append(SoftmaxLayer(nHid))
     layers.append(LinearLayer(nOut))
     ann.addOutputModule(layers[-1])
     ann.addInputModule(layers[0])
     for mod in layers[1:-1]:
         ann.addModule(mod)
     
     # Connections
     for i, mod in enumerate(layers):
         if i < len(layers) - 1:
             conn = FullConnection(mod, layers[i+1])
             ann.addConnection(conn)
     
     # Sort the modules
     ann.sortModules()
     return ann
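
Since this method is shown outside its class, here is a hedged sketch of how the params dict is consumed; the builder instance and all sizes are assumptions for illustration only:

# Hypothetical: assumes the method belongs to some builder object and that
# FeedForwardNetwork/LinearLayer/SoftmaxLayer/FullConnection are imported.
params = {
    'nHidden': 2,        # two hidden layers
    'hiddenSize[0]': 8,  # explicit size for the first hidden layer
    # 'hiddenSize[1]' omitted: setdefault() falls back to nIn + nOut
}
ann = builder._constructNetwork(nIn=4, nOut=3, params=params)
print(ann.activate([0.1, 0.2, 0.3, 0.4]))  # three outputs
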
Example no. 2
def ann_network():
    nn = FeedForwardNetwork()

    # define the activation function and # of nodes per layer
    in_layer = LinearLayer(13)
    hidden_layer = SigmoidLayer(5)
    bias_unit = BiasUnit(name='bias')
    out_layer = LinearLayer(1)

    # add modules to the network
    nn.addInputModule(in_layer)
    nn.addModule(hidden_layer)
    nn.addModule(bias_unit)
    nn.addOutputModule(out_layer)

    # define connections between the nodes
    bias_to_hidden = FullConnection(bias_unit, hidden_layer)  # the bias feeds the hidden layer, not the reverse
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    # add connections to the network
    nn.addConnection(in_to_hidden)
    nn.addConnection(bias_to_hidden)
    nn.addConnection(hidden_to_out)

    # perform the network's internal initialization
    nn.sortModules()

    return nn
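
A minimal usage sketch for ann_network(); the 13-in/1-out shape suggests a regression task, and the sample values below are made up purely for illustration:

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = ann_network()
ds = SupervisedDataSet(13, 1)      # dimensions must match the 13-in / 1-out topology
ds.addSample([0.1] * 13, [0.5])    # dummy sample
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(5)
print(net.activate([0.1] * 13))
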
def buildMLP(dataSet, num_hidden):
    '''
    Builds a feed-forward network from the given dataset.
    The hidden layer has num_hidden nodes.
    '''
    #make the network
    network = FeedForwardNetwork()
    #make network layers
    inputLayer = LinearLayer(dataSet.indim)
    hiddenLayer = SigmoidLayer(num_hidden)
    outputLayer = LinearLayer(dataSet.outdim)

    #add the layers to the network
    network.addInputModule(inputLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outputLayer)

    #add bias
    network.addModule(BiasUnit(name='bias'))

    #create connections between layers
    inToHidden = FullConnection(inputLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outputLayer)

    #connect bias
    network.addConnection(FullConnection(network['bias'], outputLayer))
    network.addConnection(FullConnection(network['bias'], hiddenLayer))

    #add connections to the network
    network.addConnection(inToHidden)
    network.addConnection(hiddenToOut)

    network.sortModules()
    return network
def encoderdecoder(outersize,innersize,indata,
                   fname):
    # create network
    n = FeedForwardNetwork()

    inLayer = LinearLayer(outersize)
    hiddenLayer = SigmoidLayer(innersize)
    outLayer = LinearLayer(outersize)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()
    
    # create dataset
    ds = SupervisedDataSet(outersize,outersize)
    for x, y in zip(indata, indata):  # autoencoder: the target equals the input
        ds.addSample(x, y)

    # train network
    trainer = BackpropTrainer(n,ds)
    trainer.trainUntilConvergence()

    # FeedForwardNetwork has no saveNetwork method; persist via NetworkWriter
    NetworkWriter.writeToFile(n, fname)
    
    return [[in_to_hidden,hidden_to_out],
            [inLayer,hiddenLayer,outLayer],
            n]
def BackupNetwork(genome=None):
	# Initialize a [12, 12, 4] network; the initial weights come from the baseline policy
	
	from pybrain.structure import FeedForwardNetwork,LinearLayer,TanhLayer,FullConnection
	network = FeedForwardNetwork()
	inLayer= LinearLayer(12)
	hiddenLayer = LinearLayer(12)
	outLayer = TanhLayer(4)
	network.addInputModule(inLayer)
	network.addModule(hiddenLayer)
	network.addOutputModule(outLayer)
	
	weights = [] 	
	if genome is None:
		import pickle
		weights = pickle.load(open("seed", "rb"))
	else:
		weights = genome
	 
	in_to_hidden = FullConnection(inLayer,hiddenLayer)   
	hidden_to_out = FullConnection(hiddenLayer,outLayer)
	for i in range(0,144):
		in_to_hidden.params[i] = weights[i]
	for j in range(0,48):
		hidden_to_out.params[j] = weights[j+144] 		
	network.addConnection(in_to_hidden)
	network.addConnection(hidden_to_out)
	network.sortModules()
	return network 		
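
The hard-coded loop bounds encode the topology: a 12-to-12 full connection holds 144 weights and a 12-to-4 one holds 48, so a genome needs 192 entries. A quick sanity check (the zero genome is an assumption for illustration):

genome = [0.0] * (12 * 12 + 12 * 4)  # 144 in->hidden weights + 48 hidden->out weights
net = BackupNetwork(genome)
print(len(net.params))               # 192: both connections, no bias units
print(net.activate([0.0] * 12))      # four tanh outputs
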
def buildNet(input_size, hidden_size):
    n = FeedForwardNetwork()
    in1Layer = LinearLayer(input_size)
    in2Layer = LinearLayer(input_size)
    hidden1Layer = SigmoidLayer(hidden_size)
    hidden2Layer = SigmoidLayer(hidden_size)
    hidden3Layer = SigmoidLayer(2)
    outLayer = LinearLayer(1)
    
    n.addInputModule(in1Layer)
    n.addInputModule(in2Layer)
    n.addModule(hidden1Layer)
    n.addModule(hidden2Layer)
    n.addModule(hidden3Layer)
    n.addOutputModule(outLayer)
    
    in1_to_hidden1 = FullConnection(in1Layer, hidden1Layer)
    in2_to_hidden2 = FullConnection(in2Layer, hidden2Layer)
    hidden1_to_hidden3 = FullConnection(hidden1Layer, hidden3Layer)
    hidden2_to_hidden3 = FullConnection(hidden2Layer, hidden3Layer)
    hidden3_to_out = FullConnection(hidden3Layer, outLayer)
    
    n.addConnection(in1_to_hidden1)
    n.addConnection(in2_to_hidden2)
    n.addConnection(hidden1_to_hidden3)
    n.addConnection(hidden2_to_hidden3)
    n.addConnection(hidden3_to_out)
    n.sortModules()
    
    return n
def main():
    n = FeedForwardNetwork()

    in_layer = LinearLayer(2)
    hidden_layer = SigmoidLayer(3)
    out_layer = LinearLayer(1)

    n.addInputModule(in_layer)
    n.addModule(hidden_layer)
    n.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    print(">>> print n")
    print(n)

    print(">>> n.activate([1, 2])")
    print(n.activate([1, 2]))

    print(">>> in_to_hidden.params")
    print(in_to_hidden.params)

    print(">>> hidden_to_out.params")
    print(hidden_to_out.params)

    print(">>> n.params")
    print(n.params)
Example no. 8
    def __init__(self, index, name, params):
        self.name = name
        self.index = index
        self.liste = []#ClassificationDataSet(17, 1, nb_classes=4)        
        self.status_good = True

        self.number_of_moves = 0
        self.number_of_sound_moves = 0

        n = FeedForwardNetwork()
        
        self.inLayer = LinearLayer(5)
        self.hiddenLayer1 = SigmoidLayer(15)
        self.hiddenLayer2 = SigmoidLayer(15)        
        self.hiddenLayer3 = SigmoidLayer(15)
        self.outLayer = LinearLayer(4)
     
        
        n.addInputModule(self.inLayer)
        n.addModule(self.hiddenLayer1)
        n.addModule(self.hiddenLayer2)
        n.addModule(self.hiddenLayer3)
        n.addOutputModule(self.outLayer)
        
        from pybrain.structure import FullConnection
        in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer1)
        hidden_to_hidden1 = FullConnection(self.hiddenLayer1, self.hiddenLayer2)
        hidden_to_hidden2 = FullConnection(self.hiddenLayer2, self.hiddenLayer3)
                
        hidden_to_out = FullConnection(self.hiddenLayer3, self.outLayer)
        
        n.addConnection(in_to_hidden)
        n.addConnection(hidden_to_hidden1)
        n.addConnection(hidden_to_hidden2)
        n.addConnection(hidden_to_out)

        n.sortModules()
        self.n = n
Example no. 9
def construct_neural_network(number_of_hidden_nodes, number_of_hidden_layers, inputdim, outputdim):
    """
    Constructs a neural network with a given amount of hidden layers and nodes per hidden layer
    """
    input_layer = LinearLayer(inputdim)
    hidden_layers = []
    output_layer = SoftmaxLayer(outputdim)
    # Nodes of the neural network
    fnn = FeedForwardNetwork()
    fnn.addInputModule(input_layer)
    for i in range(number_of_hidden_layers):
        sigm = SigmoidLayer(number_of_hidden_nodes)
        hidden_layers.append(sigm)
        fnn.addModule(sigm)
    fnn.addOutputModule(output_layer)
    bias = BiasUnit()
    fnn.addModule(bias)
    # Connections of the neural network
    input_connection = FullConnection(input_layer, hidden_layers[0])
    fnn.addConnection(input_connection)
    fnn.addConnection(FullConnection(bias, hidden_layers[0]))
    for i in range(len(hidden_layers) - 1):
        full = FullConnection(hidden_layers[i], hidden_layers[i+1])
        fnn.addConnection(full)
        fnn.addConnection(FullConnection(bias, hidden_layers[i+1]))
    output_connection = FullConnection(hidden_layers[-1], output_layer)
    fnn.addConnection(output_connection)
    fnn.addConnection(FullConnection(bias, output_layer))  # the original duplicated the bias->first-hidden connection here
    fnn.sortModules()
    return fnn
Example no. 10
class NNet(FunctionApproximator):
	def __init__(self, num_features, num_hidden_neurons):
		super(NNet,self).__init__(num_features)

		self.ds = SupervisedDataSet(num_features, 1)

		self.net = FeedForwardNetwork()
		self.net.addInputModule(LinearLayer(num_features, name='in'))
		self.net.addModule(LinearLayer(num_hidden_neurons, name='hidden'))
		self.net.addOutputModule(LinearLayer(1, name='out'))
		self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
		self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
		self.net.sortModules()

	def getY(self, inpt):
		# NOTE (original author): this has been observed to return NaN
		return self.net.activate(inpt)

	def update(self, inpt, target):
		# The TD target, e.g. q_old + alpha * (reward + gamma * q_new - q_old),
		# must be computed by the caller: the original recomputed it here from
		# variables (state, action, reward, ...) that do not exist in this scope.
		self.ds.addSample(inpt, target)
		trainer = BackpropTrainer(self.net, self.ds)
		trainer.train()
Example no. 11
def createNLayerFFNet(historySize, n, k):
	net = FeedForwardNetwork()

	# Create and add layers
	net.addInputModule(LinearLayer(historySize * 2, name='in'))
	net.addOutputModule(LinearLayer(1, name='out'))

	# Create and add connections between the layers
	baseLayerName = 'hidden%i'
	connectionName = 'c%i'

	net.addModule(SigmoidLayer(k, name=baseLayerName % 0))
	net.addConnection(FullConnection(net['in'], net[baseLayerName % 0], name=connectionName % 0))
	
	for i in xrange(1, n):
		layerName = baseLayerName % i
		inLayerName = baseLayerName % (i-1)

		net.addModule(SigmoidLayer(k, name=layerName))
		# name each connection after its own index; the original reused
		# connectionName % (i-1), which collided with the name 'c0' above
		net.addConnection(FullConnection(net[inLayerName], net[layerName], name=connectionName % i))

	net.addConnection(FullConnection(net[baseLayerName % (n-1)], net['out'], name=connectionName % n))

	# Preps the net for use
	net.sortModules()

	return net
Example no. 12
    def __init__(self, index, name, params):
        self.name = name
        self.index = index
        
        self.status_good = True

        n = FeedForwardNetwork()
        
        self.inLayer = LinearLayer(17)
        self.hiddenLayer = SigmoidLayer(5)
        self.outLayer = LinearLayer(4)
     
        
        n.addInputModule(self.inLayer)
        n.addModule(self.hiddenLayer)
        n.addOutputModule(self.outLayer)
        
        from pybrain.structure import FullConnection
        in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        hidden_to_out = FullConnection(self.hiddenLayer, self.outLayer)
        
        n.addConnection(in_to_hidden)
        n.addConnection(hidden_to_out)
        
        n.sortModules()
        
        for j, i in enumerate(params[0]):
            n.connections[self.hiddenLayer][0].params[j] = i  
            
        for j, i in enumerate(params[1]):
            n.connections[self.inLayer][0].params[j] = i

        self.n = n
Example no. 13
class MyNet:

	def __init__(self, file='config.xml'):
		self.net = FeedForwardNetwork()
		self.file = file


	def constructNet(self, input, hidden, output): 
		inputLayer = LinearLayer(input)
		hiddenLayer = TanhLayer(hidden)
		outputLayer = LinearLayer(output)

		self.net.addInputModule(inputLayer)
		self.net.addModule(hiddenLayer)
		self.net.addOutputModule(outputLayer)

		conn1 = FullConnection(inputLayer, hiddenLayer)
		conn2 = FullConnection(hiddenLayer, outputLayer)

		self.net.addConnection(conn1)
		self.net.addConnection(conn2)

	
	def setup(self):
		self.net.sortModules()

	
	def saveToFile(self,file='config.xml'):
		NetworkWriter.writeToFile(self.net, file)


	def loadFromFile(self, file='config.xml'):
		self.net = NetworkReader.readFrom(file)
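
A hedged round-trip sketch for MyNet; the layer sizes and file name are arbitrary, and NetworkWriter/NetworkReader come from pybrain.tools.customxml:

mynet = MyNet()
mynet.constructNet(2, 3, 1)       # input, hidden, output sizes
mynet.setup()                     # sortModules() must run before activation
mynet.saveToFile('config.xml')

clone = MyNet()
clone.loadFromFile('config.xml')  # replaces clone.net with the saved network
print(clone.net.activate([0.5, -0.5]))
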
def build_deep_network(linear_dimensions):
    neural_net = FeedForwardNetwork()

    inLayer = LinearLayer(linear_dimensions)
    hiddenLayer_1 = SigmoidLayer(100)
    hiddenLayer_2 = SigmoidLayer(100)
    hiddenLayer_3 = SigmoidLayer(50)
    outLayer = LinearLayer(1)

    neural_net.addInputModule(inLayer)
    neural_net.addModule(hiddenLayer_1)
    neural_net.addModule(hiddenLayer_2)
    neural_net.addModule(hiddenLayer_3)
    neural_net.addOutputModule(outLayer)

    in_to_hidden_1 = FullConnection(inLayer, hiddenLayer_1)
    hidden_1_to_hidden_2 = FullConnection(hiddenLayer_1, hiddenLayer_2)
    hidden_2_to_hidden_3 = FullConnection(hiddenLayer_2, hiddenLayer_3)
    hidden_3_to_output = FullConnection(hiddenLayer_3, outLayer)

    neural_net.addConnection(in_to_hidden_1)
    neural_net.addConnection(hidden_1_to_hidden_2)
    neural_net.addConnection(hidden_2_to_hidden_3)
    neural_net.addConnection(hidden_3_to_output)

    neural_net.sortModules()
    return neural_net
Example no. 15
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()

    # Declare the input, hidden, and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    # Declare the connections between the nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)

    # Register the connections with the network's layers
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    # The network is ready to use
    n.sortModules()

    return n
Example no. 16
	def _createRBF(self):

		# choose random centers on the map
		for i in range(self.numCenters):
			self.centers.append(self.env._randomInitPose())

		# create an RBF network
		params = FeedForwardNetwork()
		
		inLayer = LinearLayer(self.task.outdim)
		hiddenLayer = RBFLayer(self.numCenters, self.centers)
		#inLayer = RBFLayer(self.numCenters, self.centers)
		outLayer = LinearLayer(self.task.indim)

		params.addInputModule(inLayer)
		params.addModule(hiddenLayer)
		params.addOutputModule(outLayer)

		in_to_hidden = FullConnection(inLayer,hiddenLayer)
		hidden_to_out = FullConnection(hiddenLayer,outLayer)
		params.addConnection(in_to_hidden)
		params.addConnection(hidden_to_out)

		params.sortModules()

		return params
Example no. 17
    def build_network(self, layers=None, end=1):
        layerobjects = []
        for item in layers:
            try:
                t, n = item
                if t == "sig":
                    if n == 0:
                        continue
                    layerobjects.append(SigmoidLayer(n))
            except TypeError:
                layerobjects.append(LinearLayer(item))

        n = FeedForwardNetwork()
        n.addInputModule(layerobjects[0])

        for i, layer in enumerate(layerobjects[1:-1]):
            n.addModule(layer)
            connection = FullConnection(layerobjects[i], layerobjects[i+1])
            n.addConnection(connection)

        n.addOutputModule(layerobjects[-1])
        connection = FullConnection(layerobjects[-2], layerobjects[-1])
        n.addConnection(connection)

        n.sortModules()
        return n
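
The layers argument mixes two spellings: a bare integer becomes a LinearLayer (the tuple unpacking raises TypeError, which is caught), while a ("sig", n) tuple becomes a SigmoidLayer. A hedged call sketch, assuming an instance obj of the host class:

# First and last entries are bare ints, so the input and output layers are linear.
net = obj.build_network(layers=[4, ("sig", 6), 1])
print(net.activate([0.0, 0.5, 1.0, -1.0]))
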
Example no. 18
def initalize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    
    # Old code (regression)        
    n = FeedForwardNetwork()
    # n = buildNetwork( 2, 3, data.outdim, outclass=SoftmaxLayer )

    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
        
        
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
        
    n.sortModules()
    return n
Example no. 19
def create_ff_network(options):
    """Create the FeedForware network
    :param options: The input options.
    :return:
    """

    # Create FF network
    net = FeedForwardNetwork()

    # Create each Layer instance
    in_layer = LinearLayer(options['inUnitCount'])
    hidden_layer = SigmoidLayer(options['hiddenUnitCount'])
    out_layer = LinearLayer(options['outUnitCount'])

    # Build network layer topology
    net.addInputModule(in_layer)
    net.addModule(hidden_layer)
    net.addOutputModule(out_layer)

    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_out)

    # Complete structure network
    net.sortModules()

    return net
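
create_ff_network only reads three keys from options; a minimal sketch with assumed sizes:

options = {'inUnitCount': 4, 'hiddenUnitCount': 8, 'outUnitCount': 2}
net = create_ff_network(options)
print(net.activate([1.0, 0.0, 0.0, 1.0]))  # two outputs
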
Example no. 20
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example no. 21
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600 # the input is a 40x40 pixel image
    NUMBER_OF_HIDDEN_NEURONS = 10  # neurons in the single hidden layer (the original name wrongly said "layers")
    NUMBER_OF_OUTPUT_CLASSES = 8 # the output distinguishes 8 classes
    inLayer = LinearLayer( NUMBER_OF_INPUT_BYTES )
    hiddenLayer = SigmoidLayer( NUMBER_OF_HIDDEN_NEURONS )
    outLayer = LinearLayer( NUMBER_OF_OUTPUT_CLASSES )
    # Create connections between layers
    # We create FullConnection - each neuron of one layer is connected to each neuron of other layer
    in_to_hidden = FullConnection( inLayer, hiddenLayer )
    hidden_to_out = FullConnection( hiddenLayer, outLayer )
    # Add layers to our network
    network.addInputModule( inLayer )
    network.addModule( hiddenLayer )
    network.addOutputModule( outLayer )
    # Add connections to network
    network.addConnection( in_to_hidden )
    network.addConnection( hidden_to_out )
    # Sort modules to make multilayer perceptron usable
    network.sortModules()
    # prepare array to activate network
    d_letter_array = read_array( "d" )
    # activate network
    network.activate( d_letter_array )
    return network
Example no. 22
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size+1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print 'successfully converted to fast network'
    t = BackpropTrainer(n, d, learningrate=0.0001)#, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break


    exportCatDogANN(n)
    return n
Example no. 23
def training(d):
    # net = buildNetwork(d.indim, 55, d.outdim, bias=True,recurrent=False, hiddenclass =SigmoidLayer , outclass = SoftmaxLayer)
    net = FeedForwardNetwork()
    inLayer = SigmoidLayer(d.indim)
    hiddenLayer1 = SigmoidLayer(d.outdim)
    hiddenLayer2 = SigmoidLayer(d.outdim)
    outLayer = SigmoidLayer(d.outdim)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer1)
    net.addModule(hiddenLayer2)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_hidden = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()
    print net

    t = BackpropTrainer(net, d, learningrate = 0.9,momentum=0.9, weightdecay=0.01, verbose = True)
    t.trainUntilConvergence(continueEpochs=1200, maxEpochs=1000)
    NetworkWriter.writeToFile(net, 'myNetwork'+str(time.time())+'.xml')
    return t
Example no. 24
 def trainSupervised(self, 
         num,
         ds,
         initialLearningrate=0.002,
         decay=0.9999,
         myWeightdecay=0.8,
         momentum=0):
     n = FeedForwardNetwork()
     n.addInputModule(self.inLayer)
     n.addModule(self.hiddenLayer)
     n.addModule(self.b)
     n.addOutputModule(self.outLayer)
     n.addConnection(self.in_to_hidden)
     n.addConnection(self.hidden_to_out)
     n.addConnection(self.b_to_hidden)
     n.addConnection(self.b_to_out)
     n.sortModules()
     self.supervisedNet = n
     self.supervisedTrainer = BackpropTrainer(n, ds,  
         learningrate=initialLearningrate,
         lrdecay=decay, 
         verbose=True, 
         weightdecay=myWeightdecay,
         batchlearning=True,
         momentum=momentum) 
     self.supervisedTrainer.trainEpochs(num)
Example no. 25
def evalFunc(ds):
    trains = []
    tests = []
    epochsNums = []
    parameters = range(1, 40)
    testAmount = 10
    for i in parameters:
        trainError = 0
        testError = 0
        for testNum in range(testAmount):
            tstdata, trndata = ds.splitWithProportion(0.25)
            hidden_size = i
            epochsNum = 10  # referenced below; the original's misspelled numOfEpocs was never used
            """
            n = buildNetwork( 1, hidden_size, 1, bias = True )
            
            
            """

            inLayer = LinearLayer(len(ds.getSample(0)[0]))
            hiddenLayer = SigmoidLayer(hidden_size)
            outLayer = LinearLayer(len(ds.getSample(0)[1]))
            n = FeedForwardNetwork()
            n.addInputModule(inLayer)
            n.addModule(hiddenLayer)
            b = BiasUnit()
            n.addModule(b)
            n.addOutputModule(outLayer)
            in_to_hidden = FullConnection(inLayer, hiddenLayer)
            hidden_to_out = FullConnection(hiddenLayer, outLayer)
            b_to_hidden = FullConnection(b, hiddenLayer)
            b_to_out = FullConnection(b, outLayer)

            n.addConnection(in_to_hidden)
            n.addConnection(hidden_to_out)
            n.addConnection(b_to_hidden)
            n.addConnection(b_to_out)

            n.sortModules()
            # print n.activate([1, 2])

            trainer = BackpropTrainer(n, trndata)  # , verbose=True, weightdecay=0)
            trainer.trainUntilConvergence(
                verbose=True, validationProportion=0.15, maxEpochs=epochsNum, continueEpochs=10
            )
            trainError += printError(n, trndata, "trndata", ds.outputMax, ds.outputMin)
            testError += printError(n, tstdata, "tstdata", ds.outputMax, ds.outputMin)
            epochsNums.append(epochsNum)
            # print n.activateOnDataset(tstdata)
        trains.append(trainError / testAmount)
        tests.append(testError / testAmount)

    plt.plot(parameters, trains, label="train " + ds.label)
    plt.plot(parameters, tests, label="test " + ds.label)
    plt.legend().draggable()
    plt.title("Hidden layer size influance (" + str(epochsNum) + " epochs)")
    plt.xlabel("Hidden layer size")
    plt.ylabel("Normalized RMSE")
    plt.grid()
Example no. 26
class NeuralNetwork(BaseEstimator, RegressorMixin):
    def __init__(
        self,
        inp_neu=4,
        hid_neu=3,
        out_neu=1,
        learn_rate=0.1,
        momentum=0.5,
        weight_dec=0.0001,
        epochs=100,
        split_prop=0.25,
    ):
        self.inp_neu = inp_neu
        self.hid_neu = hid_neu
        self.out_neu = out_neu
        self.learn_rate = learn_rate
        self.momentum = momentum
        self.weight_dec = weight_dec
        self.epochs = epochs
        self.split_prop = split_prop

    def data(self, X, y=None):
        DS = SupervisedDataSet(self.inp_neu, self.out_neu)
        for i in range(0, len(X)):
            DS.addSample((X[i][0], X[i][1], X[i][2], X[i][3]), y[i])  # NOTE: hard-coded for exactly four input features
        return DS

    def fit(self, X, y):
        self.n = FeedForwardNetwork()

        self.n.addInputModule(SigmoidLayer(self.inp_neu, name="in"))
        self.n.addModule(SigmoidLayer(self.hid_neu, name="hidden"))
        self.n.addOutputModule(LinearLayer(self.out_neu, name="out"))
        self.n.addConnection(FullConnection(self.n["in"], self.n["hidden"], name="c1"))
        self.n.addConnection(FullConnection(self.n["hidden"], self.n["out"], name="c2"))

        self.n.sortModules()  # initialisation

        self.tstdata, trndata = self.data(X, y).splitWithProportion(self.split_prop)

        trainer = BackpropTrainer(
            self.n, trndata, learningrate=self.learn_rate, momentum=self.momentum, weightdecay=self.weight_dec
        )
        trainer.trainUntilConvergence(verbose=True, maxEpochs=self.epochs)

        return self

    def predict(self, X):
        self.yhat = []
        for i in X:
            self.yhat.append(float(self.n.activate(i)))
        self.yhat = np.array(self.yhat)
        return self.yhat

    def score(self, y):
        vect_se = (self.yhat - y) ** 2
        mse = float(np.sum(vect_se)) / float(len(vect_se))
        return mse
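
Because the class follows the scikit-learn estimator protocol, it can be driven like any regressor. A hedged sketch on random data (note that data() hard-codes exactly four input features):

import numpy as np

X = np.random.rand(50, 4)      # four features, as data() assumes
y = np.random.rand(50)
model = NeuralNetwork(epochs=10)
model.fit(X, y)
yhat = model.predict(X)
print(model.score(y))          # MSE of the last predict() call against y
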
Example no. 27
 def fromModules(cls, visible, hidden, bias, con, biascon):
     net = FeedForwardNetwork()
     net.addInputModule(visible)
     net.addModule(bias)
     net.addOutputModule(hidden)
     net.addConnection(con)
     net.addConnection(biascon)
     net.sortModules()
     return cls(net)
Example no. 28
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name = 'i'))
    net.addModule(TanhLayer(hiddim, name = 'h'))
    net.addOutputModule(ThresholdLayer(outdim, name = 'o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
Example no. 29
 def testMdlstm(self):
     net = FeedForwardNetwork()
     net.addInputModule(LinearLayer(1, name='in'))
     net.addModule(MDLSTMLayer(1, 1, name='hidden'))
     net.addOutputModule(LinearLayer(1, name='out'))
     net.addConnection(FullConnection(net['in'], net['hidden']))
     net.addConnection(FullConnection(net['hidden'], net['out']))
     net.sortModules()
     self.equivalence_feed_forward(net, net.convertToFastNetwork())
def mlpClassifier(X,y,train_indices, test_indices, mom=0.1,weightd=0.01, epo=5):
    X_train, y_train, X_test, y_test = X[train_indices],y[train_indices], X[test_indices], y[test_indices]

    #Converting the data into a dataset which is easily understood by PyBrain. 
    tstdata = ClassificationDataSet(X.shape[1],target=1,nb_classes=8)
    trndata = ClassificationDataSet(X.shape[1],target=1,nb_classes=8)
 #   print "shape of X_train & y_train: " + str(X_train.shape) + str(y_train.shape)
    for i in range(y_train.shape[0]):
        trndata.addSample(X_train[i,:], y_train[i])
    for i in range(y_test.shape[0]):
        tstdata.addSample(X_test[i,:], y_test[i])
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()

    #printing the specs of data
#    print "Number of training patterns: ", len(trndata)
#    print "Input and output dimensions: ", trndata.indim, trndata.outdim
#    print "First sample (input, target, class):"
#    print trndata['input'][0], trndata['target'][0], trndata['class'][0]

    #The neural-network used
 #   print "Building Network..."
    #input layer, hidden layer of size 10(very small), output layer
    ANNc = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name="ip")
    hLayer1 = TanhLayer(100, name = "h1")
    hLayer2 = SigmoidLayer(100, name = "h2")
    outLayer = SoftmaxLayer(trndata.outdim, name = "op")

    ANNc.addInputModule(inLayer)
    ANNc.addModule(hLayer1)
    ANNc.addModule(hLayer2)
    ANNc.addOutputModule(outLayer)

    ip_to_h1 = FullConnection(inLayer, hLayer1, name = "ip->h1")
    h1_to_h2 = FullConnection(hLayer1, hLayer2, name = "h1->h2")
    h2_to_op = FullConnection(hLayer2, outLayer, name = "h2->op")

    ANNc.addConnection(ip_to_h1)
    ANNc.addConnection(h1_to_h2)
    ANNc.addConnection(h2_to_op)
    ANNc.sortModules()

#    print "Done. Training the network."

    #The trainer used, in our case Back-propagation trainer
    trainer = BackpropTrainer( ANNc, dataset=trndata, momentum=mom, verbose=True, weightdecay=weightd)
    trainer.trainEpochs( epo )

    #The error
    trnresult = percentError( trainer.testOnClassData(dataset=trndata), trndata['class'] )
    tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )
 #   print "Done."
    return ANNc, trainer.totalepochs, (100 - trnresult), (100 - tstresult) 
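
A hedged driver for mlpClassifier, with a plain random split standing in for a real cross-validation fold; X and y are synthetic, and y must hold integer class labels 0-7 to match nb_classes=8:

import numpy as np

X = np.random.rand(200, 10)
y = np.random.randint(0, 8, size=(200, 1))
idx = np.random.permutation(len(X))
net, epochs, train_acc, test_acc = mlpClassifier(X, y, idx[:150], idx[150:], epo=2)
print(train_acc, test_acc)
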
Example no. 31
net = FeedForwardNetwork()
inl = LinearLayer(2)
hidl = SigmoidLayer(2)
outl = LinearLayer(1)
b = BiasUnit()

#6.7
#Create connections
in_to_h = FullConnection(inl, hidl)
h_to_out = FullConnection(hidl, outl)
bias_to_h = FullConnection(b, hidl)
bias_to_out = FullConnection(b, outl)

#Add modules to net
net.addInputModule(inl)
net.addModule(hidl)
net.addModule(b)
net.addOutputModule(outl)

#Add connections to net and sort
net.addConnection(in_to_h)
net.addConnection(h_to_out)
net.addConnection(bias_to_h)
net.addConnection(bias_to_out)
net.sortModules()

#6.8
#input data
d = [(0, 0), (0, 1), (1, 0), (1, 1)]

#target class
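
The snippet breaks off after the target-class comment; a plausible continuation, assuming the intended task was XOR (the target list is an assumption, not part of the original):

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

targets = [0, 1, 1, 0]  # assumed XOR labels
ds = SupervisedDataSet(2, 1)
for inp, tgt in zip(d, targets):
    ds.addSample(inp, [tgt])
trainer = BackpropTrainer(net, ds)
for _ in range(100):
    trainer.train()
print([round(net.activate(inp)[0], 2) for inp in d])
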
Example no. 32
def test_neural_nets(ds):

	def plot_errors(x, train_err, test_err):
		plt.plot(x, train_err, label='Training error')
		plt.xlabel('Epochs')
		plt.ylabel('Error')
		plt.title('Training error using backpropagation')
		plt.legend()
		plt.show()

	input_size = len(ds.train.x[0]) # no. of attributes
	target_size = 1
	hidden_size = 5
	iterations = 1000

	n = FeedForwardNetwork()
	in_layer = LinearLayer(input_size)    # was hard-coded to 34
	hidden_layer = [SigmoidLayer(20), SigmoidLayer(20), SigmoidLayer(20)]
	out_layer = LinearLayer(target_size)  # was hard-coded to 1

	n.addInputModule(in_layer)
	for layer in hidden_layer:
		n.addModule(layer)
	n.addOutputModule(out_layer)
	in_to_hidden = FullConnection(in_layer, hidden_layer[0])
	h1 = FullConnection(hidden_layer[0], hidden_layer[1])
	h2 = FullConnection(hidden_layer[1], hidden_layer[2])
	hidden_to_out = FullConnection(hidden_layer[2], out_layer)

	n.addConnection(in_to_hidden)
	n.addConnection(h1)
	n.addConnection(h2)
	n.addConnection(hidden_to_out)

	n.sortModules()

	print n

	train_nnds = SupervisedDataSet(input_size, target_size)
	train_nnds.setField('input', ds.train.x)
	one_train_reshaped = np.array(ds.train.y).reshape(-1,1) 
	train_nnds.setField('target', one_train_reshaped)

	trainer = BackpropTrainer( n, train_nnds )
	epochs, train_acc, test_acc = [], [], []
	
	for i in xrange(iterations):
		trainer.train()
		train_pred_y = []
		# Compute percent training error
		for row in ds.train.x:
			p = int( round( n.activate(row)[0] ) )
			if p >= 1: p = 1 
			else: p = 0 # sometimes rounding takes us to 2 or -1
			train_pred_y.append(p)
		train_error = percentError(train_pred_y, ds.train.y)

		if i%25 == 0 or i==iterations-1:
			epochs.append(i)
			train_acc.append(train_error)
			print "Train error", train_error
	
	plot_errors(epochs, train_acc, test_acc)
Example no. 33
def trainet2(data, nhide=8, nhide1=8, epo=10, wd=.1, fn=''):

    alldata = data
    tstdata_temp, trndata_temp = alldata.splitWithProportion(0.5)

    tstdata = ClassificationDataSet(alldata.indim, nb_classes=alldata.nClasses)
    for n in range(0, tstdata_temp.getLength()):
        tstdata.addSample(
            tstdata_temp.getSample(n)[0],
            tstdata_temp.getSample(n)[1])

    trndata = ClassificationDataSet(alldata.indim, nb_classes=alldata.nClasses)
    for n in range(0, trndata_temp.getLength()):
        trndata.addSample(
            trndata_temp.getSample(n)[0],
            trndata_temp.getSample(n)[1])

    tstdata._convertToOneOfMany()
    trndata._convertToOneOfMany()

    net = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim)
    hiddenLayer = TanhLayer(nhide)
    hiddenLayer1 = TanhLayer(nhide1)
    outLayer = LinearLayer(trndata.outdim)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()
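    # NOTE: the assignment below only sets an attribute; bias units must be
    # added as BiasUnit modules before sortModules() to have any effect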
    net.bias = True

    trainer = BackpropTrainer(net,
                              dataset=trndata,
                              verbose=True,
                              weightdecay=wd,
                              momentum=0.1)
    edata = []
    msedata = []
    for i in range(epo):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['class'])
        tod = trainer.testOnData(verbose=False)
        print("epoch: %4d" % trainer.totalepochs,
              "  train error: %5.2f%%" % trnresult,
              "  test error: %5.2f%%" % tstresult, "  layers: ", nhide1,
              "  N_tourn: ", alldata.indim / 2)
        edata.append([trnresult, tstresult])
        msedata.append([i, tod])
    with open(fn + ".dta", 'w') as fp:
        json.dump(edata, fp)
    with open(fn + ".mse", 'w') as fp:
        json.dump(msedata, fp)
    return net
Example no. 34
    def getFitness(self, smMatrix):  #Store the sm state into memory
        fit = 0

        #Fitness function (3) *************************************************************
        #Record the sm data for this loop and consider its properties
        #print(smMatrix)
        #print(len(smMatrix))

        #net = buildNetwork(3,10,1, bias = True)
        net = FeedForwardNetwork()
        inp = LinearLayer(3)
        h1 = SigmoidLayer(10)
        outp = LinearLayer(1)
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        # create connections
        iToH = FullConnection(inp, h1)
        hToO = FullConnection(h1, outp)
        net.addConnection(iToH)
        net.addConnection(hToO)
        # finish up
        net.sortModules()

        ds = SupervisedDataSet(3, 1)

        trainSet = []
        for index_x, x in enumerate(smMatrix):
            if index_x > 0 and index_x < len(smMatrix) - 1:
                #trainSet.append( [smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2], smMatrix[index_x+1][3] ] )
                ds.addSample(([
                    smMatrix[index_x][0], smMatrix[index_x][1],
                    smMatrix[index_x][2]
                ]), (smMatrix[index_x + 1][3]))
        #print(trainSet)
        #print(ds)
        trainer = BackpropTrainer(net, ds, weightdecay=0.01)
        err = trainer.trainUntilConvergence(maxEpochs=50)
        #Visualize the network performance and structure.

        #nn = NNregression(ds, epoinc = 10)
        #nn.setupNN()
        #nn.runTraining()
        #self.pesos_conexiones(net)
        #print("Input to hidden", iToH.params)
        #print("H to output", hToO.params)
        #print(iToH.params)
        n1 = iToH.params
        n1a = zip(*[iter(n1)] * 3)
        n2 = hToO.params

        sums = []
        for x in n1a:
            sumr = 0
            for y in x:
                sumr = sumr + abs(y)
            sums.append(sumr)

        sums2 = []
        for x in n2:
            sums2.append(abs(x))

        #Keep the neurons whose summed absolute weights exceed the thresholds
        a1 = [index for index, value in enumerate(sums) if value > 2.0]
        a2 = [index for index, value in enumerate(sums2) if value > 0.5]
        inter = len(set(a1).intersection(set(a2)))
        fit = inter
        #fit = sum(n1a[:]) + sum(n2[:])
        print fit
        return fit
Example no. 35
    def BuildNN(self):
        """ 
                This function builds a FeedForwardNetwork object based on 
                the data that was used to create the NNBuilder object
            """
        nn = FeedForwardNetwork()

        # Set up the Layers
        inputLayer = LinearLayer(len(self.getInput()))
        outputLayer = SigmoidLayer(self.OUTPUT_NODES)

        # Add to NN
        nn.addInputModule(inputLayer)
        nn.addOutputModule(outputLayer)

        # Handle multiple hidden layers, add to NN
        topology = self.getHiddenLayers()
        hiddenLayers = []

        for i in range(0, len(topology)):
            size = int(topology[i])
            hlayer = SigmoidLayer(size)
            nn.addModule(hlayer)
            hiddenLayers.append(hlayer)

        # Get the bias for each hidden layer
        biasList = []
        for i in range(0, len(topology)):
            bias = BiasUnit(name="bias" + str(i))
            nn.addModule(bias)
            biasList.append(bias)

        # Manually connect input layer to first hidden,
        # and output layer to last hidden. Then connect all other
        # hidden layers
        input2hidden = FullConnection(inputLayer, hiddenLayers[0])
        hidden2output = FullConnection(hiddenLayers[-1], outputLayer)

        # If there was more than 1 hidden layer connect them together
        hiddenConList = []
        biasConList = []
        if len(topology) > 1:
            for i in range(0, len(topology) - 1):

                # Connect current layer to next layer
                connection = FullConnection(hiddenLayers[i],
                                            hiddenLayers[i + 1])
                hiddenConList.append(connection)

                # Make connection for bias
                biasConList.append(FullConnection(biasList[i],
                                                  hiddenLayers[i]))

        # Since we only looped to  < len(topology) - 1, have to get the last layer
        last = len(topology) - 1
        biasConList.append(FullConnection(biasList[last], hiddenLayers[last]))

        # Add connections to the NN
        nn.addConnection(input2hidden)
        for i in hiddenConList:
            nn.addConnection(i)
        for i in biasConList:
            nn.addConnection(i)
        nn.addConnection(hidden2output)

        # sortModules() finalizes the topology and initializes the weights
        nn.sortModules()

        self.nn = nn
        return nn
Example no. 36
    out_vec = np.concatenate((out_vec, [inp[out, :]]), axis=0)
inp_vec = np.concatenate((inp1_vec, inp2_vec), axis=1)

#building the dataset
dataset = SupervisedDataSet(2 * num_words, num_words)
for i in range(len(sorted_list) + 1):
    dataset.addSample(inp_vec[i, :], out_vec[i, :])
tstdata, trndata = dataset.splitWithProportion(0.25)

#building the network
net = FeedForwardNetwork()
input_layer = LinearLayer(2 * num_words, name='input_layer')
hidden_layer = TanhLayer(num_words, name='hidden')
output_layer = SigmoidLayer(num_words, name='output_layer')
net.addInputModule(input_layer)
net.addModule(hidden_layer)
net.addOutputModule(output_layer)
net.addConnection(
    FullConnection(input_layer, hidden_layer, name='in_to_hidden'))
net.addConnection(
    FullConnection(hidden_layer, output_layer, name='hidden_to_out'))
net.sortModules()

#backpropagation
trainer = BackpropTrainer(net,
                          dataset=trndata,
                          momentum=0.1,
                          verbose=True,
                          weightdecay=0.01)
#error checking part
for i in range(10):
Example no. 37
# Create the network (the original snippet referenced `rede` without creating it)
rede = FeedForwardNetwork()

# Input layer: LinearLayer
camadaEntrada = LinearLayer(2)

# Hidden layer with 3 neurons
camadaOculta = SigmoidLayer(3)

# Output layer with a single neuron
camadaSaida = SigmoidLayer(1)

# Bias units
bias1 = BiasUnit()
bias2 = BiasUnit()

# Assemble the network
rede.addInputModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addOutputModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)

# Connect the input to the hidden layer
# FullConnection links every neuron of one layer
# to every neuron of the next
entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
biasOculta = FullConnection(bias1, camadaOculta)
biasSaida = FullConnection(bias2, camadaSaida)

# Register the connections (missing in the original), then finish building
rede.addConnection(entradaOculta)
rede.addConnection(ocultaSaida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)
rede.sortModules()
Example no. 38
class BMTrainer:
    # number of hidden-layer neurons:
    hiddendim = 3
    # source file with the training data:
    srcname = 'trainer.xlsx'
    # destination file for the trained network:
    destname = 'buildBMTrainer.xml'
    # number of result columns in the source file (= output-layer size)
    rescol = 1
    # whether to print intermediate iterations
    verbose = True
    # overall training error
    finalerror = 0
    __fnn = None
    __sy = None

    def __init__(self,
                 _hiddendim=3,
                 _srcname='trainer.xlsx',
                 _destname='buildBMTrainer.xml'):
        self.hiddendim = _hiddendim
        self.srcname = _srcname
        self.destname = _destname

    def readexcel(self):
        workbook = xlrd.open_workbook(self.srcname)
        sheet1 = workbook.sheet_by_index(0)
        if (self.verbose):
            print('Training set: ' + str(sheet1.nrows) + ' rows, ' +
                  str(sheet1.ncols) + ' columns; result columns: ' + str(self.rescol))
        # data = np.empty()
        # target = np.empty()
        # for i, d in enumerate(sheet1):
        #     data[i] = np.asarray(d[:-1], dtype=np.float64)i
        #     target[i] = np.asarray(d[-1], dtype=np.float64)
        # test = [[0 for i in range(sheet1.nrows)] for j in range(sheet1.ncols)]
        if (sheet1.nrows > 1 and sheet1.ncols > self.rescol):
            x = np.zeros((sheet1.nrows - 1, sheet1.ncols - self.rescol),
                         dtype=np.float)
            y = np.zeros((sheet1.nrows - 1, self.rescol), dtype=np.float)
            for i in range(sheet1.nrows - 1):
                for j in range(sheet1.ncols):
                    if (j < sheet1.ncols - self.rescol):
                        # print sheet1.cell(i + 1, j).value
                        x[i][j] = sheet1.cell(i + 1, j).value
                    else:
                        y[i][j - sheet1.ncols + self.rescol] = sheet1.cell(
                            i + 1, j).value
        return x, y

    def buildBMTrainer(self):
        # print np.random.rand(1)
        # print np.random.rand(1)
        x, y = self.readexcel()
        # read the demo data from the sklearn datasets
        # boston = load_boston()
        # x = boston.data
        # y = boston.target.reshape(-1, 1)
        # for i in range(0,x.shape[0]):
        #     for j in range(0,x.shape[1]):
        #         print (x[i][j])
        # print x.shape
        # sys.exit();
        # for x in x:
        #     print x
        # print x
        # print y
        # sys.exit(0)
        # split train/test 7:3 without shuffling (currently the full set is used for training)
        # per = int(len(x) * 0.7)
        per = int(len(x))
        # normalize the data (normalization is essential with sigmoid-family units)
        sx = MinMaxScaler()
        sy = MinMaxScaler()
        xTrain = x[:per]
        xTrain = sx.fit_transform(xTrain)
        yTrain = y[:per]
        # print yTrain
        yTrain = sy.fit_transform(yTrain)
        # print yTrain
        # print sy.inverse_transform(yTrain)
        # sys.exit()
        # xTest = x[per:]
        # xTest = sx.transform(xTest)
        # yTest = y[per:]
        # yTest = sy.transform(yTest)
        # print xTest.shape
        # for x in xTest:
        #     print x
        # sys.exit()

        # initialize the feed-forward network
        self.__fnn = FeedForwardNetwork()

        # build the input, hidden and output layers; 3-5 hidden layers is usually plenty
        inLayer = LinearLayer(x.shape[1], 'inLayer')

        # hiddenLayer = TanhLayer(3, 'hiddenLayer')
        hiddenLayer = TanhLayer(self.hiddendim, 'hiddenLayer')
        outLayer = LinearLayer(self.rescol, 'outLayer')
        # hiddenLayer1 = TanhLayer(5, 'hiddenLayer1')
        # outLayer = LinearLayer(1, 'outLayer')

        # add the input, hidden and output layers to fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer)
        # fnn.addModule(hiddenLayer1)
        self.__fnn.addOutputModule(outLayer)

        # fully connect consecutive layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        # in_to_hidden.setName('in_to_hidden')
        # in_to_hidden._setParameters([0 for i in range(30)])
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        # hidden_to_out.setName('hidden_to_out')
        # hidden_to_out._setParameters([1 for i in range(3)])
        # hidden_to_hidden = FullConnection(hiddenLayer,hiddenLayer1 )
        # hidden_to_out = FullConnection(hiddenLayer1, outLayer)

        # register the connections with fnn
        self.__fnn.addConnection(in_to_hidden)
        # fnn.addConnection(hidden_to_hidden)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()

        # initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # add the training samples and labels to DS
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # train with backprop until convergence, capped at 10000 epochs
        trainer = BackpropTrainer(self.__fnn,
                                  DS,
                                  learningrate=0.001,
                                  verbose=self.verbose)
        trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000)
        self.finalerror = trainingErrors[0][-2]
        if (self.verbose):
            print('final overall training error:', self.finalerror)
        self.__sy = sy
        # print "1"
        # print fnn.activate(x)
        for i in range(len(xTrain)):
            print(
                sy.inverse_transform(
                    self.__fnn.activate(xTrain[i]).reshape(-1, 1)))
        # sys.exit()
        # print sy.inverse_transform(fnn.activate(x))[0]
        # validate on the held-out test set
        # values = []
        # sy.inverse_transform()
        # for x in xTest:
        #     values.append(sy.inverse_transform(fnn.activate(x))[0])
        # for x in xTest:
        #     x1 = fnn.activate(x)
        #     x2 = sy.inverse_transform(x1.reshape(-1, 1))
        #     values.append(x2[0])
        # print "2"
        # compute the RMSE (root mean squared error)
        # totalsum = sum(map(lambda x: x ** 0.5, map(lambda x, y: pow(x - y, 2), boston.target[per:], values))) / float(len(xTest))
        # print totalsum
        # print "3"
        # save the trained network and scaler

    def saveresult(self):
        NetworkWriter.writeToFile(self.__fnn, self.destname)
        joblib.dump(self.__sy, 'sy.pkl', compress=3)
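
A hedged end-to-end sketch for BMTrainer; it assumes a trainer.xlsx whose last rescol column(s) hold the targets, as readexcel() expects:

bm = BMTrainer(_hiddendim=5, _srcname='trainer.xlsx', _destname='buildBMTrainer.xml')
bm.buildBMTrainer()  # reads the spreadsheet, scales the data, trains to convergence
bm.saveresult()      # writes the network XML plus the target scaler (sy.pkl)
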
Example no. 39
class NeuralNet(regression):
    
    
    '''
    #deprecated
    def __init__(self, inputDim, outputDim):
        \'''
	Initializes class parameters
	
	Input:   

        \'''
        regression.__init__(self,inputDim, outputDim)
        #self.net = buildNetwork(inputDim, outputDim)
        self.net = FeedForwardNetwork()
        inLayer = LinearLayer(inputDim)
        hiddenLayer1 = TanhLayer(10)
        hiddenLayer2 = TanhLayer(10)
        outLayer = SigmoidLayer(outputDim)
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer1)
        self.net.addModule(hiddenLayer2)
        self.net.addOutputModule(outLayer)

        in_to_hidden1 = FullConnection(inLayer, hiddenLayer1)
        hidden1_to_hidden2=FullConnection(hiddenLayer1,  hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)
        self.net.addConnection(in_to_hidden1)
        self.net.addConnection(hidden1_to_hidden2)
        self.net.addConnection(hidden2_to_out)

        self.net.sortModules()
        self.shape=self.net.params.shape
        self.ds = SupervisedDataSet(self.inputDimension, self.outputDimension)
    
    '''
 
    def __init__(self, rs):
        regression.__init__(self,rs)
        self.learningRate=rs.learningRate
        self.momentum=rs.momentum
        
        self.net = FeedForwardNetwork()
        
        #input Layer
        inLayer = layersDict[rs.inputLayer](rs.inputDim)
        self.net.addInputModule(inLayer)
        
        #outputLayer
        outLayer = layersDict[rs.outputLayer](rs.outputDim)
        self.net.addOutputModule(outLayer)
        
        #no hidden Layer
        if(len(rs.hiddenLayers)==0):
            #connection between input and output Layer
            in_to_out = FullConnection(inLayer, outLayer)
            self.net.addConnection(in_to_out)
            if(rs.bias==True):
                bias= BiasUnit('bias')
                self.net.addModule(bias)
                bias_to_out = FullConnection(bias, outLayer)
                self.net.addConnection(bias_to_out)
        else :
            #hidden Layers
            hiddenLayers=[]
            for layer in rs.hiddenLayers:
                tmp=layersDict[layer[0]](layer[1])
                self.net.addModule(tmp)
                hiddenLayers.append(tmp)
             
            #connection between input and first hidden Layer  
            in_to_hidden=FullConnection(inLayer,hiddenLayers[0])
            self.net.addConnection(in_to_hidden)
            
            #connection between hidden Layers
            i=0
            for i in range(1,len(hiddenLayers)):
                hidden_to_hidden=FullConnection(hiddenLayers[i-1],hiddenLayers[i])
                self.net.addConnection(hidden_to_hidden)
            
            #connection between last hidden Layer and output Layer   
            hidden_to_out= FullConnection(hiddenLayers[i],outLayer)
            self.net.addConnection(hidden_to_out)     
            
            if(rs.bias==True):
                bias=BiasUnit('bias')
                self.net.addModule(bias)
                for layer in hiddenLayers :
                    bias_to_hidden = FullConnection(bias, layer)
                    self.net.addConnection(bias_to_hidden)
                
                bias_to_out = FullConnection(bias, outLayer)
                self.net.addConnection(bias_to_out)
                

        
        #initilisation of weight
        self.net.sortModules()
        self.shape=self.net.params.shape
        self.net._setParameters(np.random.normal(0.0,0.1,self.shape))
            
        
        self.ds = SupervisedDataSet(self.inputDimension, self.outputDimension)
        #print(self.net)
            
    def setTheta(self, theta):
        self.net._setParameters(theta.reshape(self.shape))

    def getTheta(self):
        return self.net.params

    def load(self,thetaFile):
        '''
        Load the weights of the neural network from the theta file
        '''
        self.net._setParameters(np.loadtxt(thetaFile+".theta"))
        #print ("theta LOAD : ", self.net.params)
        return self.net.params

    def getTrainingData(self, inputData, outputData):
        '''
        Verifies the validity of the given input and output data
        Data should be organized by columns
        
        Input:      -inputdata, numpy N-D array
                    -outputData, numpy N-D array
        '''
        regression.getTrainingData(self,inputData, outputData)

        for i in range(self.numberOfSamples):
            self.ds.addSample(inputData[i],outputData[i])

    def train(self):
        '''
        Perform batch regression
        '''
        trainer = BackpropTrainer(self.net, self.ds, learningrate=self.learningRate, momentum=self.momentum)

        minError=10
        while(True):
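            # NOTE: no exit condition; this keeps training and checkpointing
            # the best theta until interrupted externally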
            error=trainer.train()
            print(self.meanSquareError())
            if(error<minError):
                minError=error
                self.saveTheta(self.rs.path+self.rs.thetaFile+".theta")

        
        #trainer.trainUntilConvergence(maxEpochs=10, verbose=True)
        #trainer.trainEpochs(10)
    def computeOutput(self, inputVal):
        '''
        Returns the output depending on the given input and theta
        
        Input:      -inputVal: numpy N-D array
                    -theta: numpy N-D array
        
        Output:     -fa_out: numpy N-D array, output approximated
        '''
        assert(inputVal.shape[0]==self.inputDimension), "NeuralNet: Bad input format : " + str(inputVal.shape[0])+"/"+str(self.inputDimension)
        output=self.net.activate(inputVal)
        #print(output)
        return output

    def meanSquareError(self):
        output=self.net.activateOnDataset(self.ds)
        return np.mean((self.outputData - output)**2)
Example no. 40
        for seq in item_seqs:
            for i in xrange(len(seq) - 1):
                inp = seq[:i] + [0] * (6 - i)
                print inp, seq[i]
                ds.addSample(inp, seq[i])

        net = FeedForwardNetwork()
        inp = LinearLayer(6)
        h1 = SigmoidLayer(6)
        outp = LinearLayer(1)

        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)

        # create connections
        net.addConnection(FullConnection(inp, h1))
        net.addConnection(FullConnection(h1, outp))

        # finish up
        net.sortModules()

        # initialize the backprop trainer and train
        trainer = BackpropTrainer(net, ds)
        trainer.trainOnDataset(ds, 1000)
        trainer.testOnData(verbose=False)

        print net.activate((0, 0, 0, 0, 0, 0))
        import pdb
Example no. 41
# initialize the feed-forward network
fnn = FeedForwardNetwork()

# build the input, hidden and output layers; 3-5 hidden layers is usually plenty
inLayer = LinearLayer(x.shape[1], 'inLayer')

# hiddenLayer = TanhLayer(3, 'hiddenLayer')
hiddenLayer = TanhLayer(12, 'hiddenLayer')
outLayer = LinearLayer(1, 'outLayer')
# hiddenLayer1 = TanhLayer(5, 'hiddenLayer1')
# outLayer = LinearLayer(1, 'outLayer')

# add the input, hidden and output layers to fnn
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
# fnn.addModule(hiddenLayer1)
fnn.addOutputModule(outLayer)

# fully connect consecutive layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
# hidden_to_hidden = FullConnection(hiddenLayer,hiddenLayer1 )
# hidden_to_out = FullConnection(hiddenLayer1, outLayer)

# register the connections with fnn
fnn.addConnection(in_to_hidden)
# fnn.addConnection(hidden_to_hidden)
fnn.addConnection(hidden_to_out)
fnn.sortModules()
Example no. 42
class MP_Pybrain(Regression):
    """
    Fully connected multilayer perceptron using pybrain library.
    """
    def __init__(self, train_data, hyper,  n_targets=None, label_targets=None):
        """
    Parameters
    ----------

    train_data: pandas DataFrame
                Contains columns for the features and for the target variables. The names of the
                target variables end with the suffix "_tau"
    hyper:      dictionary
                It contains the hyperparameters necessary to run all the functionalities of the model.
                 They are the following:
                "structure" is a list of integers determining the number of neurons in each hidden layer
                "epochs" an integer specifying the maximum number of epochs to run during every training session
                "learning_rate" a float giving the learning rate of the gradient descend
                "momentum" a float giving the value of the momentum for the algorithm
                "batch" a bool. If True the method performs full batch learning, i.e. updates of the weights is done
                using all the instances of the training set. Else, normal online method is performed
                Other parameters regarding cross validation are explained in the base class

        """
        Regression.__init__(self, train_data, hyper, n_targets=n_targets, label_targets=label_targets)

        self.N = FeedForwardNetwork()
        self.structure = [self.n_feature] + hyper['structure'] + [self.n_target]

        self._build_net(self.structure)
        self.res_params = [self.N.params[i] for i in range(len(self.N.params))]

        self.train_fraction = hyper['train_fraction']
        self.seed = hyper['seed']
        self.epochs = hyper['epochs']
        self.learning_rate = hyper['learning_rate']
        self.momentum = hyper['momentum']
        self.batch = bool(hyper['batch'])
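
        # A hypothetical hyper-parameter dictionary matching the docstring
        # above (illustrative values only, not taken from the source):
        #   hyper = {'structure': [10, 10], 'epochs': 100,
        #            'learning_rate': 0.01, 'momentum': 0.9, 'batch': False,
        #            'train_fraction': 0.8, 'seed': 42}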

    def learn(self, train_data = None, seed = None):
        """
    Performs single run training, and it is designed to be called after network instantiation.

    ----------

    train_data: pandas Dataframe
            It needs to contain datetime objects on index, and both features and target variables.
            The target variables need to end with the suffix "_tau". If None the self.train_set
            variable passed at the moment of instantiation will be used.

    Returns: tuple(MP_Pybrain object,float)
            It returns the model with the lowest training error, and the value of the training error.

        """
        if train_data is not None:
            self.train_set = train_data
            self.randomize()
        ds_train, ds_valid = self._build_dataset(self.train_set)
        trainer = BackpropTrainer(self.N, ds_train, learningrate=self.learning_rate,
                                  momentum=self.momentum,batchlearning=self.batch)
        trainer.train()
        e_train = [self._error(ds_train)]
        e_valid = [self._error(ds_valid)]
        final_model = copy(self)
        fin_error_train = e_train[0]
        fin_error_valid = e_valid[0]
        for i in range(1,self.epochs):
            if i%10 == 0:
                print "epoch: ", i
            trainer.train()
            e_train.append(self._error(ds_train))
            e_valid.append(self._error(ds_valid))
            if e_train[-1] < fin_error_train:
                final_model = deepcopy(self)
                fin_error_train = e_train[-1]
                fin_error_valid = e_valid[-1]
        return final_model, fin_error_train, fin_error_valid

    def xvalidate(self, train_data = None, folds = None):
        """
    Performs n-fold cross-validation on a data set. The method is designed to reset the network
    to an initial configuration (decided at the moment of instantiation) every time a new training is
    started. The purpose is to allow model comparison by returning an average error for a specific
    data set and collection of hyper-parameters. At the moment, training and validation sets are chosen
    based on the input sequence of data, i.e. there is no random shuffling of the instances of the data set.

    ----------

    train_data: pandas Dataframe
            It needs to contain datetime objects on index, and both features and target variables.
            The target variables need to end with the suffix "_tau". If None the self.train_set
            variable passed at the moment of instantiation will be used.

    folds: integer
            The number of training/validation partitions used in the method. If None, it needs to be
            passed in the constructor when instantiating the object for the first time. If it is never
            passed, the method cannot work and an exception is raised.
    Returns: list, float, float
            A list of all the models trained for each fold, the mean training error, and the cross-validation
            error, i.e. the average NRMSE over all the training/validation partitions created.

        """
        if train_data is not None:
            self.train_set = train_data
        if folds is not None:
            self.cv_folds = folds
        train, validation = self._build_folds(random=False)
        models = []
        train_error = []
        cv_error = []
        for i in range(self.cv_folds):
            print "Cross-validation Fold: ", i+1
            self.randomize()
            model, error, _ = self.learn(train_data=train[i])
            models.append(deepcopy(model))
            train_error.append(error)
            predicted, actual = self.test(validation[i])
            e = 0
            for k in predicted.keys():
                e += errors.RMSE(np.array(actual[k]),np.array(predicted[k]))
            cv_error.append(e)
        return models, np.mean(train_error), np.mean(cv_error)

    def test(self, data):
        """
    Tests the trained model on data. The usage is twofold: 1) internally, to calculate errors on validation
    sets; 2) externally, when a test set is provided. Both the validation and test sets need to contain target
    columns. For prediction, where target variables are unknown, please refer to the function self.predict below.
    ----------

    data:       pandas Dataframe
                A pandas dataframe. A deepcopy of it will be made and only the feature columns will be considered.
                Due to the functionality of the pyBrain library we require (at the moment) that the order of the
                columns is the same as the one of the training set used for training.

    Returns:    (pandas Dataframe, pandas Dataframe)
                A Dataframe with columns containing the predictions of the different target variables (with the
                same index as the input DataFrame), together with the actual target values.

        """
        data_x = data[self.features]
        data_y = data[self.targets]
        predicted = np.array([])
        for i in range(len(data_x)):
            predicted = np.append(predicted, self.N.activate(data_x.values[i]))
        return pd.DataFrame(predicted, index=data.index, columns=self.targets), data_y

    def predict(self, data):
        """
    Returns the target variables for a given set of features, using the trained and saved model.
    ---------

    data: pandas Dataframe
         It must contain all the feature columns used for training the model.

    Returns: pandas Dataframe
         Contains the predictions of the target variables. The variable names are the same as those
         provided at the moment of instantiation of the object.

        """
        data_x = data[self.features]
        predicted = np.array([])
        for i in range(len(data_x)):
            predicted = np.append(predicted, self.N.activate(data_x.values[i]))
        return pd.DataFrame(predicted, index=data_x.index, columns=self.targets)

    def randomize(self):
        self.N.randomize()


    ### Private functions ###
    def _error(self, ds):
        """
    Calculates the RMSE over an input dataset, given the current state of the network.

    ds: Supervised dataset pybrain style

    Returns: float
        The total error between prediction and actual values.

        """
        predicted = np.array([list(self.N.activate(x)) for x in ds['input']]).transpose()
        actual = np.array([list(x) for x in ds['target']]).transpose()
        total_error = [errors.RMSE(np.array(actual[i]),np.array(predicted[i])) for i in range(len(actual))]
        return sum(total_error)

    def _build_net(self,s):
        layers = [LinearLayer(s[0])]
        self.N.addInputModule(layers[0])
        for i in range(1,len(s)-1):
            layers.append(SigmoidLayer(s[i]))
            self.N.addModule(layers[i])
        layers.append(SigmoidLayer(s[-1]))
        self.N.addOutputModule(layers[-1])
        self._build_connections(layers)

    def _build_connections(self, l):
        for i,j in zip(l,l[1:]):
            a = FullConnection(i,j)
            self.N.addConnection(a)
        self.N.sortModules()

    def _build_dataset(self, data):
        """
    Given an input training Dataframe with features and targets, it returns the training and validation
    datasets formatted for pybrain usage, randomly shuffled according to the self.seed given at instantiation.

    ----------

    data: pandas Dataframe
        It must contain both feature and target columns

    Returns: (pybrain dataset, pybrain dataset)
        The first is the training dataset and the second is the validation dataset

        """
        np.random.seed(self.seed)
        permutation = np.random.permutation(np.arange(len(data)))
        sep = int(self.train_fraction * len(data))
        x = data[self.features]
        y = data[self.targets]
        ds_train = SupervisedDataSet(self.n_feature, self.n_target)
        ds_valid = SupervisedDataSet(self.n_feature, self.n_target)
        for i in permutation[:sep]:
            ds_train.addSample(x.values[i], y.values[i])
        for i in permutation[sep:]:
            ds_valid.addSample(x.values[i], y.values[i])
        return ds_train, ds_valid

if (__name__) == '__main__':
    trainingData = getTrainingData()
    network = FeedForwardNetwork()

    inputLayer = LinearLayer(1)
    hiddenLayer = SigmoidLayer(3)
    outputLayer = LinearLayer(1)

    inToMid = FConnect(inputLayer, hiddenLayer)
    midToOut = FConnect(hiddenLayer, outputLayer)
    data = DataSet(1, 1)

    network.addInputModule(inputLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outputLayer)

    network.addConnection(inToMid)
    network.addConnection(midToOut)
    network.sortModules()

    for item in trainingData:
        data.addSample((item[0], ), (item[1], ))

    trainer = BackpropTrainer(network,
                              dataset=data,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)
    trainer.train()
Exemplo n.º 44
0
class ANN:
    def __init__(self):
        self.name = "ANN"

    def getParams(self):
        return self.in_to_hidden.params, self.hidden_to_out.params

    def create_network(self, nFeatures, hidden1Size=20, nClasses=1):
        # create network object
        self.ffn = FeedForwardNetwork()

        # create layer objects
        inLayer = LinearLayer(nFeatures, name="input")
        hiddenLayer = SigmoidLayer(hidden1Size, name="hidden1")
        #hiddenLayer2 = SigmoidLayer(hidden2Size, name="hidden2")
        outLayer = LinearLayer(nClasses, name="output")

        # add layers to feed forward network
        self.ffn.addInputModule(inLayer)
        self.ffn.addModule(hiddenLayer)
        #self.ffn.addModule(hiddenLayer2)
        self.ffn.addOutputModule(outLayer)

        # add a bias unit and connect it to the hidden and output layers
        bias = BiasUnit(name='bias')
        self.ffn.addModule(bias)

        # establish connections between layers
        self.in_to_hidden = FullConnection(inLayer, hiddenLayer)
        #hidden_to_hidden = FullConnection(hiddenLayer, hiddenLayer2)
        self.hidden_to_out = FullConnection(hiddenLayer, outLayer)
        bias_to_hidden = FullConnection(bias, hiddenLayer)
        bias_to_out = FullConnection(bias, outLayer)

        # print "into hidden: {}".format(len(in_to_hidden.params))
        # print "into out: {}".format(len(hidden_to_out.params))

        # add connections to network
        self.ffn.addConnection(self.in_to_hidden)
        #self.ffn.addConnection(hidden_to_hidden)
        self.ffn.addConnection(self.hidden_to_out)
        self.ffn.addConnection(bias_to_hidden)
        self.ffn.addConnection(bias_to_out)

        # necessary, sort layers into correct/certain order
        self.ffn.sortModules()

        # dataset object
        self.train_ds = SupervisedDataSet(nFeatures, nClasses)
        self.validate_ds = SupervisedDataSet(nFeatures, nClasses)

    # train network
    def train(self, TrainX, TrainY, ValidateX, ValidateY):
        # clear old dataset
        self.train_ds.clear()
        self.validate_ds.clear()

        # add data to dataset object (ds)
        for i in range(TrainX.shape[0]):
            self.train_ds.addSample(TrainX[i], TrainY[i])

        for i in range(ValidateX.shape[0]):
            self.validate_ds.addSample(ValidateX[i], ValidateY[i])

        # randomize weights
        self.ffn.randomize()

        # Backprop trainer object
        self.trainer = BackpropTrainer(self.ffn,
                                       learningrate=.0775,
                                       momentum=.1)
        try:
            with Timer() as t:
                self.train_errors, self.val_errors \
                    = self.trainer.trainUntilConvergence(trainingData=self.train_ds, \
                                                         validationData=self.validate_ds, \
                                                         maxEpochs=500, \
                                                         continueEpochs=10)

            #return self.train_errors, self.val_errors
        except:
            print "Error occured while training model in ANN."

        #finally:
        #    print("ANN.py - Time to trainUntilConvergence: {:.03f} sec.".format(t.interval))

        return 'ANN'

    # predict the dependent variable for a dataset
    def predict(self, data):
        # if only make prediction for one sample
        if (len(data.shape) == 1):
            return self.ffn.activate(data)
        else:
            outputs = np.zeros(data.shape[0])
            for i in range(data.shape[0]):
                outputs[i] = self.ffn.activate(data[i])
            return outputs
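
# A minimal usage sketch for the ANN class above (hypothetical: assumes the
# train/validate arrays are numpy arrays with 10 feature columns):
#
#   ann = ANN()
#   ann.create_network(nFeatures=10, hidden1Size=20, nClasses=1)
#   ann.train(TrainX, TrainY, ValidateX, ValidateY)
#   predictions = ann.predict(ValidateX)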
Exemplo n.º 45
0
# net = buildNetwork(len(t), len(t), 1)

#initialize a feed foward network
net = FeedForwardNetwork()

#create layers for FFN
inLayer = LinearLayer(
    len(t))  #sets up the number of nodes based on 'length' of the loaded image
hiddenLayer = SigmoidLayer(len(t))
outLayer = LinearLayer(
    10)  # ten outputs are needed - one for each digit (0-9)

# add layers to FFN
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)

#create connections between the layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
#add connections
net.addConnection(in_to_hidden)
net.addConnection(hidden_to_out)

net.sortModules()

print net

digits = load_digits()
X, y = digits.data, digits.target
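
# A possible continuation (a sketch, not from the source): build a training
# set from the digits data, assuming len(t) == 64 so the input layer matches
# the 8x8 images, with the targets one-hot encoded over the 10 digits.
#
#   from pybrain.datasets import SupervisedDataSet
#   ds = SupervisedDataSet(64, 10)
#   for xi, yi in zip(X, y):
#       target = [0] * 10
#       target[yi] = 1
#       ds.addSample(xi, target)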
Exemplo n.º 46
0
def part2():
    '''
    Determine the minimal number of hidden units
    required to train the network successfully
    using multiple hidden layers
    '''
    '''
    # Parameters
    HIDDEN_NODES =          8
    LEARNING_DECAY =        0.9999    # Set in range [0.9, 1]
    LEARNING_RATE =         0.08    # Set in range [0, 1]
    MOMENTUM =              0.0    # Set in range [0, 0.5]
    TRAINING_ITERATIONS =   1000
    BATCH_LEARNING =        False
    VALIDATION_PROPORTION = 0.0
    SPARSE_LENGTH =         16
    '''

    # Parameters
    HIDDEN_NODES = 4
    LEARNING_DECAY = 0.9999  # Set in range [0.9, 1]
    LEARNING_RATE = 0.111  # Set in range [0, 1]
    MOMENTUM = 0.05  # Set in range [0, 0.5]
    TRAINING_ITERATIONS = 5000
    BATCH_LEARNING = False
    VALIDATION_PROPORTION = 0.0
    SPARSE_LENGTH = 16

    # Get the dataset
    dataset = sparse_coding.generateFull(SPARSE_LENGTH)
    validationSet = sparse_coding.generateFull(SPARSE_LENGTH)
    dataset, classes = sparse_coding.toClassificationDataset(dataset)
    inDimension = dataset.indim
    outDimension = dataset.outdim

    print inDimension
    print outDimension

    # Set up the neural network layers
    inLayer = LinearLayer(inDimension, name='input')
    hiddenLayer1 = SigmoidLayer(HIDDEN_NODES, name='hidden1')
    hiddenLayer2 = TanhLayer(HIDDEN_NODES, name='hidden2')
    outLayer = LinearLayer(outDimension, name='output')

    # Set up the connections
    input_to_hidden1 = FullConnection(inLayer, hiddenLayer1, name='in_h1')
    hidden1_to_hidden2 = FullConnection(hiddenLayer1,
                                        hiddenLayer2,
                                        name='h1_h2')
    hidden2_to_output = FullConnection(hiddenLayer2, outLayer, name='h2_out')
    hidden1_to_output = FullConnection(hiddenLayer1, outLayer, name='h1_out')

    # Create the network and add the information
    neuralNet = FeedForwardNetwork()
    neuralNet.addInputModule(inLayer)
    neuralNet.addModule(hiddenLayer1)
    neuralNet.addModule(hiddenLayer2)
    neuralNet.addOutputModule(outLayer)

    neuralNet.addConnection(input_to_hidden1)
    neuralNet.addConnection(hidden1_to_hidden2)
    neuralNet.addConnection(hidden2_to_output)
    neuralNet.addConnection(hidden1_to_output)
    neuralNet.sortModules()

    print neuralNet

    # Train the network
    trainer = BackpropTrainer(neuralNet,
                              dataset,
                              learningrate=LEARNING_RATE,
                              momentum=MOMENTUM,
                              lrdecay=LEARNING_DECAY,
                              batchlearning=BATCH_LEARNING)

    trainingErrors = []
    validationErrors = []

    for i in xrange(TRAINING_ITERATIONS):
        print "Training iteration: ", i

        # Check if VALIDATION_PROPORTION is not 0. This will split the input dataset into
        # VALIDATION_PROPORTION % for Validation Data and
        # (1 - VALIDATION_PROPORTION) % for Training Data
        # e.g. 25% Validation Data and 75% Training Data

        if VALIDATION_PROPORTION == 0.0 or VALIDATION_PROPORTION == 0:
            # Cannot split the data set into training and validation data. Train the
            # neural network by standard means. This will not calculate validation error.

            # The result of training is the proportional error for the number of epochs run
            trainingError = trainer.train()
            trainingErrors.append(trainingError)

            # Display the result of training for the iteration
            print "   Training error:    ", trainingError
        else:
            trainingErrors, validationErrors = trainer.trainUntilConvergence(
                validationProportion=VALIDATION_PROPORTION)

    # Create the output path if it doesn't exist
    generated_dir = path.abspath(
        path.join("generated", "Q2Task2-TrainedNN-{}".format(
            strftime("%Y-%m-%d_%H-%M-%S"))))
    if not path.exists(generated_dir):
        makedirs(generated_dir)

    # save parameters
    with open(path.normpath(path.join(generated_dir, "params.txt")), "a") as f:
        f.write("HIDDEN_LAYERS = {}\n".format(HIDDEN_NODES))
        f.write("LEARNING_DECAY = {}\n".format(LEARNING_DECAY))
        f.write("LEARNING_RATE = {}\n".format(LEARNING_RATE))
        f.write("MOMENTUM = {}\n".format(MOMENTUM))
        f.write("TRAINING_ITERATIONS = {}\n".format(TRAINING_ITERATIONS))
        f.write("BATCH_LEARNING = {}\n".format(BATCH_LEARNING))
        f.write("VALIDATION_PROPORTION = {}\n".format(VALIDATION_PROPORTION))

    # Save the Trained Neural Network
    uniqueFileName = path.normpath(path.join(generated_dir, "data.pkl"))
    writeMode = 'wb'  # Write Bytes
    pickle.dump(neuralNet, open(uniqueFileName, writeMode))

    # Plot the results of training
    plot.plot(trainingErrors, 'b')
    plot.ylabel("Training Error")
    plot.xlabel("Training Steps")
    plot.savefig(path.normpath(path.join(generated_dir, "errors.png")))
    plot.show()
    plot.clf()

    from mpl_toolkits.mplot3d import Axes3D
    figure = plot.figure()
    axis = figure.add_subplot(111, projection='3d')
    colors = ['r', 'y', 'g', 'c', 'b', 'k']

    for sample in validationSet:
        classifier = sparse_coding.getClassifier(sample)
        activationResult = neuralNet.activate(sample)
        axis.bar(range(len(sample)),
                 activationResult,
                 classifier,
                 zdir='y',
                 color=colors[:len(sample)])

    plot.savefig(path.normpath(path.join(generated_dir, "activations.png")))
    plot.show()
Exemplo n.º 47
0
net = FeedForwardNetwork()

# Layers
inputLayer = LinearLayer(
    2)  # The input values won't pass through an activation function. 2 is the number of neurons
holdLayer = SigmoidLayer(3)
outputLayer = SigmoidLayer(1)

# Bias
bias1 = BiasUnit()
bias2 = BiasUnit()

# Add the net pieces
net.addModule(inputLayer)
net.addModule(holdLayer)
net.addModule(outputLayer)
net.addModule(bias1)
net.addModule(bias2)

# Connections
holdInput = FullConnection(inputLayer, holdLayer)
holdOutput = FullConnection(holdLayer, outputLayer)
holdBias = FullConnection(bias1, holdLayer)
outputBias = FullConnection(bias2, outputLayer)

# Add the connections to the net
net.addConnection(holdInput)
net.addConnection(holdOutput)
net.addConnection(holdBias)
net.addConnection(outputBias)

# Sort the modules to finish building the net
net.sortModules()

print(net)
class NFQIteration:

    _gamma = 0.9
    _epochs = 500  #1000
    _epochsNN = 100

    def __init__(self):

        self.Q = FeedForwardNetwork()

        # The value function is represented by a neural network
        # Input: S = (angle, angular velocity, position), A = action
        # Output: value
        # 2 hidden layers of 5 neurons each
        # Sigmoid activation function
        inLayer = SigmoidLayer(4, name="Input Layer")
        hiddenLayer1 = SigmoidLayer(5, name="Hidden Layer 1")
        hiddenLayer2 = SigmoidLayer(5, name="Hidden Layer 2")
        outLayer = SigmoidLayer(1, name="Output Layer")

        self.Q.addInputModule(inLayer)
        self.Q.addModule(hiddenLayer1)
        self.Q.addModule(hiddenLayer2)
        self.Q.addOutputModule(outLayer)

        connInToHidden1 = FullConnection(inLayer, hiddenLayer1)
        connHidden1ToHidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
        connHidden2ToOut = FullConnection(hiddenLayer2, outLayer)

        self.Q.addConnection(connInToHidden1)
        self.Q.addConnection(connHidden1ToHidden2)
        self.Q.addConnection(connHidden2ToOut)

        self.Q.sortModules()

    def train(self, transitionSamples):

        print "Entrenando..."

        k = 0
        trainer = RPropMinusTrainer(self.Q, batchlearning=True)
        #trainer = BackpropTrainer(self.Q, batchlearning=False)
        TS = SupervisedDataSet(4, 1)

        while (k < self._epochs):

            if k % 10 == 0:
                print "\t ", k

            # Build the training set from the samples
            # Input: 4-dimensional vector (angle, angular velocity, position, action)
            # Target: value

            TS.clear()

            for s, a, s_1, costo in transitionSamples:

                # Evaluate Q on s' for every possible action
                # (vector with the value of s' for each of the 3 possible actions)
                # Q_s1 = [ self.Q.activate([s_1.angulo, s_1.velocidadAngular, s_1.posicion, b]) for b in range(Accion.maxValor + 1) ]
                valDerecha = self.Q.activate([
                    s_1.angulo, s_1.velocidadAngular, s_1.posicion,
                    Accion.DERECHA
                ])
                valIzquierda = self.Q.activate([
                    s_1.angulo, s_1.velocidadAngular, s_1.posicion,
                    Accion.IZQUIERDA
                ])

                if valDerecha >= 1 or valDerecha <= 0:
                    print "Invalid Q value: ", valDerecha

                if valIzquierda >= 1 or valIzquierda <= 0:
                    print "Invalid Q value: ", valIzquierda

                # Input and target for the neural network
                inputVal = (s.angulo, s.velocidadAngular, s.posicion, a)

                if costo == 0:
                    targetVal = costo
                else:
                    targetVal = costo + self._gamma * min(
                        valDerecha, valIzquierda)

                if targetVal > 1 or targetVal < 0:
                    print "Invalid target: ", targetVal

                TS.addSample(inputVal, targetVal)

            # Train the neural network
            trainer.setData(TS)
            trainer.train()  # 1 epoch
            #trainer.trainEpochs(self._epochsNN)

            k = k + 1
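
# A hypothetical usage sketch for NFQIteration (assumes transitionSamples is a
# list of (state, action, next_state, cost) tuples gathered elsewhere, where
# states expose .angulo, .velocidadAngular and .posicion):
#
#   nfq = NFQIteration()
#   nfq.train(transitionSamples)
#   value = nfq.Q.activate([angle, angular_velocity, position, action])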
Exemplo n.º 49
0
class NET():
    def __init__(self, arg):
        self.inputsize = arg[0]
        self.outputsize = arg[-1]
        self.hiden = arg[1:-1]
        self.err = 1
        self.old_err = 1
        b = []
        b.append(self.inputsize)
        b += self.hiden
        b.append(self.outputsize)
        #print b#"%s, %s, %s, hiddenclass=TanhLayer"%(self.inputsize, self.hiden, self.outputsize)
        self.net = FeedForwardNetwork()
        self.inputlayer = LinearLayer(self.inputsize, "Input")
        self.net.addInputModule(self.inputlayer)
        self.outputlayer = LinearLayer(self.outputsize, "Output")
        self.net.addOutputModule(self.outputlayer)
        self.hidenlayers = []
        for i in xrange(len(self.hiden)):
            self.hidenlayers.append(SigmoidLayer(self.hiden[i], "hiden%s" % i))
            self.net.addModule(self.hidenlayers[-1])
        self.net.addConnection(
            FullConnection(self.inputlayer, self.outputlayer))
        for i in xrange(len(self.hidenlayers)):
            self.net.addConnection(
                FullConnection(self.inputlayer, self.hidenlayers[i]))
            self.net.addConnection(
                FullConnection(self.hidenlayers[i], self.outputlayer))
        for i in xrange(len(self.hidenlayers)):
            for j in xrange(i + 1, len(self.hidenlayers)):
                self.net.addConnection(
                    FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
                #self.print_conections(self.net)
        self.net.sortModules()
        self.ds = SupervisedDataSet(self.inputsize, self.outputsize)

    def Update(self, hiden, h):
        self.net = FeedForwardNetwork()
        self.inputlayer = LinearLayer(self.inputsize, "Input")
        self.net.addInputModule(self.inputlayer)
        self.outputlayer = LinearLayer(self.outputsize, "Output")
        self.net.addOutputModule(self.outputlayer)
        self.hidenlayers = []
        for i in xrange(len(hiden)):
            self.hidenlayers.append(SigmoidLayer(hiden[i], "hiden%s" % i))
            self.net.addModule(self.hidenlayers[-1])
        self.net.addConnection(
            FullConnection(self.inputlayer, self.outputlayer))
        for i in xrange(len(self.hidenlayers)):
            self.net.addConnection(
                FullConnection(self.inputlayer, self.hidenlayers[i]))
            self.net.addConnection(
                FullConnection(self.hidenlayers[i], self.outputlayer))
        for i in xrange(len(self.hidenlayers)):
            for j in xrange(i + 1, len(self.hidenlayers)):
                if i < h:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j]))
                elif i == h:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j],
                                       inSliceTo=hiden[i] - 1))
                else:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j]))
                #self.print_conections(self.net)
        self.net.sortModules()
        self.hiden = hiden

    def print_conections(self, n):
        print("BEGIN")
        for mod in n.modules:
            print(mod)
            for conn in n.connections[mod]:
                print(conn)
                for cc in range(len(conn.params)):
                    print(conn.whichBuffers(cc), conn.params[cc])
        print("END")

    def AddData(self, datainput, dataoutput, learningrate):
        if len(dataoutput) != len(datainput):
            print("Not equals data", len(dataoutput), len(datainput))
            return 1
        self.ds = SupervisedDataSet(self.inputsize, self.outputsize)
        for i in xrange(len(dataoutput)):
            self.ds.appendLinked(datainput[i], dataoutput[i])
        self.trainer = BackpropTrainer(self.net,
                                       dataset=self.ds,
                                       learningrate=learningrate)
        return 0

    def TrainNet(self, epoch, error):

        if epoch <= 5:
            epoch = 5
        i = 0
        count = 0
        while i < epoch:
            if error == self.err:
                break
            self.err = self.trainer.train()
            if self.err == self.old_err:
                count += 1
            else:
                count = 0
            if count == 3:
                self.err = self.old_err
                return (self.err, 1)
            self.old_err = self.err
            i += 1
        #self.SaveNet('%s  %s_%s_%s.work'%(self.err, self.inputsize, self.hiden, self.outputsize))
        return [self.err, 0]

    def TrainNetOnce(self):

        self.err = self.trainer.train()

        return self.err

    def SaveNet(self, filename=None):
        if filename == None:
            NetworkWriter.writeToFile(
                self.net, '%s  %s_%s_%s.xml' %
                (self.err, self.inputsize, self.hiden, self.outputsize))
        else:
            NetworkWriter.writeToFile(self.net, filename)

    def LoadNet(self, fname):
        self.net = NetworkReader.readFrom(fname)
        tree = ET.parse(fname)
        x = tree.getroot()
        l = []
        for modules in x.findall('Network/Modules/SigmoidLayer/dim'):
            l.append(int(modules.get("val")))
        self.hiden = l[:]
        self.inputsize = self.net.indim
        self.outputsize = self.net.outdim

    def TestNet(self, inp):
        if len(inp) != self.inputsize:
            return 0
        return self.net.activate(inp[:])

    def UpdateWeights(self, f1, f2=None):
        n = NetworkReader.readFrom(f1)
        if f2 != None:
            n2 = NetworkReader.readFrom(f2)

        def DictParams(n):
            l1 = []
            for mod in n.modules:
                l = []
                for conn in n.connections[mod]:

                    if conn.paramdim > 0:

                        l.append([conn.outmod.name, conn.params])
                d = dict(l)
                l1.append([mod.name, d])
            d1 = dict(l1)
            return d1

        d1 = DictParams(n)
        if f2 != None:
            d2 = DictParams(n2)
        d3 = DictParams(self.net)

        params = np.array([])
        if f2 != None:
            for i in d2:
                for j in d2[i]:
                    try:
                        b = d3[i][j][:]
                        b[:d2[i][j].size] = d2[i][j][:]
                        d3[i].update({j: b})
                    except:
                        pass
        for i in d1:
            for j in d1[i]:
                try:
                    b = d3[i][j][:]
                    b[:d1[i][j].size] = d1[i][j][:]
                    d3[i].update({j: b})
                except:
                    pass
        for i in d3["Input"]:
            params = np.hstack((params, d3["Input"][i]))
        for i in xrange(len(self.hiden)):
            for j in d3["hiden%s" % i]:
                params = np.hstack((params, d3["hiden%s" % i][j]))
        self.net._setParameters(params)
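
# A minimal usage sketch for the NET class above (illustrative values only):
#
#   net = NET([2, 3, 1])          # 2 inputs, one hidden layer of 3 neurons, 1 output
#   net.AddData(datainput, dataoutput, 0.1)
#   err, stalled = net.TrainNet(100, 0.001)
#   print net.TestNet([0.5, 0.5])
#   net.SaveNet('trained.xml')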
Exemplo n.º 50
0
# Here we instantiate a Feed-Forward Network.
annet = FeedForwardNetwork()

# Creation of the input layer: Here the integer denotes the number of nodes we wish to have in the layer.
inLayer = LinearLayer(2, 'inlyr')

# Creation of the hidden layer.
hid1Layer = SigmoidLayer(2, 'hiddenlyr')

# Creation of the Output layer.
outLayer = LinearLayer(1, 'outlyr')

# Instantiating the Bias Unit.
bias_val = BiasUnit()

# Adding the corresponding layers to the network.
annet.addInputModule(inLayer)
annet.addModule(hid1Layer)
annet.addModule(bias_val)
annet.addOutputModule(outLayer)

# Adding the connections between layers pair-wise.
# Note:
# FullConnection means every node of one layer is connected to every node of the next layer.
annet.addConnection(FullConnection(inLayer, hid1Layer))
annet.addConnection(FullConnection(bias_val, hid1Layer))
annet.addConnection(FullConnection(hid1Layer, outLayer))

# The method performs internal management of the specifications.
annet.sortModules()
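
# A minimal training sketch for the network above (a sketch, not from the
# source; the XOR-style data is illustrative):
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

xor_ds = SupervisedDataSet(2, 1)
for sample, target in [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]:
    xor_ds.addSample(sample, target)

xor_trainer = BackpropTrainer(annet, xor_ds)
for _ in range(100):
    xor_trainer.train()  # each call runs one epoch and returns its error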
Exemplo n.º 51
0
from pybrain.structure import FeedForwardNetwork
n = FeedForwardNetwork()

from pybrain.structure import LinearLayer, SigmoidLayer
inLayer = LinearLayer(2, name="Foo The II of LinearLayer")
hiddenLayer = SigmoidLayer(3, name="Bob the Pesant")
outLayer = LinearLayer(1, name="Foo The II Royal Decree")
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

from pybrain.structure import FullConnection
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
n.sortModules()

print n
Exemplo n.º 52
0
class NetworkManager(object):
    def __init__(self, hidden_layers, ally_champ_obj_list,
                 enemy_champ_obj_list):

        self.ally_champ_obj_list = ally_champ_obj_list
        self.enemy_champ_obj_list = enemy_champ_obj_list

        self.set_nodes()

        self.network = FeedForwardNetwork()

        connect_queue = Queue.Queue()

        for layer in xrange(0, hidden_layers):
            connect_queue.put(
                TanhLayer(self.input_node_count,
                          name='hidden_layer_{}'.format(layer)))

        connect_queue.put(SigmoidLayer(1, name='output_layer'))

        prev_layer = LinearLayer(self.input_node_count, name='input_layer')
        self.network.addInputModule(prev_layer)

        while not connect_queue.empty():

            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)

            bias = BiasUnit()
            bias_connection = FullConnection(
                bias,
                current_layer,
                name="bias_to_{}_connection".format(current_layer.name))
            self.network.addModule(bias)
            self.network.addConnection(bias_connection)

            connection = FullConnection(prev_layer,
                                        current_layer,
                                        name="{}_to_{}_connection".format(
                                            prev_layer.name,
                                            current_layer.name))
            self.network.addConnection(connection)

            prev_layer = current_layer

        self.network.sortModules()

    def get_node_count(self):
        return (len(self.ally_champ_obj_list) + len(self.enemy_champ_obj_list))

    def set_nodes(
        self
    ):  #TEMPORARY SET_NODE - PERFORMS QUERYSET EVERYTIME - SHOULD EVENTUALLY STORE VALUES AS NODES
        node_list = {}
        match_count = 0
        champion_list = list(
            enumerate(self.ally_champ_obj_list + self.enemy_champ_obj_list))
        self.input_node_count = len(champion_list)
        while len(champion_list) != 0:
            pid, prime = champion_list.pop()
            data = {}
            queryset = Player.objects.filter(champion=prime)

            for (cid, champ) in champion_list:

                if pid <= 4 and cid <= 4:
                    ally = True
                elif pid > 4 and cid > 4:
                    ally = True
                else:
                    ally = False

                if ally:
                    matches = queryset.filter(ally_heroes=champ)
                else:
                    matches = queryset.filter(enemy_heroes=champ)
                match_count += len(matches)

                data[champ] = {
                    'ally': ally,
                    'wins': matches.filter(winner=True).count(),
                    'loses': matches.filter(winner=False).count()
                }

                node_list[prime] = data

        self.node_set = node_list

    def train_network(self):

        training_set = SupervisedDataSet(self.input_node_count, 1)
        validation_set = SupervisedDataSet(self.input_node_count, 1)

        champion_list = list(
            enumerate(self.ally_champ_obj_list + self.enemy_champ_obj_list))
        while len(champion_list) != 0:
            pid, prime = champion_list.pop()
            for (cid, champ) in champion_list:

                input_set = [0] * self.input_node_count
                input_set[pid] = 1
                input_set[cid] = 1

                wins = self.node_set[prime][champ]['wins']
                loses = self.node_set[prime][champ]['loses']

                for win in xrange(0, wins):
                    training_set.addSample(input_set, [1])
                for loss in xrange(0, loses):
                    training_set.addSample(input_set, [0])

        print 'Training Set Length = ', len(training_set)

        ally_list = self.ally_champ_obj_list
        enemy_list = self.enemy_champ_obj_list
        prime = ally_list.pop()
        validation_queryset = Player.objects.filter(champion=prime)
        print len(validation_queryset)
        for ally in ally_list:
            validation_queryset = validation_queryset.filter(ally_heroes=ally)
            print len(validation_queryset)
        for enemy in enemy_list:
            validation_queryset = validation_queryset.filter(
                enemy_heroes=enemy)
            print len(validation_queryset)

        print 'Validation Set Length = ', len(validation_queryset)

        validation_wins = validation_queryset.filter(winner=True).count()
        validation_loses = validation_queryset.filter(winner=False).count()

        for win in xrange(0, validation_wins):
            validation_set.addSample([1] * self.input_node_count, [1])
        for loss in xrange(0, validation_loses):
            validation_set.addSample([1] * self.input_node_count, [0])

        if not validation_set:
            print 'There is no Validation Set, more error in output'
        else:
            print 'Raw Win Rate = ', str(
                float(validation_wins) /
                float(validation_wins + validation_loses))

        trainer = BackpropTrainer(self.network, learningrate=0.5)
        trainer.trainUntilConvergence(
            validationData=validation_set,
            trainingData=training_set,
            dataset=training_set,
            continueEpochs=10,
            maxEpochs=50,
            convergence_threshold=1,
        )

        return str(
            float(validation_wins) / float(validation_wins + validation_loses))

    def run_network(self):

        input_set = [1] * self.input_node_count
        return self.network.activate(input_set)
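
# A hypothetical usage sketch for NetworkManager (assumes ally/enemy champion
# object lists built from the Django models used above):
#
#   manager = NetworkManager(hidden_layers=2,
#                            ally_champ_obj_list=allies,
#                            enemy_champ_obj_list=enemies)
#   raw_win_rate = manager.train_network()
#   predicted_win_chance = manager.run_network()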
# 2 is the number of neurons in the input layer
EntryLayer = LinearLayer(2)

# hidden layer with 3 neurons
HideLayer = SigmoidLayer(3)

# output layer with 1 neuron
ExitLayer = SigmoidLayer(1)

# bias units with extra neurons
bias1 = BiasUnit()
bias2 = BiasUnit()

# Add the modules to the network with the initial settings
network.addModule(EntryLayer)
network.addModule(HideLayer)
network.addModule(ExitLayer)
network.addModule(bias1)
network.addModule(bias2)

# Connect the layers
EntryToHideLayer = FullConnection(EntryLayer, HideLayer)
HideToExitLayer = FullConnection(HideLayer, ExitLayer)
biasToHideLayer = FullConnection(bias1, HideLayer)
biasToExitLayer = FullConnection(bias2, ExitLayer)

# Add the connections to the network
network.addConnection(EntryToHideLayer)
network.addConnection(HideToExitLayer)
network.addConnection(biasToHideLayer)
network.addConnection(biasToExitLayer)

# the neural network and its structures will now be created
network.sortModules()

print(network)
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection

network = FeedForwardNetwork()

inputLayer = LinearLayer(2)
hiddenLayer = SigmoidLayer(3)
outputLayer = SigmoidLayer(1)

bias1 = BiasUnit()
bias2 = BiasUnit()

network.addModule(bias1)
network.addModule(bias2)

network.addModule(inputLayer)
network.addModule(hiddenLayer)
network.addModule(outputLayer)

inputHidden = FullConnection(inputLayer, hiddenLayer)
hiddenOutput = FullConnection(hiddenLayer, outputLayer)

biasToHidden = FullConnection(bias1, hiddenLayer)
biasToOutput = FullConnection(bias2, outputLayer)

network.addConnection(inputHidden)
network.addConnection(hiddenOutput)
network.addConnection(biasToHidden)
network.addConnection(biasToOutput)

#initialize the network; layers and modules are sorted topologically
network.sortModules()

print(network)
print(biasToHidden.params)
Exemplo n.º 55
0
"""

from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection

rede = FeedForwardNetwork()

camada_de_entrada = LinearLayer(2)
camada_oculta = SigmoidLayer(3)
camada_saida = SigmoidLayer(1)

bias_1 = BiasUnit()
bias_2 = BiasUnit()

rede.addModule(camada_de_entrada)
rede.addModule(camada_oculta)
rede.addModule(camada_saida)
rede.addModule(bias_1)
rede.addModule(bias_2)

entradaOculta = FullConnection(camada_de_entrada, camada_oculta)
Oculta_a_saida = FullConnection(camada_oculta, camada_saida)

biasOculta = FullConnection(bias_1, camada_oculta)
biasSaida = FullConnection(bias_2, camada_saida)

rede.addConnection(entradaOculta)
rede.addConnection(Oculta_a_saida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)

rede.sortModules()

print(rede)
print(entradaOculta.params)
Exemplo n.º 56
0
class NNInitializer:

    #A few constants for this function....
    NEURAL_NET_OBJECT_PATH = "../PickledObjects/NeuralNets/"
    TRAINING_SET_OBJECT_PATH = "../PickledObjects/TrainingSets/"
    NUMBER_OF_DATA_SETS = 100000
    NUMBER_OF_INPUTS = 5
    NUMBER_OF_OUTPUTS = 5

    def __init__(self):
        pass

    def initNetwork(self):
        #Initialize the neural net
        self.neuralNet = FeedForwardNetwork()

        #Define and add each set of layers
        inLayer = LinearLayer(5)
        hiddenLayer = SigmoidLayer(15)
        outLayer = LinearLayer(5)

        self.neuralNet.addInputModule(inLayer)
        self.neuralNet.addModule(hiddenLayer)
        self.neuralNet.addOutputModule(outLayer)

        #Create connections
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        self.neuralNet.addConnection(in_to_hidden)
        self.neuralNet.addConnection(hidden_to_out)

        #Sort the NeuralNet
        self.neuralNet.sortModules()

        #Add supervised data sets
        ds = SupervisedDataSet(NNInitializer.NUMBER_OF_INPUTS,
                               NNInitializer.NUMBER_OF_OUTPUTS)
        inputSet, outputSet = self.loadTrainingSet('scents_based_input',
                                                   'scents_based_output')
        #inputSet, outputSet = self.generateTrainingSet()

        print "Adding samples to data set...."
        for i, val in enumerate(inputSet):
            #	print "Input Set: "
            #	print inputSet[i]
            #	print "Output Set: "
            #	print outputSet[i]
            ds.addSample(inputSet[i], outputSet[i])
        print "Done."

        print "Starting training...."
        #Perform Training
        trainer = BackpropTrainer(self.neuralNet, ds)
        trainer.train()
        print "Done."

    #Generate a training set for the neural net
    def generateTrainingSet(self):
        print "Generating random training samples...."

        inputSet = []
        outputSet = []

        for i in range(0, NNInitializer.NUMBER_OF_DATA_SETS):
            trainingInput = []  # start w/ an empty list
            trainingOutput = [0] * NNInitializer.NUMBER_OF_OUTPUTS  # start w/ list of all 0's

            for j in range(0, NNInitializer.NUMBER_OF_INPUTS):
                # add random inputs between 0 and 1, inclusive, biased towards 0.985
                trainingInput.append(random.triangular(0.0, 1.0, 0.985))

            #print trainingInput
            maxVal = max(trainingInput)
            maxIndex = trainingInput.index(maxVal)
            trainingInput = [val / maxVal for val in trainingInput]
            trainingOutput[maxIndex] = 1  # the max sense should be the correct movement output

            inputSet.append(tuple(trainingInput))
            outputSet.append(tuple(trainingOutput))

        print "Done."
        return (inputSet, outputSet)

    def loadTrainingSet(self, inputFilename, outputFilename):
        file_path = NNInitializer.TRAINING_SET_OBJECT_PATH + inputFilename
        fd = open(file_path, "rb")
        inputSet = pickle.load(fd)
        fd.close()

        file_path = NNInitializer.TRAINING_SET_OBJECT_PATH + outputFilename
        fd = open(file_path, "rb")
        outputSet = pickle.load(fd)
        fd.close()

        return (inputSet, outputSet)

    def saveNetwork(self, filename):
        file_path = NNInitializer.NEURAL_NET_OBJECT_PATH + filename
        fd = open(file_path, "wb")
        pickle.dump(self.neuralNet, fd)
        fd.close()
        print "Neural net saved at: " + file_path

    def readNetwork(self, filename):
        file_path = NNInitializer.NEURAL_NET_OBJECT_PATH + filename
        fd = open(file_path, "rb")
        self.neuralNet = pickle.load(fd)
        fd.close()
        return self.neuralNet
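
# A minimal usage sketch for NNInitializer (hypothetical file names; the
# pickled training-set files are assumed to exist under the paths above):
#
#   nn_init = NNInitializer()
#   nn_init.initNetwork()
#   nn_init.saveNetwork("scents_based_net")
#   net = nn_init.readNetwork("scents_based_net")
#   print net.activate([0.2, 0.9, 0.1, 0.4, 0.3])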
Exemplo n.º 57
0
def set_pybrain_nn(X, y):

    params_len = len(X[0])

    print(params_len)
    hidden_size = 100
    output_layer_num = 2
    epochs = 200

    # init and train
    net = FeedForwardNetwork()

    """ Next, we're constructing the input, hidden and output layers. """
    inLayer = LinearLayer(params_len)
    hiddenLayer = SigmoidLayer(hidden_size)
    hiddenLayer1 = SigmoidLayer(hidden_size)
    hiddenLayer2 = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(output_layer_num)


    """ (Note that we could also have used a hidden layer of type TanhLayer, LinearLayer, etc.)
    Let's add them to the network: """
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addModule(hiddenLayer2)
    net.addOutputModule(outLayer)

    """ We still need to explicitly determine how they should be connected. For this we use the most
    common connection type, which produces a full connectivity between two layers (or Modules, in general):
    the 'FullConnection'. """

    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden2hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden2out = FullConnection(hiddenLayer2, outLayer)

    net.addConnection(in2hidden)
    net.addConnection(hidden2hidden)
    net.addConnection(hidden2hidden1)
    net.addConnection(hidden2out)

    """ All the elements are in place now, so we can do the final step that makes our MLP usable,
    which is to call the 'sortModules()' method. """

    net.sortModules()

    #ds = SupervisedDataSet(params_len, output_layer_num)
    ds = ClassificationDataSet(params_len, output_layer_num, nb_classes=2)
    ds.setField('input', X)
    ds.setField('target', y)

    trainer = BackpropTrainer(net, ds)

    print("training for {} epochs...".format(epochs))

    #trainer.trainUntilConvergence(verbose=True)
    #trainer.train()

    for i in range(epochs):
        mse = trainer.train()
        rmse = sqrt(mse)
        print("training RMSE, epoch {}: {}".format(i + 1, rmse))

    pickle.dump(net, open('model/nn_brain', 'wb'))
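
# A possible follow-up (a sketch, not from the source): reload the pickled
# model and run a prediction, assuming `sample` is a feature vector of the
# same length as the training rows.
#
#   import pickle
#   net = pickle.load(open('model/nn_brain', 'rb'))
#   print(net.activate(sample))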
Exemplo n.º 58
0
print trainer.train()
# trainUntilConvergence returns a tuple containing the errors for every training epoch.
print trainer.trainUntilConvergence()

############### Feed forward networks ######################

n = FeedForwardNetwork()

#   Constructing input, output & hidden layers & giving names to the network
inLayer = LinearLayer(2, name='in')
hiddenLayer = SigmoidLayer(3, name='hidden')
outLayer = LinearLayer(1, name='out')

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

#   Full Connection class - add connections/synapses

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
# the final step that makes our MLP usable
n.sortModules()
print n.activate([1, 2])

print n

#RecurrentConnection class - which looks back in time one timestep.
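
# A short sketch of the recurrent counterpart (standard PyBrain API; the layer
# sizes are illustrative):
from pybrain.structure import RecurrentNetwork, LinearLayer, SigmoidLayer, FullConnection

rn = RecurrentNetwork()
rn.addInputModule(LinearLayer(2, name='in'))
rn.addModule(SigmoidLayer(3, name='hidden'))
rn.addOutputModule(LinearLayer(1, name='out'))
rn.addConnection(FullConnection(rn['in'], rn['hidden'], name='c1'))
rn.addConnection(FullConnection(rn['hidden'], rn['out'], name='c2'))
# the recurrent connection looks back in time one timestep
rn.addRecurrentConnection(FullConnection(rn['hidden'], rn['hidden'], name='c3'))
rn.sortModules()
print rn.activate((2, 2))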
Exemplo n.º 59
0
fnn = FeedForwardNetwork()

# create layers: one input node per pixel of a 64x64 RGB image, a hidden layer
# of half that size, 2 output layer nodes, plus a bias unit
bias = BiasUnit(name='bias unit')
input_layer = LinearLayer(64 * 64 * 3, name='input layer')
hidden_layer = SigmoidLayer(64 * 64 * 3 / 2, name='hidden layer')
output_layer = SigmoidLayer(2, name='output layer')

# create connections with full connectivity between layers
bias_to_hidden = FullConnection(bias, hidden_layer, name='bias-hid')
bias_to_output = FullConnection(bias, output_layer, name='bias-out')
input_to_hidden = FullConnection(input_layer, hidden_layer, name='in-hid')
hidden_to_output = FullConnection(hidden_layer, output_layer, name='hid-out')

# add layers & connections to network
fnn.addModule(bias)
fnn.addInputModule(input_layer)
fnn.addModule(hidden_layer)
fnn.addOutputModule(output_layer)
fnn.addConnection(bias_to_hidden)
fnn.addConnection(input_to_hidden)
fnn.addConnection(hidden_to_output)
fnn.addConnection(bias_to_output)
fnn.sortModules()

# set up trainer that takes network & training dataset as input

# train model until convergence
trainer = BackpropTrainer(fnn,
                          dataset=trainingData,
                          verbose=True)
#----------------------------------------------
#   Feed Forward Networks
#----------------------------------------------
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure import FullConnection

n = FeedForwardNetwork()

#   Construct input, hidden and output layers
inLayer = LinearLayer(2)
hiddenlayer = SigmoidLayer(3)
outLayer = LinearLayer(1)

#   Add layers to the network
n.addInputModule(inLayer)
n.addModule(hiddenlayer)
n.addOutputModule(outLayer)

#   Add full connections between the neurons of each layer
in_to_hidden = FullConnection(inLayer, hiddenlayer)
hidden_to_out = FullConnection(hiddenlayer, outLayer)
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

#   Final step: necessary for sorting the modules and other internal initialization
n.sortModules()

#----------------------------------------------
#   Examining a Network
#----------------------------------------------

n.activate([1, 2])
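
#   Named connections keep their own weight vectors, and the network exposes
#   all of its parameters in a single array (standard PyBrain behavior):
print in_to_hidden.params
print hidden_to_out.params
print n.params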