Example #1
    ga = Genetic_Algorithm(individuals = 1000, 
                           mutation_rate = 0.1,
                           # (4 features + 1 bias) * 2 neurons = 10
                           # (2 neuron outputs + 1 bias) * 3 output neurons = 9
                           # total weights: 10 + 9 = 19
                           genotype = 19, 
                           fenotype = fenotype, 
                           genotype_type = Genetic_Algorithm.TYPE_FLOAT)
    
#     ga.print_individuals()
    ga.execute(generations = 100, log=True)
#     ga.print_individuals()
    print "Best Individual for all generations", ga.get_best_individual().get_genotype(), ga.get_best_individual_fenotype()
    
    genotype = ga.get_best_individual().get_genotype()
        
    pos = 0
    
    for i, layer in enumerate(mlp.get_layers()):
        for j, neuron in enumerate(layer.get_neurons()):
            
            end_neuron_position = pos + len(neuron.get_weights())
            
            neuron.get_weights()[:] = genotype[pos: end_neuron_position]
                                                    
            neuron.set_bias(genotype[end_neuron_position])
            
            pos = end_neuron_position + 1
            
    mlp.get_weights('pesos_ga_iris.mlp')
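
The comments in this example pin the genotype length at 19 genes: with 4 input features, a hidden layer of 2 neurons costs (4 + 1) * 2 = 10 weights (the +1 is each neuron's bias), and the 3 output neurons cost (2 + 1) * 3 = 9 more. The sketch below is plain NumPy with hypothetical helper names (genotype_length, unpack), not the repository's Genetic_Algorithm/MLP API; it reproduces that arithmetic and the same flat-vector unpacking that the loop over layers and neurons performs.

import numpy

def genotype_length(features, topology):
    """Number of weights + biases for a fully connected net (hypothetical helper)."""
    length, fan_in = 0, features
    for n_neurons in topology:
        length += (fan_in + 1) * n_neurons  # +1 gene per neuron for its bias
        fan_in = n_neurons
    return length

def unpack(genotype, features, topology):
    """Split a flat genotype into (weights, bias) per neuron, layer by layer."""
    pos, layers, fan_in = 0, [], features
    for n_neurons in topology:
        neurons = []
        for _ in range(n_neurons):
            end = pos + fan_in
            neurons.append((numpy.asarray(genotype[pos:end]), genotype[end]))
            pos = end + 1  # step past the bias gene
        layers.append(neurons)
        fan_in = n_neurons
    return layers

# 4 features, hidden layer of 2, output layer of 3 -> (4+1)*2 + (2+1)*3 = 19
assert genotype_length(4, [2, 3]) == 19
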
    
Example #2
    xor = numpy.ndarray((4, 1))
    xor[:, 0] = numpy.logical_xor(inputs[:, 0], inputs[:, 1])

    return inputs, xor


def test(mlp):
    inputs = numpy.ndarray((4, 2))

    inputs[:, 0] = numpy.array([0, 0, 1, 1])
    inputs[:, 1] = numpy.array([0, 1, 0, 1])

    xor = numpy.ndarray((4, 1))
    xor[:, 0] = numpy.logical_xor(inputs[:, 0], inputs[:, 1])

    for inp, x in zip(inputs, xor):
        print inp, x, mlp.output(inp)


if __name__ == '__main__':
    inputs, xor = createData()

    mlp = MLP(n=0.3, features=2, topology=[4, 2, 1])

    errors = mlp.train_data(train=inputs, target=xor, epochs=10000, log=True)
    print errors
    #     print mlp.get_weights()
    test(mlp)

    mlp.get_weights('xor.mlp')
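
createData() and test() both build the XOR truth table column by column, and test() prints the raw network output next to each target. Below is a minimal sketch in plain NumPy of the same data construction plus a hypothetical accuracy helper that thresholds assumed sigmoid-style outputs at 0.5; neither function is part of the repository's MLP API.

import numpy

def xor_data():
    """The four XOR rows as a (4, 2) input matrix and a (4, 1) target column."""
    inputs = numpy.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
    targets = numpy.logical_xor(inputs[:, 0], inputs[:, 1]).reshape(4, 1).astype(float)
    return inputs, targets

def accuracy(outputs, targets, threshold=0.5):
    """Fraction of rows where the thresholded output matches the 0/1 target."""
    predictions = (numpy.asarray(outputs) > threshold).astype(float)
    return float(numpy.mean(predictions == targets))

inputs, targets = xor_data()
# e.g. accuracy([mlp.output(row) for row in inputs], targets) after training
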
Example #3
        mutation_rate=0.1,
        # (4 features + 1 bias) * 2 neurons = 10
        # (2 neuron outputs + 1 bias) * 3 output neurons = 9
        # total weights: 10 + 9 = 19
        genotype=19,
        fenotype=fenotype,
        genotype_type=Genetic_Algorithm.TYPE_FLOAT)

    #     ga.print_individuals()
    ga.execute(generations=100, log=True)
    #     ga.print_individuals()
    print "Best Individual for all generations", ga.get_best_individual(
    ).get_genotype(), ga.get_best_individual_fenotype()

    genotype = ga.get_best_individual().get_genotype()

    pos = 0

    for i, layer in enumerate(mlp.get_layers()):
        for j, neuron in enumerate(layer.get_neurons()):

            end_neuron_position = pos + len(neuron.get_weights())

            neuron.get_weights()[:] = genotype[pos:end_neuron_position]

            neuron.set_bias(genotype[end_neuron_position])

            pos = end_neuron_position + 1

    mlp.get_weights('pesos_ga_iris.mlp')
Example #4
    
    
    return inputs, xor

def test(mlp):
    inputs = numpy.ndarray((4,2))
    
    inputs[:,0] = numpy.array([0,0,1,1])
    inputs[:,1] = numpy.array([0,1,0,1])
    
    xor = numpy.ndarray((4,1))
    xor[:,0] = numpy.logical_xor(inputs[:,0], inputs[:,1])
    
    for inp, x in zip(inputs, xor):
        print inp, x, mlp.output(inp)
    

if __name__ == '__main__':
    inputs, xor = createData()
    
    mlp = MLP(n = 0.3,
              features = 2, 
              topology = [4,2,1])
    
    errors = mlp.train_data(train = inputs, target = xor, epochs = 10000, log=True)
    print errors
#     print mlp.get_weights()
    test(mlp)

    mlp.get_weights('xor.mlp')