Example #1
0
def getbest(i):
    """Evolve a small NEAT population (seeded with `i`) for up to 200
    generations; stop early once best fitness exceeds 15.0.

    Returns the index of the last generation that ran.
    """
    seed_genome = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                              NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params)
    population = NEAT.Population(seed_genome, params, True, 1.0, i)
    population.RNG.Seed(i)

    last_generation = 0
    for gen in range(200):
        members = NEAT.GetGenomeList(population)
        scores = EvaluateGenomeList_Serial(members, evaluate, display=False)
        NEAT.ZipFitness(members, scores)
        population.Epoch()
        last_generation = gen
        if max(scores) > 15.0:
            break

    return last_generation
Example #2
0
 def sparseness(genome):
     """Novelty-search sparseness: mean behavioral distance from `genome`
     to its ns_K nearest neighbors among the population and the archive."""
     dists = [genome.behavior.distance_to(other.behavior)
              for other in NEAT.GetGenomeList(pop)]
     dists.extend(genome.behavior.distance_to(entry) for entry in archive)
     dists.sort()
     # skip index 0 -- presumably the genome's zero distance to itself,
     # since it is a member of the population being scanned
     return np.mean(dists[1:ns_K + 1])
Example #3
0
def evolve(env_name, seed, params, max_evaluations, num_batches):
    """Evolve NEAT agents for a Gym environment, yielding batched test results.

    Yields (evaluations_used, test_results, n_neurons, n_connections) for the
    best genome seen after each batch of generations.

    NOTE: Python 2 code (xrange, iterator.next()); integer division is relied
    upon when computing generations_per_batch.
    """
    env = gym.make(env_name)
    discrete_output = isinstance(env.action_space,
                                 gym.spaces.discrete.Discrete)

    train_network, test_network = configure_train_test(env_name, seed)

    if discrete_output:
        # one network output per discrete action
        g = NEAT.Genome(0, len(np.reshape(env.observation_space.sample(),
                                          -1)), 0, env.action_space.n, False,
                        NEAT.ActivationFunction.LINEAR,
                        NEAT.ActivationFunction.RELU, 0, params)
    else:
        # two outputs per continuous action dimension -- NOTE(review): the
        # factor of 2 suggests a (mean, spread)-style parameterization;
        # confirm against the train/test network code.
        g = NEAT.Genome(0, len(np.reshape(env.observation_space.sample(), -1)),
                        0, 2 * len(np.reshape(env.action_space.sample(), -1)),
                        False, NEAT.ActivationFunction.LINEAR,
                        NEAT.ActivationFunction.RELU, 0, params)

    env.close()
    population = NEAT.Population(g, params, True, 1.0, seed)
    population.RNG.Seed(seed)

    run_neat = configure_neat(population, train_network)
    iterator = run_neat()

    # generations per yielded batch (at least 1)
    generations_per_batch = max(
        (max_evaluations / num_batches) / params.PopulationSize, 1)

    current_best = None
    i = 0

    while i * params.PopulationSize < max_evaluations:
        for _ in xrange(generations_per_batch):
            generation, current_best = iterator.next()
            i += 1
            assert i == generation

        # current_best is a pickled genome produced by the NEAT iterator
        best = pickle.loads(current_best)
        results = test_network(best)
        neurons, connections = network_size(best)
        yield i * params.PopulationSize, results, neurons, connections
Example #4
0
def grid2d_substrate(inputs: int, hidden_layers: int, nodes_per_layer: 'list[int]', outputs: int, leaky=True):
    """Build a 2D HyperNEAT substrate laid out on a grid.

    Inputs line the left edge (x = -1), outputs the right edge (x = 1), and
    each hidden layer is a vertical column at an evenly spaced x in between.

    :param inputs: number of input nodes.
    :param hidden_layers: number of hidden layers.
    :param nodes_per_layer: node count per hidden layer (length >= hidden_layers).
    :param outputs: number of output nodes.
    :param leaky: unused in this body -- presumably a leftover or consumed by
        callers; confirm before removing.
    :return: configured neat.Substrate with query-weights-only enabled.
    """
    input_nodes = get_2d_point_line(inputs, (-1.0, -1.0), (-1.0, 1.0))
    hidden_nodes = []
    # hidden_layers + 2 x-positions: the endpoints coincide with the input and
    # output columns, so only the interior positions (i + 1) are used below
    layers = get_2d_point_line(hidden_layers + 2, (-1, 0), (1, 0))
    for i in range(hidden_layers):
        layer_x = layers[i + 1][0]
        hidden_nodes += get_2d_point_line(nodes_per_layer[i], (layer_x, -1.0), (layer_x, 1.0))
    output_nodes = get_2d_point_line(outputs, (1.0, -1.0), (1.0, 1.0))

    subst = neat.Substrate(input_nodes, hidden_nodes, output_nodes)
    # presumably restricts the CPPN query to connection weights only
    subst.m_query_weights_only = True
    return subst
Example #5
0
def Draw(x, size=(300, 300)):
    """Render a network (or a genome's phenotype) onto a dark RGB image.

    Accepts either a ready mneat.NeuralNetwork or a genome that supports
    BuildPhenotype; returns the rendered uint8 image.
    """
    canvas = np.full((size[0], size[1], 3), 10, dtype=np.uint8)

    if isinstance(x, mneat.NeuralNetwork):
        network = x
    else:
        # x is a genome: build its phenotype before drawing
        network = mneat.NeuralNetwork()
        x.BuildPhenotype(network)

    DrawPhenotype(canvas, (0, 0, 250, 250), network)
    return canvas
Example #6
0
    def build_phenotype(self, current_genome):
        """
        Constructs an agent phenotype from its genotype.

        :param current_genome: agent genome.
        :return: agent phenotype network.
        """
        phenotype = mneat.NeuralNetwork()
        current_genome.BuildPhenotype(phenotype)
        return phenotype
Example #7
0
    def __init__(self,
                 population_size=100,
                 input_size=9,
                 output_size=1,
                 generations=50):
        """Initialize the trainer.

        Keyword arguments:
        population_size -- number of genomes per generation (default 100)
        input_size -- size of the input state + 1 (default 9)
        output_size -- size of the result of the genome neural networks (default 1)
        generations -- number of generations (default 50)
        """
        self.generations = generations

        # NEAT hyper-parameters: only the population size deviates from defaults
        self.params = NEAT.Parameters()
        self.params.PopulationSize = population_size

        # seed genome: no hidden neurons, unsigned-sigmoid activations throughout
        seed_genome = NEAT.Genome(0, input_size, 0, output_size, False,
                                  NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                                  NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0,
                                  self.params, 0)
        self.population = NEAT.Population(seed_genome, self.params, True, 1.0, 0)
Example #8
0
def evaluate(genome):
    # create a neural network for the genome
    net = NEAT.NeuralNetwork()
    genome.BuildESHyperNEATPhenotype(net, substrate, params)

    # create a board
    board = Board(width, height)
    canAddPiece = 1
    current = 0
    line = 0
    maxPieces = 300

    # while the game is not over
    while (current < maxPieces):
        # get a piece
        pieceNum = random.randint(0, 6)
        piece = Piece(pieceNum)

        # pass board configuation & piece to the neural network
        conf = board.getBoard().ravel()
        c2, c3, c4, c5 = [piece.getPieceArray(i).ravel() for i in range(4)]
        conf = np.concatenate([conf, c2, c3, c4, c5])

        #conf = [tuple(row) for row in conf]
        np.append(conf, pieceNum)
        np.append(conf, 1.0)  # bias

        net.Input(conf)
        net.Activate()
        output = net.Output()

        if PRINT:
            print "output:", output[0], "outout", output[1]
        ## Found bug, output isnt always between 0 and 1
        col = int(sum(output[0:10]))
        rot = int(sum(output[10:14]))
        print col, rot
        ##print "output1:", output[0], "output2:", output[1]

        # update the board by the output we get
        ##print "rot: ", rot, "col: ", col
        canAddPiece = board.addPiece(piece, rot, col)
        line = line + board.clearRows()
        if canAddPiece == -1:
            #print board
            break
        current += 1

    # evaluate fitness
    #return line
    print "Rows Cleared:", line, " Pieces: ", current
    return current
Example #9
0
def evaluate(genome):
    """Average reward over `trials` episodes, offset so fitness stays positive."""
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    total = 0
    for _ in range(trials):
        total += do_trial(net)

    # large constant offset keeps NEAT fitness positive for negative rewards
    return 1000000 + total / trials
Example #10
0
def evolve_neat(params, generations, out_dir, run_id, pool):
    """Run NEAT for a fixed number of generations, logging progress and
    saving a simulation of every new best genome.

    :param params: parameter object passed to evaluate/create_initial_population.
    :param generations: number of generations to run.
    :param out_dir: output directory handed to simulate_and_save.
    :param run_id: label used only as a log prefix.
    :param pool: optional multiprocessing-style pool with starmap; falls back
        to serial evaluation when falsy.
    """
    pop = create_initial_population(params)
    max_ever = None  # best fitness seen across all generations so far
    t = time.time()

    for generation in range(generations):
        print(run_id, 'Starting generation', generation)
        genomes = NEAT.GetGenomeList(pop)

        # evaluate in parallel when a pool is provided, else serially
        if pool:
            data = [(g, g.GetGenomeTraits(), params) for g in genomes]
            fitnesses = pool.starmap(evaluate, data)
        else:
            fitnesses = [
                evaluate(g, g.GetGenomeTraits(), params) for g in genomes
            ]

        NEAT.ZipFitness(genomes, fitnesses)

        maxf, meanf = max(fitnesses), sum(fitnesses) / float(len(fitnesses))
        runtime = time.time() - t
        t = time.time()

        print()
        print('Generation %i ran in %f, %f per coral' % \
                                    (generation, runtime, runtime/len(genomes)))
        print('Max fitness:', maxf, 'Mean fitness:', meanf)

        if max_ever is None or maxf > max_ever:
            best = pop.GetBestGenome()
            max_ever = maxf
            print('New best fitness.', best.NumNeurons(), best.NumLinks())
            # NOTE(review): `coral` is unused here -- simulate_and_save seems
            # to be called for its side effect of saving the best genome's run.
            coral = simulate_and_save(best, params, out_dir, generation, maxf,
                                      meanf)[0]

        pop.Epoch()
        print('#' * 80)

    print('Run Complete.')
Example #11
0
def get_signals(genome, df):
    """Feed each dataframe row (minus its first column) through the genome's
    phenotype network; return the first network output per row."""
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    outputs = []
    for _, row in df.iterrows():
        features = list(row)[1:]  # drop the leading column before feeding
        net.Input(features)
        net.Activate()
        outputs.append(net.Output()[0])
    return outputs
def getbest(i):
    """Evolve a CPPN for the substrate task, reporting champion complexity
    each generation; stop once best fitness exceeds 15.0.

    Returns the index of the last generation that ran.
    """
    seed_genome = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                              substrate.GetMinCPPNOutputs(), False,
                              NEAT.ActivationFunction.TANH,
                              NEAT.ActivationFunction.TANH,
                              0, params, 0)

    pop = NEAT.Population(seed_genome, params, True, 1.0, i)
    pop.RNG.Seed(i)

    for generation in range(max_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate,
                                              display=False)
        for member, fitness in zip(genome_list, fitnesses):
            member.SetFitness(fitness)

        # report the complexity of the current champion's phenotype
        net = NEAT.NeuralNetwork()
        pop.GetBestGenome().BuildPhenotype(net)
        complexity = "complexity ({}, {})".format(net.NumHiddenNeurons(),
                                                  net.NumConnections())
        print('Gen: %d/%d Best: %3.5f. %s' %
              (generation, max_generations - 1, max(fitnesses), complexity))

        best = max(fitnesses)

        pop.Epoch()
        generations = generation

        if best > 15.0:
            break

    return generations
Example #13
0
def evaluate(genome):
    """Score a genome's phenotype on a GPU-evaluated XOR batch."""
    phenotype = NEAT.NeuralNetwork()
    genome.BuildPhenotype(phenotype)
    phenotype.Flush()
    executor = GpuExec()

    # presumably 4 cases of 3 inputs each, per the eval(..., 3, 4, ...) call
    batch = np.array([1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1], dtype=np.float32)
    predictions = executor.eval(batch, 3, 4, phenotype)

    expected = [1, 1, 0, 0]
    total_error = np.sum(np.abs(predictions - expected))
    return (4 - total_error) ** 2
def create_params():
    """Create NEAT.Parameters for a CPPN run (signed/linear activation palette).

    Note: the 'Treshold' spellings are MultiNEAT's own attribute names.
    """
    params = NEAT.Parameters()
    params.PopulationSize = 150

    # --- speciation ---
    params.DynamicCompatibility = True
    params.CompatTreshold = 3.0
    params.YoungAgeTreshold = 15
    params.SpeciesMaxStagnation = 100
    params.OldAgeTreshold = 35
    params.MinSpecies = 5
    params.MaxSpecies = 10
    params.RouletteWheelSelection = False

    # --- structural and weight mutation ---
    params.MutateRemLinkProb = 0.02
    params.RecurrentProb = 0
    params.OverallMutationRate = 0.15
    params.MutateAddLinkProb = 0.1
    params.MutateAddNeuronProb = 0.03
    params.MutateWeightsProb = 0.90
    params.MaxWeight = 8.0
    params.WeightMutationMaxPower = 0.2
    params.WeightReplacementMaxPower = 1.0

    # --- activation parameter ("A") mutation ---
    params.MutateActivationAProb = 0.0
    params.ActivationAMutationMaxPower = 0.5
    params.MinActivationA = 0.05
    params.MaxActivationA = 6.0

    params.MutateNeuronActivationTypeProb = 0.3

    # --- activation function palette: enable signed gauss/sigmoid/sine and
    # linear with equal weight; disable everything else ---
    params.ActivationFunction_SignedGauss_Prob = 1.0
    params.ActivationFunction_SignedSigmoid_Prob = 1.0
    params.ActivationFunction_SignedSine_Prob = 1.0
    params.ActivationFunction_Linear_Prob = 1.0

    params.ActivationFunction_Tanh_Prob = 0.0
    params.ActivationFunction_SignedStep_Prob = 0.0
    params.ActivationFunction_UnsignedSigmoid_Prob = 0.0
    params.ActivationFunction_TanhCubic_Prob = 0.0
    params.ActivationFunction_UnsignedStep_Prob = 0.0
    params.ActivationFunction_UnsignedGauss_Prob = 0.0
    params.ActivationFunction_Abs_Prob = 0.0
    params.ActivationFunction_UnsignedSine_Prob = 0.0

    # traits are not evolved
    params.MutateNeuronTraitsProb = 0
    params.MutateLinkTraitsProb = 0

    params.AllowLoops = False

    return params
def build_parameters():
    """Create NEAT.Parameters for a mutation-only run (crossover disabled,
    unsigned-sigmoid activations only).

    Note: the 'Treshold' spellings are MultiNEAT's own attribute names.
    """
    params = NEAT.Parameters()
    params.PopulationSize = 100
    # --- speciation ---
    params.DynamicCompatibility = True
    params.NormalizeGenomeSize = True
    params.WeightDiffCoeff = 0.1
    params.CompatTreshold = 2.0
    params.YoungAgeTreshold = 15
    params.SpeciesMaxStagnation = 15
    params.OldAgeTreshold = 35
    params.MinSpecies = 2
    params.MaxSpecies = 10
    params.RouletteWheelSelection = False
    params.RecurrentProb = 0.0
    params.OverallMutationRate = 1.0

    params.ArchiveEnforcement = False

    # --- weight mutation ---
    params.MutateWeightsProb = 0.05

    params.WeightMutationMaxPower = 0.5
    params.WeightReplacementMaxPower = 8.0
    params.MutateWeightsSevereProb = 0.0
    params.WeightMutationRate = 0.25
    params.WeightReplacementRate = 0.9

    params.MaxWeight = 8

    # --- structural mutation ---
    params.MutateAddNeuronProb = 0.001
    params.MutateAddLinkProb = 0.3
    params.MutateRemLinkProb = 0.0

    # fixed activation slope (classic 4.9 sigmoid steepness)
    params.MinActivationA = 4.9
    params.MaxActivationA = 4.9

    # unsigned sigmoid only
    params.ActivationFunction_SignedSigmoid_Prob = 0.0
    params.ActivationFunction_UnsignedSigmoid_Prob = 1.0
    params.ActivationFunction_Tanh_Prob = 0.0
    params.ActivationFunction_SignedStep_Prob = 0.0

    # crossover disabled entirely; reproduction is mutation-only
    params.CrossoverRate = 0.0
    params.MultipointCrossoverRate = 0.0
    params.SurvivalRate = 0.2

    # traits are not evolved
    params.MutateNeuronTraitsProb = 0
    params.MutateLinkTraitsProb = 0

    params.AllowLoops = True
    params.AllowClones = True

    return params
Example #16
0
def getbest(run):
    """Evolve a CPPN with ES-HyperNEAT on the XOR task, visualizing the
    species leader's CPPN and substrate network each generation.

    Returns the generation index at which the run stopped.

    Bug fix: `generations` was previously assigned only after the early-exit
    check, so solving the task on generation 0 raised NameError (and later
    breaks returned the previous generation's index). It is now tracked at
    the top of every pass.
    """
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params)

    pop = NEAT.Population(g, params, True, 1.0, run)
    generations = 0
    for generation in range(1000):
        generations = generation

        # Evaluate genomes
        genome_list = NEAT.GetGenomeList(pop)
        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate_xor,
                                              display=False)
        for genome, fitness in zip(genome_list, fitnesses):
            genome.SetFitness(fitness)

        print('Gen: %d Best: %3.5f' % (generation, max(fitnesses)))

        # Visualize the leader's genome (the CPPN itself)
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net)
        cv2.imshow("CPPN", img)

        # Visualize the leader's phenotype (the substrate network)
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildESHyperNEATPhenotype(
            net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True)
        cv2.imshow("NN", img)
        cv2.waitKey(1)

        if max(fitnesses) > 15.0:
            break

        # Epoch
        pop.Epoch()

    return generations
Example #17
0
def evolve():
    """Evolve a linear network to approximate x / 2, plotting the best genome
    each generation; stop when the (negated) error drops below 0.01.

    Returns the index of the last generation that ran.
    """
    g = NEAT.Genome(0, 2, 0, 1, False, NEAT.ActivationFunction.LINEAR,
                    NEAT.ActivationFunction.LINEAR, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, 1)
    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is its
    # documented replacement for a monotonic float timer.
    pop.RNG.Seed(int(time.perf_counter() * 100))

    generations = 0
    for generation in range(50):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = []
        for genome in genome_list:
            fitness_list.append(evaluate(genome))
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        # fitnesses are negated errors, so -max() recovers the best error
        best = -max(fitness_list)
        bestG = pop.GetBestGenome()

        plot_nn(bestG)
        plt.pause(0.01)
        plt.ion()
        plt.show(block=False)

        print("Mejor fitness [",generation,"]: ",best)
        if best < 0.01:
            break

    # sanity-check the champion on inputs outside the training range
    testNet = NEAT.NeuralNetwork()
    bestG.BuildPhenotype(testNet)
    for i in range(10):
        testNet.Flush()
        testNet.Input(np.array([float(100+2*i), 1]))
        for _ in range(2):
            testNet.Activate()
        o = testNet.Output()
        print(100+2*i,"/ 2 = ",o[0])

    return generations
def evaluate_solutions(robot, obj_func_coeffs, generation):
    """Evaluate every robot genome against the maze, then assign fitnesses
    combining goal distance and novelty.

    :param robot: holder of the robot population and its novelty archive.
    :param obj_func_coeffs: candidate objective-function coefficient sets.
    :param generation: current generation number (passed to the simulator).
    :return: (best_robot_genome, solution_found, max_fitness, distances,
              best_coeffs, best_distance, best_novelty)
    """
    best_robot_genome = None
    solution_found = False
    distances = []
    n_items_list = []
    # evaluate robot genomes against maze simulation
    robot_genomes = NEAT.GetGenomeList(robot.population)
    for genome in robot_genomes:
        found, distance, n_item = evaluate_individual_solution(
            genome=genome, generation=generation, robot=robot)
        # store returned values
        distances.append(distance)
        n_items_list.append(n_item)

        if found:
            best_robot_genome = genome
            solution_found = True

    # evaluate novelty scores of robot genomes and calculate fitness
    max_fitness = 0
    best_coeffs = None
    best_distance = 1000
    best_novelty = 0
    for i, n_item in enumerate(n_items_list):
        novelty = robot.archive.evaluate_novelty_score(
            item=n_item, n_items_list=n_items_list)
        # sanity check: novelty items must stay aligned with their genomes
        assert robot_genomes[i].GetID() == n_item.genomeId

        # combine goal distance and novelty into a fitness score
        fitness, coeffs = evaluate_solution_fitness(distances[i], novelty,
                                                    obj_func_coeffs)
        robot_genomes[i].SetFitness(fitness)

        if not solution_found:
            # no winner yet: track the fittest genome in the population
            if max_fitness < fitness:
                max_fitness = fitness
                best_robot_genome = robot_genomes[i]
                best_coeffs = coeffs
                best_distance = distances[i]
                best_novelty = novelty
        elif best_robot_genome.GetID() == n_item.genomeId:
            # a winner exists: record the winner's own stats instead
            max_fitness = fitness
            best_coeffs = coeffs
            best_distance = distances[i]
            best_novelty = novelty

    return best_robot_genome, solution_found, max_fitness, distances, best_coeffs, best_distance, best_novelty
Example #19
0
def hillclimb(g,params,evals,evaluate,seed=1):
    """Simple (1+1) hill climber: repeatedly mutate the champion and keep
    whichever of the pair scores higher.

    Returns (champion_genome, champion_fitness) after `evals` mutations.
    NOTE: Python 2 code (xrange).
    """
    pop = NEAT.Population(g, params, True, 1.0, seed)
    pop.RNG.Seed(seed)

    species = pop.Species[0]
    champ = g
    c_fitness, beh = evaluate(champ)
    champ.SetFitness(c_fitness)
    champ.SetEvaluated()

    for _ in xrange(evals):
        baby = NEAT.Genome(champ)  # clone the current champion
        species.MutateGenome(False, pop, baby, params, pop.RNG)
        b_fitness, beh = evaluate(baby)
        if b_fitness > c_fitness:
            # mutant wins: it becomes the new champion
            c_fitness = b_fitness
            champ.Destroy()
            champ = baby
        else:
            baby.Destroy()
    return champ, c_fitness
Example #20
0
def evaluate_xor(genome):
    """Evaluate a genome by driving a TORCS race with its substrate network
    and reading back the fitness the simulation writes to 'mydata.txt'.

    Despite the name, this is not an XOR test: the network is handed to
    MyDriver and scored by the external race scripts.
    """
    # NOTE(review): this first phenotype is immediately overwritten below;
    # BuildHyperNEATPhenotype seems to be called only for any side effects --
    # confirm whether these two lines can be removed.
    net = NEAT.NeuralNetwork()
    genome.BuildHyperNEATPhenotype(net, substrate)

    # visualize the CPPN itself
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)
    img = np.zeros((400, 400, 3), dtype=np.uint8)
    img += 10
    NEAT.DrawPhenotype(img, (0, 0, 400, 400), net)
    cv2.imshow("CPPN", img)
    # visualize the substrate network (the phenotype actually raced below)
    net = NEAT.NeuralNetwork()
    genome.BuildESHyperNEATPhenotype(net, substrate, params)
    img = np.zeros((800, 800, 3), dtype=np.uint8)
    img += 10

    NEAT.DrawPhenotype(img, (0, 0, 800, 800), net, substrate=True)
    cv2.imshow("NN", img)
    cv2.waitKey(33)

    # start TORCS, run the driver, then read the fitness the race wrote out
    subprocess.call('./autostart.sh',shell=True)
    main(MyDriver(net=net))
    with open("mydata.txt",'r') as f:
        fitt=f.read()
    subprocess.call('./autostop.sh',shell=True)
    return float(fitt)
Example #21
0
def evaluate_xor(genome):
    """Score a genome on 3-input XOR (two inputs plus a constant bias of 1)
    built via ES-HyperNEAT.

    Returns (4 - total_error)**2, or 0.0 if phenotype construction or
    activation raises.

    Refactor: the four copy-pasted test cases are now data-driven; the
    `correct` counter was computed but never used and has been removed.
    """
    net = NEAT.NeuralNetwork()

    try:
        genome.BuildESHyperNEATPhenotype(net, substrate, params)
        depth = 3  # activation passes per case, as in the original

        # (inputs including bias, expected output) for the four XOR cases
        cases = (
            ([1, 0, 1], 1),
            ([0, 1, 1], 1),
            ([1, 1, 1], 0),
            ([0, 0, 1], 0),
        )

        error = 0
        for inputs, target in cases:
            net.Flush()
            net.Input(inputs)
            for _ in range(depth):
                net.Activate()
            o = net.Output()
            error += abs(o[0] - target)

        return (4 - error)**2

    except Exception as ex:
        print('Exception:', ex)
        return 0.0
Example #22
0
def getbest():
    """Evolve a Gauss-activated CPPN, visualizing the first species leader's
    CPPN and HyperNEAT phenotype each generation; stop once the best leader
    fitness exceeds 15.5.

    Returns the index of the last generation that ran.
    """
    seed_genome = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                              substrate.GetMinCPPNOutputs(), False,
                              NEAT.ActivationFunction.SIGNED_GAUSS,
                              NEAT.ActivationFunction.SIGNED_GAUSS, 0, params)

    pop = NEAT.Population(seed_genome, params, True, 1.0)

    for generation in range(1000):
        members = NEAT.GetGenomeList(pop)
        fitnesses = NEAT.EvaluateGenomeList_Serial(members,
                                                   evaluate,
                                                   display=False)
        for member, fitness in zip(members, fitnesses):
            member.SetFitness(fitness)

        best = max(x.GetLeader().GetFitness() for x in pop.Species)

        # draw the leader's CPPN...
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.full((250, 250, 3), 10, dtype=np.uint8)
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net)
        cv2.imshow("CPPN", img)

        # ...and the substrate network it generates
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildHyperNEATPhenotype(net, substrate)
        img = np.full((250, 250, 3), 10, dtype=np.uint8)
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net, substrate=True)
        cv2.imshow("NN", img)

        cv2.waitKey(1)

        pop.Epoch()
        generations = generation
        if best > 15.5:
            break

    return generations
def run_experiment(params, trial_id, n_generations, out_dir=None, view_results=False, save_results=True):
    """Run one NEAT trial and report whether it solved the task.

    Returns (solved, generations, complexity, max_fitness).
    NOTE(review): out_dir and save_results are accepted but unused in this
    body -- presumably consumed elsewhere; kept for interface compatibility.
    """
    seed_genome = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                              NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params, 0)
    pop = NEAT.Population(seed_genome, params, True, 1.0, trial_id)

    # reseed the population RNG from the wall clock
    seed = int(time.time())
    pop.RNG.Seed(seed)

    generations = 0
    solved = False
    max_fitness = 0
    complexity = 0
    for generation in range(n_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=view_results)
        NEAT.ZipFitness(genome_list, fitness_list)
        generations = generation

        champion = max(genome_list, key=get_fitness)
        best_fitness = champion.GetFitness()
        complexity = champion.NumNeurons() + champion.NumLinks()

        # 15.5 threshold chosen to match the limit used with other libraries
        solved = best_fitness > 15.5
        if solved:
            max_fitness = best_fitness
            print("Trial: %2d\tgeneration: %d\tfitness: %f\tcomplexity: %d\tseed: %d" % (trial_id, generations, max_fitness, complexity, seed))
            break

        # track the best fitness seen so far, then advance a generation
        max_fitness = max(best_fitness, max_fitness)
        pop.Epoch()

    if not solved:
        print("Trial: %2d\tFAILED\t\tfitness: %f\tcomplexity: %d\tseed: %d" % (trial_id, max_fitness, complexity, seed))

    return solved, generations, complexity, max_fitness
Example #24
0
def simulate_genome(genome, config):
    """Grow a grid from the genome's phenotype and score it against the target.

    Returns (fitness, grid), where fitness is the balanced accuracy of the
    grid versus config['target'].
    """
    network = NEAT.NeuralNetwork()
    traits = genome.GetGenomeTraits()
    genome.BuildPhenotype(network)
    genome.CalculateDepth()
    depth = genome.GetDepth()
    morphogens = []  # NOTE(review): unused in this body; kept as-is

    def score(grid):
        # balanced accuracy of the predicted grid against the target pattern
        return balanced_accuracy_score_np(pred=grid, true=config['target'])

    grid = run_simulation(network, depth, traits, config, score)
    return score(grid), grid
Example #25
0
    def __init__(self, parameter_data):
        """Rebuild a runnable agent (network + standardizer + effect) from
        previously serialized parameter data."""
        self.parameter_data = parameter_data
        # NOTE(review): pickle.loads on stored data -- safe only if the
        # parameter data comes from a trusted source.
        genotype = pickle.loads(self.parameter_data['genotype_pickled'])

        # turn the deserialized genotype into a runnable network
        self.net = NEAT.NeuralNetwork()
        genotype.BuildPhenotype(self.net)

        # restore the feature standardizer from its saved statistics
        self.standardizer = standardizer.Standardizer([])
        self.standardizer.feature_statistics = self.parameter_data[
            'feature_statistics']

        self.effect = effect.get_effect_instance(
            self.parameter_data['args']['effect_names'])

        # populated later; no effect parameters at construction time
        self.effect_parameters = None
def create_objective_fun(seed):
    """
    The function to create population of objective functions
    """
    params = create_objective_fun_params()
    # Seed genome: 1 input, 1 hidden, 2 outputs (coefficients a and b),
    # TANH hidden activation, unsigned-sigmoid outputs.
    genome = NEAT.Genome(0,
                         1,
                         1,
                         2,
                         False,
                         NEAT.ActivationFunction.TANH,
                         NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                         1,
                         params,
                         0)
    pop = NEAT.Population(genome, params, True, 1.0, seed)
    pop.RNG.Seed(seed)

    novelty_archive = archive.NoveltyArchive(
        metric=maze.maze_novelty_metric_euclidean)
    return ObjectiveFun(archive=novelty_archive, genome=genome, population=pop)
def create_robot_params():
    """
    The function to create NEAT hyper-parameters for population of robots

    Note: the 'Treshold' spellings are MultiNEAT's own attribute names.
    """
    params = NEAT.Parameters()
    params.PopulationSize = 250
    # --- speciation ---
    params.DynamicCompatibility = True
    params.AllowClones = False
    params.AllowLoops = True
    params.CompatTreshold = 2.0
    params.CompatTresholdModifier = 0.3
    params.YoungAgeTreshold = 15
    params.SpeciesMaxStagnation = 20
    params.OldAgeTreshold = 200
    params.MinSpecies = 3
    params.MaxSpecies = 20
    params.RouletteWheelSelection = True

    params.RecurrentProb = 0.2
    params.OverallMutationRate = 0.4

    params.LinkTries = 40
    params.SpeciesDropoffAge = 200
    params.DisjointCoeff = 1.0
    params.ExcessCoeff = 1.0

    # --- weight mutation ---
    params.MutateWeightsProb = 0.90
    params.WeightMutationMaxPower = 0.8
    params.WeightReplacementMaxPower = 5.0
    params.MutateWeightsSevereProb = 0.5
    params.WeightMutationRate = 0.75
    params.MaxWeight = 30.0
    params.MinWeight = -30.0

    # --- structural mutation ---
    params.MutateAddNeuronProb = 0.03
    params.MutateAddLinkProb = 0.05
    params.MutateRemLinkProb = 0.1

    params.Elitism = 0.1

    # --- crossover ---
    params.CrossoverRate = 0.8
    params.MultipointCrossoverRate = 0.6
    params.InterspeciesCrossoverRate = 0.01

    # --- traits ---
    params.MutateNeuronTraitsProb = 0.1
    params.MutateLinkTraitsProb = 0.1

    return params
Example #28
0
def evaluate(genome):
    """Fitness = negated mean error of the network approximating i -> i/2
    over i in [0, 100). Higher (closer to 0) is better."""
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    total_error = 0
    samples = 100
    for i in range(samples):
        net.Flush()
        net.Input(np.array([float(i), 1]))  # input value plus constant bias
        for _ in range(2):
            # two activation passes -- presumably to let signals propagate
            # through any hidden layer
            net.Activate()
        out = net.Output()
        total_error += abs(i / 2 - out[0])

    return -total_error / samples
def eval_individual(genome_id, genome, genomes, n_items_map, generation):
    """
    Evaluates the individual represented by genome.
    Arguments:
        genome_id:      The ID of genome.
        genome:         The genome to evaluate.
        genomes:        The genomes population for current generation.
        n_items_map:    The map to hold novelty items for current generation.
        generation:     The current generation.
    Return:
        The True if successful solver found.
    """
    # create NoveltyItem for genome and store it into map
    n_item = archive.NoveltyItem(generation=generation, genomeId=genome_id)
    n_items_map[genome_id] = n_item
    # run the simulation on a fresh copy of the maze so trials don't
    # contaminate each other
    maze_env = copy.deepcopy(trial_sim.orig_maze_environment)
    multi_net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(multi_net)
    control_net = ANN(multi_net)
    goal_fitness = maze.maze_simulation_evaluate(
                                        env=maze_env, 
                                        net=control_net, 
                                        time_steps=SOLVER_TIME_STEPS,
                                        n_item=n_item)

    # Store simulation results into the agent record
    record = agent.AgenRecord(generation=generation, agent_id=genome_id)
    record.fitness = goal_fitness
    record.x = maze_env.agent.location.x
    record.y = maze_env.agent.location.y
    record.hit_exit = maze_env.exit_found
    #record.species_id = trial_sim.population.species.get_species_id(genome_id)
    #record.species_age = record.generation - trial_sim.population.species.get_species(genome_id).created
    # add record to the store
    trial_sim.record_store.add_record(record)

    # Evaluate the novelty of a genome and add the novelty item to the archive of Novelty items if appropriate
    # (winners skip novelty scoring here)
    if not maze_env.exit_found:
        # evaluate genome novelty and add it to the archive if appropriate
        record.novelty = trial_sim.archive.evaluate_individual_novelty(genome=Genome(genome), 
                                                                        genomes=genomes, n_items_map=n_items_map)

    # update fittest organisms list
    trial_sim.archive.update_fittest_with_genome(genome=Genome(genome), n_items_map=n_items_map)

    return (maze_env.exit_found, goal_fitness)
Example #30
0
    def __init__(self, path=None):
        """Default coral-growth experiment configuration; optionally override
        attributes from a tab-separated key/value file.

        :param path: optional path to a file of "key<TAB>value" lines; each
            value is coerced to float (if it contains '.') or int, with
            'True'/'False' mapped to 1/0.
        """
        self.traits_calculated = False

        # Evolution.
        self.neat = NEAT.Parameters()
        self.neat.PopulationSize = 80
        self.neat.OldAgeTreshold = 10
        self.neat.SpeciesMaxStagnation = 10
        self.neat.MinSpecies = 2
        self.neat.MaxSpecies = 8
        self.neat.OverallMutationRate = 0.6
        self.neat.MutateAddNeuronProb = 0.05
        self.neat.MutateAddLinkProb = 0.05
        self.neat.AllowLoops = False

        # Coral Growth.
        self.max_polyps = 15000
        self.max_volume = 50.0
        self.max_steps = 150
        self.max_growth = .20
        self.max_defect = 1.4
        self.max_face_growth = 1.3

        self.n_morphogens = 2
        self.n_signals = 3
        self.n_memory = 0

        # Light / environment gradient.
        self.light_amount = 0.7
        self.gradient_height = 6.0
        self.gradient_bottom = 0.2
        self.collection_radius = 5
        self.C = .4

        self.morphogen_thresholds = 2
        self.morphogen_steps = 200
        self.use_polar_direction = False

        # presumably registers an evolvable integer trait -- defined elsewhere
        self.addTrait('energy_diffuse_steps', (0, 8), 'int')

        if path:
            # override defaults from the config file; numeric coercion only,
            # so non-numeric string values would raise ValueError here
            for line in open(path).readlines():
                key, value = line.strip().split('\t')
                if value == 'True': value = '1'
                if value == 'False': value = '0'
                setattr(self, key,
                        float(value) if '.' in value else int(value))
Example #31
0
#!/usr/bin/python
import os
import sys
import time
import random as rnd
import commands as comm
import cv2
import numpy as np
import cPickle as pickle
import MultiNEAT as NEAT
import multiprocessing as mpc


params = NEAT.loadParameters('params.txt')


# simple 2D substrate for XOR: 3 input points and 1 output point
# (no hidden nodes are passed here -- with EvolvableSubstrate they are
# presumably discovered by evolution; the original comment claiming
# "2 hidden" did not match the call)
substrate = NEAT.EvolvableSubstrate(params, [(-1, -1), (-1, 0), (-1, 1)],
                           [(1, 0)])

# activation functions for the generated phenotype's layers
substrate.m_hidden_nodes_activation = NEAT.ActivationFunction.TANH
substrate.m_outputs_nodes_activation = NEAT.ActivationFunction.UNSIGNED_SIGMOID

# visualization windows (resizable)
cv2.namedWindow('CPPN', 0)
cv2.namedWindow('NN', 0)


def evaluate(genome):
    net = NEAT.NeuralNetwork()
Example #32
0
import pyximport; pyximport.install()
import numpy
import math
import os
import sys
import time
import random as rnd
import numpy as np
import cPickle
import pickle as pickle
import MultiNEAT as NEAT
import niche_transform
print "loading..."

NEAT.import_array()
import matplotlib
matplotlib.use('gtkagg')
import pylab as plt

from render_vox_fast import render
import image_rec
from image_rec import run_image 
from melites import melites 
from melites import novsearch
from fool_eval import evaluate

from clint.arguments import Args
args = Args()
arg_dict = dict(args.grouped)

seed=10