Example #1
    def start_cycle(self, one_by_one=False):
        """Start the cycle.

        Keyword arguments:
        one_by_one -- evaluate the genomes one by one if True (default False)
        """
        if one_by_one:
            # play each genome in a game alone.
            for generation in range(self.generations):
                # retrieve genome list and call evaluation function for each one.
                genome_list = NEAT.GetGenomeList(self.population)
                best_fitness = 0
                print("generation", generation + 1, ":")
                print("testing " + str(self.params.PopulationSize) +
                      " genomes : ")
                i = 0
                for genome in genome_list:
                    i += 1
                    print(i, end=" ")
                    net = NEAT.NeuralNetwork()
                    genome.BuildPhenotype(net)
                    fitness = self.evaluate(genome, generation + 1, i)
                    if best_fitness < fitness:
                        best_fitness = fitness
                    genome.SetFitness(fitness)
                # print best fitness and advance to the next generation
                print("best fitness : ", best_fitness)
                print("=======================================")
                self.population.Epoch()
        else:
            # play all of the population at the same time
            for generation in range(self.generations):
                # retrieve genome list and build the players list.
                genome_list = NEAT.GetGenomeList(self.population)
                players = list()
                for genome in genome_list:
                    net = NEAT.NeuralNetwork()
                    genome.BuildPhenotype(net)
                    players.append(Dino_player_neat(net))
                # start game and retrieve fitness list.
                the_game = Dino_NEAT(players, generation)
                fitness = the_game.on_execute()
                if fitness is None:
                    print("Training stopped.")
                    break
                # assign each genome to its corresponding fitness.
                best_fitness = 0
                for i in range(len(fitness)):
                    genome = genome_list[i]
                    if best_fitness < fitness[i]:
                        best_fitness = fitness[i]
                    genome.SetFitness(fitness[i])
                # print best fitness and advance to the next generation
                print("generation", generation, ":", best_fitness)
                self.population.Epoch()
Example #2
def getbest(run):
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.TANH, NEAT.ActivationFunction.TANH,
                    0, params)

    pop = NEAT.Population(g, params, True, 1.0, run)
    for generation in range(1000):
        # Evaluate genomes
        genome_list = NEAT.GetGenomeList(pop)

        fitnesses = EvaluateGenomeList_Serial(genome_list,
                                              evaluate_xor,
                                              display=False)
        for genome, fitness in zip(genome_list, fitnesses):
            genome.SetFitness(fitness)

        print('Gen: %d Best: %3.5f' % (generation, max(fitnesses)))

        # Print best fitness
        # print("---------------------------")
        # print("Generation: ", generation)
        # print("max ", max([x.GetLeader().GetFitness() for x in pop.Species]))

        # Visualize best network's Genome

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net)
        cv2.imshow("CPPN", img)
        # Visualize best network's Phenotype
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildESHyperNEATPhenotype(
            net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True)
        cv2.imshow("NN", img)
        cv2.waitKey(1)

        generations = generation
        if max(fitnesses) > 15.0:
            break

        # Epoch
        pop.Epoch()

    return generations
Example #3
    def reformat_sim_info(self):

        # Pull out the Best Performer (Leader) Genotype and build its Phenotype
        net = mneat.NeuralNetwork()
        self.sim.pop.Species[0].GetLeader().BuildPhenotype(net)

        # Network characteristics for visualization

        #   - NODES
        out_node = [
            net.connections[connection].source_neuron_idx
            for connection in range(len(net.connections))
        ]
        in_node = [
            net.connections[connection].target_neuron_idx
            for connection in range(len(net.connections))
        ]

        nodes = list(set(out_node + in_node))

        types = [net.neurons[node].type for node in range(len(net.neurons))]

        #   - CONNECTIONS
        weights = [
            net.connections[connection].weight
            for connection in range(len(net.connections))
        ]

        edges = list(zip(out_node, in_node))  # materialize so the pairs survive repeated use under Python 3

        return nodes, types, edges, weights
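The nodes/types/edges/weights tuple above is shaped for a graph-drawing step elsewhere in the project. As a minimal sketch of one possible consumer (networkx is an assumption here; the original code does not show how the tuple is used), the lists map onto a directed graph like this:

import networkx as nx

def build_graph(nodes, types, edges, weights):
    # Hypothetical helper: 'nodes' are neuron indices, 'types' is indexed by neuron index,
    # 'edges' pairs (source, target) indices, and 'weights' aligns with 'edges'.
    g = nx.DiGraph()
    for node in nodes:
        g.add_node(node, type=types[node])
    for (source, target), weight in zip(edges, weights):
        g.add_edge(source, target, weight=weight)
    return g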
Example #4
def run_network(genome, env, episode_count=1):
    discrete_output = isinstance(env.action_space,
                                 gym.spaces.discrete.Discrete)
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    cumulated_reward = 0

    for _ in range(episode_count):
        ob = env.reset()
        net.Flush()

        for _ in range(env.spec.timestep_limit):
            net.Input(np.reshape(ob, -1))
            net.Activate()
            o = net.Output()
            if discrete_output:
                action = np.argmax(o)
            else:
                # Continuous actions: outputs are (mean, spread) pairs.
                assert len(o) % 2 == 0
                action = np.array([
                    np.random.normal(o[i], abs(o[i + 1]))
                    for i in range(0, len(o), 2)
                ])
            ob, reward, done, _ = env.step(action)
            cumulated_reward += reward

            if done:
                break

    return cumulated_reward
Example #5
def evaluate(genome):

    # this creates a neural network (phenotype) from the genome

    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    # let's input just one pattern to the net, activate it once and get the output

    net.Input([1.0, 0.0, 1.0])
    net.Activate()
    output = net.Output()

    # the output can be used like any other Python iterable. The commented-out line below
    # would reward a network whose first output stays close to 0.0 (the second output is
    # ignored); the active code instead folds the output around 0.5, so fitness peaks when
    # output[0] is exactly 0.5

    #fitness = 1.0 - output[0]

    fitness = 0
    if output[0] >= 0.5:
        output[0] = 1. - output[0]
    fitness = 0.5 - (0.5 - output[0])

    return fitness, output[0]
Example #6
def getbest():

    g = NEAT.Genome(0, 3, 0, 1, False,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params)
    pop = NEAT.Population(g, params, True, 1.0)

    generations = 0
    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                      evaluate,
                                                      display=False)
        NEAT.ZipFitness(genome_list, fitness_list)

        best = max([x.GetLeader().GetFitness() for x in pop.Species])
        #        print 'Best fitness:', best, 'Species:', len(pop.Species)

        # Visualize the current species leader's phenotype
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net)

        cv2.imshow("nn_win", img)
        cv2.waitKey(1)

        pop.Epoch()
        #        print "Generation:", generation
        generations = generation
        if best > 15.5:
            break

    return generations
Example #7
    def evaluate(self, genotype, generation):
        # this creates a neural network (phenotype) from the genome
        net = NEAT.NeuralNetwork()
        genotype.BuildPhenotype(net)

        nn = neat_net_wrapper.NeatNetWrapper(net)

        fitness_sum = 0.0
        punishment = 3.0  # punishment for partial captures, small misses and large captures

        for i in range(self.args.num_scenarios):
            nn.flush()

            seed = i + (997 * generation if self.args.mode == 'dynamic' else 0)

            beer_tracker = BeerTracker(
                nn=nn,
                seed=seed
            )
            beer_tracker.run()

            fitness_value = (
                1 * beer_tracker.world.agent.num_small_captures +
                (-punishment) * beer_tracker.world.agent.num_partial_captures +
                (-punishment) * beer_tracker.world.agent.num_small_misses +
                (-punishment) * beer_tracker.world.agent.num_large_captures
            )
            fitness_sum += fitness_value

        fitness_sum /= self.args.num_scenarios

        return fitness_sum
Example #8
def testNetwork(data, n_classes, genome, evals, file, seed):
    genome = pickle.loads(genome)
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    predictions = []

    for example in data["X_test"]:
        net.Flush()
        net.Input(example)
        for _ in range(3):
            net.Activate()

        result = softmax(net.Output())
        guess = np.argmax(result)
        predictions.append(guess)

    cm = confusion_matrix(data["y_test"], predictions)
    # The number of correct predictions is the trace of the confusion matrix.
    corr = np.trace(cm)

    acc = corr / float(len(predictions))
    # architecture, seed, evals, acc
    file.write("neat %d %d %f\n" % (seed, evals, acc))
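The softmax helper used above (and in several other snippets in this listing) is not defined anywhere here; a minimal, numerically stable NumPy version it plausibly corresponds to:

import numpy as np

def softmax(x):
    # Subtract the maximum before exponentiating for numerical stability.
    x = np.asarray(x, dtype=np.float64)
    e = np.exp(x - np.max(x))
    return e / e.sum()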
Example #9
def getbest(i):
    g = NEAT.Genome(0, 3, 0, 1, False, NEAT.ActivationFunction.UNSIGNED_SIGMOID,
                    NEAT.ActivationFunction.UNSIGNED_SIGMOID, 0, params, 0)
    pop = NEAT.Population(g, params, True, 1.0, i)
    # pop.RNG.Seed(int(time.clock()*100))
    pop.RNG.Seed(1234)

    generations = 0
    for generation in range(max_generations):
        genome_list = NEAT.GetGenomeList(pop)
        fitness_list = EvaluateGenomeList_Serial(genome_list, evaluate, display=False)
        # fitness_list = EvaluateGenomeList_Parallel(genome_list, evaluate, display=False)
        NEAT.ZipFitness(genome_list, fitness_list)
        pop.Epoch()
        generations = generation
        best = max(fitness_list)
        if best > 15.0:
            break

    net = NEAT.NeuralNetwork()
    pop.GetBestGenome().BuildPhenotype(net)

    # img = NEAT.viz.Draw(net)
    # cv2.imshow("current best", img)
    # cv2.waitKey(1)
    
    return generations, net.NumHiddenNeurons(), net.NumConnections()
Example #10
    def validator(self, NEAT_file):
        """ Validate a single run. 

        Args:
            NEAT_file: file for the NEAT genome
        """
        global man, quadruped

        # Initialize the manager to be unique to the process.
        man = ODEManager(near_callback,
                         stepsize=self.dt / self.n,
                         log_data=self.log_frames,
                         run_num=self.run_num)

        # Initialize the quadruped
        quadruped = Quadruped(man=man)

        # If logging the output, tell manager to write the body type, dimensions, and position to the logging file.
        if self.log_frames:
            man.log_world_setup(self.eval_time, ind_num=self.run_num)

        # Load in the best performing NEAT genome
        genome = NEAT.Genome(NEAT_file)
        self.current_network = NEAT.NeuralNetwork()
        if not self.hyperNEAT:
            genome.BuildPhenotype(self.current_network)
        else:
            genome.BuildHyperNEATPhenotype(self.current_network,
                                           self.substrate)

        fit = self.physics_only_simulation_validator()

        print(fit)
Example #11
    def objF(genome):
        net = NEAT.NeuralNetwork()
        genome.BuildPhenotype(net)

        random_state = np.random.get_state()
        np.random.seed(generation)
        sampled_data = np.random.choice(len(X_train),
                                        num_samples,
                                        replace=False)
        np.random.set_state(random_state)
        cur_data = X_train[sampled_data]
        cur_label = y_train[sampled_data]

        cum_loss = 0

        for example, cor in zip(cur_data, cur_label):
            net.Flush()
            net.Input(example)
            for _ in range(3):
                net.Activate()

            result = softmax(net.Output())
            # Multiclass hinge loss against the correct class.
            loss_sum = 0
            for q, out in enumerate(result):
                if q != cor:
                    loss_sum += max(0, out - result[int(cor)] + 1)
            # Could also train with all results (e.g. l2 or logloss)
            # if guess == cor:
            #     cum_loss += 1
            cum_loss += loss_sum

        # Return negative loss because a higher fitness must mean a better network
        return -cum_loss
Example #12
def evaluate_individ_obj_function(genome, generation):
    """
    The function to evaluate individual objective function
    Arguments:
        genome:     The objective function genome
        generation: The current generation of evolution
    Returns:
        The NoveltyItem created using evaluation results.
    """
    # create NoveltyItem for genome and store it into map
    genome_id = genome.GetID()
    n_item = archive.NoveltyItem(generation=generation, genomeId=genome_id)
    # run the simulation
    multi_net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(multi_net)
    depth = 2
    try:
        genome.CalculateDepth()
        depth = genome.GetDepth()
    except Exception:
        pass
    obj_net = ANN(multi_net, depth=depth)

    # set inputs and get outputs ([a, b])
    output = obj_net.activate([0.5])

    # store coefficients
    n_item.data.append(output[0])
    n_item.data.append(output[1])

    return n_item
Example #13
    def evaluate_individual(self, genome):
        """ Evaluate an individual solution. 

        Args:
            genome: genome of the individual to evaluate

        Returns:
            fitness value of the individual
        """
        global man, current_network, quadruped

        # Initialize the manager to be unique to the process.
        man = ODEManager(near_callback,
                         stepsize=self.dt / self.n,
                         log_data=self.log_frames)

        # Initialize the quadruped
        quadruped = Quadruped(man=man)

        # Load in the ANN from the population
        self.current_network = NEAT.NeuralNetwork()
        if not self.hyperNEAT:
            genome.BuildPhenotype(self.current_network)
        else:
            genome.BuildHyperNEATPhenotype(self.current_network,
                                           self.substrate)

        # Conduct the evaluation
        fit = self.physics_only_simulation()
        #print(genome.GetID(), fit)

        return fit, len(self.current_network.neurons), len(
            self.current_network.connections)
Example #14
def simulate_genome(genome, traits, params, export_folder=None, verbose=False):
    network = NEAT.NeuralNetwork()
    genome.BuildPhenotype(network)

    genome.CalculateDepth()
    depth = genome.GetDepth()
    return simulate_network(network, depth, traits, params, export_folder, verbose)
Example #15
def evaluate(genome):
    multi_net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(multi_net)

    multi_net.Flush()
    fitness = cart.eval_fitness(net=ANNWrapper(multi_net))
    return fitness
Example #16
        def run_network(genome, cur_data, cur_label):
            net = NEAT.NeuralNetwork()
            genome.BuildPhenotype(net)
            try:
                actual_results = []
                results = []
                for example in cur_data:
                    net.Flush()
                    net.Input(example)
                    for _ in range(3):
                        net.Activate()

                    actual_result = net.Output()
                    result = softmax(actual_result)
                    # print "NEW"
                    # for element in actual_result:
                    #     print element
                    # print result
                    actual_results.append([actual_result[0], actual_result[1]])
                    results.append(result)

                predictions = [np.argmax(result) for result in results]

                acc = accuracy_score(cur_label, predictions)
                cost = log_loss(cur_label, results)
            except Exception:
                print(actual_results)
                print(results)
                # acc and cost are undefined at this point, so re-raise instead of
                # falling through to the return below
                raise
            return acc, cost
Example #17
    def produce_output_sound(self, that_individual):
        output_filename = '{0}.cross_adapted.{1}.wav'.format(
            self.input_sound.filename, that_individual.get_id())

        # this creates a neural network (phenotype) from the genome
        net = NEAT.NeuralNetwork()
        that_individual.genotype.BuildPhenotype(net)

        output_vectors = []
        for input_vector in self.neural_input_vectors:
            net.Flush()
            net.Input(input_vector)
            net.Activate()
            output = net.Output()
            output = [min(1.0, max(0.0, x)) for x in output]
            output_vectors.append(output)

        that_individual.set_neural_output(list(zip(*output_vectors)))

        process, resulting_sound, csd_path = self.cross_adapt(
            parameter_vectors=output_vectors,
            effect=self.effect,
            output_filename=output_filename)

        return process, resulting_sound, csd_path
Example #18
def getbest():
    g = NEAT.Genome(0, substrate.GetMinCPPNInputs(), 0,
                    substrate.GetMinCPPNOutputs(), False,
                    NEAT.ActivationFunction.SIGNED_GAUSS,
                    NEAT.ActivationFunction.SIGNED_GAUSS, 0, params)

    pop = NEAT.Population(g, params, True, 1.0)

    for generation in range(1000):
        genome_list = NEAT.GetGenomeList(pop)
        #    fitnesses = NEAT.EvaluateGenomeList_Parallel(genome_list, evaluate)
        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate,
                                                   display=False)
        for genome, fitness in zip(genome_list, fitnesses):
            genome.SetFitness(fitness)

        best = max([x.GetLeader().GetFitness() for x in pop.Species])
        #        print 'Best fitness:', best

        # Visualize the current leader's CPPN and HyperNEAT phenotype
        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net)
        cv2.imshow("CPPN", img)

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildHyperNEATPhenotype(net, substrate)
        img = np.zeros((250, 250, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 250, 250), net, substrate=True)
        cv2.imshow("NN", img)

        cv2.waitKey(1)

        pop.Epoch()
        #        print "Generation:", generation
        generations = generation
        if best > 15.5:
            break

    return generations
Example #19
def evaluate(genome):
    multi_net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(multi_net)

    multi_net.Flush()
    fitness = cart.eval_fitness(net=ANNWrapper(multi_net),
                                action_evaluator=cart.two_ouputs_action_evaluator)  # or: cart.tanh_action_evaluator
    return fitness
Example #20
def build_network_single(genome, method='neat', substrate=None, **kwargs):
    net = neat.NeuralNetwork()
    if method in ['neat', 'gdneat']:
        genome.BuildPhenotype(net)
    elif method == 'hyperneat':
        genome.BuildHyperNEATPhenotype(net, substrate)
    else:
        raise ValueError('Invalid method: {}'.format(method))
    return net
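Hypothetical usage of the helper above; the genome and substrate objects are assumed to come from the surrounding MultiNEAT setup:

net = build_network_single(genome, method='neat')
hyper_net = build_network_single(genome, method='hyperneat', substrate=substrate)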
Example #21
def getbest():
    g = NEAT.Genome(0, 7, 1, False, NEAT.ActivationFunction.SIGNED_SIGMOID,
                    NEAT.ActivationFunction.SIGNED_SIGMOID, params)

    pop = NEAT.Population(g, params, True, 1.0)

    for generation in range(2000):

        genome_list = NEAT.GetGenomeList(pop)
        #    fitnesses = NEAT.EvaluateGenomeList_Parallel(genome_list, evaluate)
        fitnesses = NEAT.EvaluateGenomeList_Serial(genome_list,
                                                   evaluate_xor,
                                                   display=True)
        for genome, fitness in zip(genome_list, fitnesses):
            genome.SetFitness(fitness)

        best = max([x.GetLeader().GetFitness() for x in pop.Species])

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().BuildPhenotype(net)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10
        NEAT.DrawPhenotype(img, (0, 0, 500, 500), net)
        cv2.imshow("CPPN", img)

        net = NEAT.NeuralNetwork()
        pop.Species[0].GetLeader().Build_ES_Phenotype(net, substrate, params)
        img = np.zeros((500, 500, 3), dtype=np.uint8)
        img += 10

        utilities.DrawPhenotype(img, (0, 0, 500, 500), net, substrate=True)
        cv2.imshow("NN", img)
        cv2.waitKey(1)

        generations = generation

        if best > 15.0:
            break

        pop.Epoch()

    return generations
Example #22
def evaluate_genome(env, genome, trials):
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)
    avg_reward = 0
    for trial in range(trials):
        f = do_trial(env, net, False)
        avg_reward += f
    avg_reward /= trials
    fitness = 20 + avg_reward
    return fitness
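do_trial is not shown in this snippet. A rough, hypothetical sketch of the rollout it implies, borrowing the gym-style pattern from Example #4 (the argmax action selection assumes a discrete action space):

import numpy as np

def do_trial(env, net, render=False):
    # One gym-style episode driven by the MultiNEAT phenotype.
    ob = env.reset()
    net.Flush()
    total_reward = 0.0
    while True:
        if render:
            env.render()
        net.Input(np.reshape(ob, -1))
        net.Activate()
        action = np.argmax(net.Output())  # assumption: discrete action space
        ob, reward, done, _ = env.step(action)
        total_reward += reward
        if done:
            break
    return total_reward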
Example #23
def Draw(x, size=(300, 300)):
    img = np.zeros((size[0], size[1], 3), dtype=np.uint8)
    img += 10

    if isinstance(x, mneat.NeuralNetwork):
        DrawPhenotype(img, (0, 0, 250, 250), x)
    else:
        nn = mneat.NeuralNetwork()
        x.BuildPhenotype(nn)
        DrawPhenotype(img, (0, 0, 250, 250), nn)

    return img
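Hypothetical usage, displaying the returned image with OpenCV as the other snippets in this listing do (the genome variable is illustrative):

img = Draw(genome)  # accepts either an mneat.NeuralNetwork or a genome
cv2.imshow("net", img)
cv2.waitKey(1)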
Example #24
    def build_phenotype(self, current_genome):
        """
        Constructs an agent phenotype from its genotype.
        
        :param current_genome: agent genome.
        :return: agent phenotype network.
        """

        net = mneat.NeuralNetwork()
        current_genome.BuildPhenotype(net)

        return net
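A minimal sketch of driving the returned phenotype; the agent instance and the three-element input are illustrative assumptions, not part of the original class:

net = agent.build_phenotype(current_genome)  # 'agent' is a hypothetical instance of this class
net.Flush()
net.Input([1.0, 0.0, 1.0])
net.Activate()
output = net.Output()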
Example #25
def evaluate(genome):
    # create a neural network for the genome
    net = NEAT.NeuralNetwork()
    genome.BuildESHyperNEATPhenotype(net, substrate, params)

    # create a board
    board = Board(width, height)
    canAddPiece = 1
    current = 0
    line = 0
    maxPieces = 300

    # while the game is not over
    while (current < maxPieces):
        # get a piece
        pieceNum = random.randint(0, 6)
        piece = Piece(pieceNum)

        # pass the board configuration & piece to the neural network
        conf = board.getBoard().ravel()
        c2, c3, c4, c5 = [piece.getPieceArray(i).ravel() for i in range(4)]
        conf = np.concatenate([conf, c2, c3, c4, c5])

        #conf = [tuple(row) for row in conf]
        # np.append returns a new array, so the results must be reassigned
        conf = np.append(conf, pieceNum)
        conf = np.append(conf, 1.0)  # bias

        net.Input(conf)
        net.Activate()
        output = net.Output()

        if PRINT:
            print("output:", output[0], "output:", output[1])
        ## Found bug: output isn't always between 0 and 1
        col = int(sum(output[0:10]))
        rot = int(sum(output[10:14]))
        print(col, rot)
        ##print("output1:", output[0], "output2:", output[1])

        # update the board by the output we get
        ##print "rot: ", rot, "col: ", col
        canAddPiece = board.addPiece(piece, rot, col)
        line = line + board.clearRows()
        if canAddPiece == -1:
            #print board
            break
        current += 1

    # evaluate fitness
    #return line
    print "Rows Cleared:", line, " Pieces: ", current
    return current
Example #26
def evaluate(genome):
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)
    net.Flush()
    ge = GpuExec()

    full_input = np.array([1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1], dtype=np.float32)
    out = ge.eval(full_input, 3, 4, net)
    # print(out)

    targets = [1, 1, 0, 0]
    err = np.abs(out - targets)
    return (4 - np.sum(err)) ** 2
Example #27
def get_signals(genome, df):
    # this creates a neural network (phenotype) from the genome
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    signals = []
    for index, row in df.iterrows():
        row_list = list(row)
        row_list.pop(0)
        net.Input(row_list)
        net.Activate()
        signals.append(net.Output()[0])
    return signals
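Hypothetical usage; best_genome and the 0.5 threshold are assumptions, not taken from the original snippet:

signals = get_signals(best_genome, df)
positions = [1 if s > 0.5 else 0 for s in signals]  # e.g. go long when the signal exceeds 0.5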
Example #28
def evaluate(genome):
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)

    avg_reward = 0

    for trial in range(trials):
        avg_reward += do_trial(net)

    avg_reward /= trials

    #print(avg_reward)
    return 1000000 + avg_reward
Example #29
def simGeneration():
	genomeList = NEAT.GetGenomeList(pop)

	#make graph
	graph = mp.Map(20, 20)

	for g in genomeList:
		net = NEAT.NeuralNetwork()
		g.BuildPhenotype(net)
		fitness = evaluate(net, graph)
		g.SetFitness(fitness)

	pop.Epoch()
Example #30
def evaluate_xor(genome):
    net = NEAT.NeuralNetwork()
    genome.BuildHyperNEATPhenotype(net, substrate)
    # nn=genome.BuildHyperNEATPhenotype(net, substrate)
    # error = 0
    # depth = 5

    # do stuff and return the fitness
    # net.Flush()
    net = NEAT.NeuralNetwork()
    genome.BuildPhenotype(net)
    img = np.zeros((400, 400, 3), dtype=np.uint8)
    img += 10
    NEAT.DrawPhenotype(img, (0, 0, 400, 400), net)
    cv2.imshow("CPPN", img)
    # Visualize best network's Phenotype
    net = NEAT.NeuralNetwork()
    genome.BuildESHyperNEATPhenotype(net, substrate, params)
    img = np.zeros((800, 800, 3), dtype=np.uint8)
    img += 10

    NEAT.DrawPhenotype(img, (0, 0, 800, 800), net, substrate=True)
    cv2.imshow("NN", img)
    cv2.waitKey(33)

    subprocess.call('./autostart.sh', shell=True)
    # print("b")
    # time.sleep(1)
    # print("c")
    main(MyDriver(net=net))
    # print("d")
    with open("mydata.txt",'r') as f:
        fitt=f.read()
    # os.system('pkill torcs')
    subprocess.call('./autostop.sh',shell=True)
    # print("fitness *******************   ",fitt)
    return float(fitt)