Example #1
    def calculate(self, individual, raw_individual):
        # Evaluate the compiled individual on every training sample.
        predicted = [bool(individual(*x)) for x in self.X_train]

        # Indices the individual classifies as positive / negative.
        positive_pred = {i for i, p in enumerate(predicted) if p}
        negative_pred = {i for i, p in enumerate(predicted) if not p}

        # Positives correctly selected, and true negatives the rule
        # failed to reject (i.e. false positives).
        pos_hits = len(self.true_positives & positive_pred)
        neg_hits = len(self.true_negatives - negative_pred)

        fitness = 1000  # penalty when the rule selects nothing
        size_selected = len(positive_pred)

        # Number of conjuncts in the predicate, used as a complexity penalty.
        ind_size = len(util.genetic_2_predicate(raw_individual).split('and'))
        if size_selected > 0:
            # Missed positives + doubly weighted false positives,
            # plus small penalties for predicate and selection size.
            fitness = (len(self.true_positives) - pos_hits
                       + 2.0 * neg_hits
                       + 0.05 * ind_size
                       + 0.1 * size_selected)
            #fitness = 1 - precision_recall_fscore_support(self.y_train, predicted, average='binary')[2] + 0.05*ind_size

        return fitness
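For reference, a toy evaluation of the same fitness expression with made-up counts (not from the original data), showing how missed positives, false positives, and the two size penalties combine; lower is better.

# Toy numbers (assumed, not from the project): 3 true positives of which the
# rule finds 2, 1 true negative wrongly selected, a 2-conjunct predicate,
# and 3 rows selected in total.
true_positives = {0, 1, 2}
pos_hits = 2
neg_hits = 1
ind_size = 2
size_selected = 3

fitness = (len(true_positives) - pos_hits
           + 2.0 * neg_hits + 0.05 * ind_size + 0.1 * size_selected)
print(fitness)  # (3 - 2) + 2.0 + 0.1 + 0.3 = 3.4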
Example #2
    def simple_search(self,
                      population_size,
                      crossover_rate,
                      mutation_rate,
                      num_generations,
                      max_gen_without_gain,
                      verbose=True):
        # Track the minimum fitness in each generation.
        stats = tools.Statistics(lambda ind: ind.fitness.values)
        stats.register("min", np.min)

        pop = self.toolbox.population(n=population_size)
        hof = tools.HallOfFame(1)
        # Delegate the evolutionary loop to DEAP's eaSimple; note that
        # max_gen_without_gain is not used by this variant (no early stopping).
        pop, log = algorithms.eaSimple(pop,
                                       self.toolbox,
                                       crossover_rate,
                                       mutation_rate,
                                       num_generations,
                                       stats=stats,
                                       halloffame=hof,
                                       verbose=verbose)

        self.best_solution = hof[0]
        return util.genetic_2_predicate(hof[0])
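Both search methods assume that `self.toolbox` already has `population`, `select`, `mate`, `mutate`, and `evaluate` registered. A minimal sketch of such a setup with DEAP's GP module is shown below; the primitive set, arities, and operator choices are illustrative assumptions, not the project's actual configuration.

import operator

import numpy as np  # used by the statistics in simple_search above
from deap import algorithms, base, creator, gp, tools

# Assumed: two boolean input features; the real feature set is not shown here.
pset = gp.PrimitiveSet("MAIN", 2)
pset.addPrimitive(operator.and_, 2)
pset.addPrimitive(operator.or_, 2)
pset.addPrimitive(operator.not_, 1)

# Single-objective minimisation, consistent with the "min" statistic above.
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin)

toolbox = base.Toolbox()
toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=3)
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("compile", gp.compile, pset=pset)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("mate", gp.cxOnePoint)
toolbox.register("expr_mut", gp.genFull, min_=0, max_=2)
toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset)
# toolbox.register("evaluate", ...)  # e.g. a wrapper around calculate() above

With such a toolbox in place, simple_search delegates the loop to algorithms.eaSimple, while search_best_predicate (next example) runs the same select/mate/mutate cycle by hand so it can add early stopping.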
Example #3
    def search_best_predicate(self,
                              population_size,
                              crossover_rate,
                              mutation_rate,
                              num_generations,
                              max_gen_without_gain,
                              verbose=True):
        population = self.toolbox.population(n=population_size)
        fitnesses = list(map(self.toolbox.evaluate, population))
        for ind, fit in zip(population, fitnesses):
            ind.fitness.values = fit

        past_fitness = []
        hall_of_fame = {'individual': None, 'fitness': None}

        if verbose:
            print('##############################')
            print("Generation \t Min")

        for generation in range(1, num_generations):
            # Select next generation
            offspring = self.toolbox.select(population, len(population))
            # Clone the selected individuals
            offspring = list(map(self.toolbox.clone, offspring))
            # Apply crossover and mutation on the offspring
            for child1, child2 in zip(offspring[::2], offspring[1::2]):
                if random.random() < crossover_rate:
                    self.toolbox.mate(child1, child2)
                    del child1.fitness.values
                    del child2.fitness.values

            for mutant in offspring:
                if random.random() < mutation_rate:
                    self.toolbox.mutate(mutant)
                    del mutant.fitness.values

            # Evaluate the individuals with an invalid fitness
            invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
            fitnesses = map(self.toolbox.evaluate, invalid_ind)
            for ind, fit in zip(invalid_ind, fitnesses):
                ind.fitness.values = fit

            # Replace population
            population[:] = offspring

            fits = [ind.fitness.values[0] for ind in population]
            min_fitness = min(fits)

            if hall_of_fame['fitness'] is None or min_fitness < hall_of_fame['fitness']:
                hall_of_fame['individual'] = population[fits.index(min_fitness)]
                hall_of_fame['fitness'] = min_fitness

            past_fitness.append(min_fitness)

            if verbose:
                print('{0}          \t {1:02.4f}'.format(
                    generation, min_fitness))

            # Stop evolution if fitness reaches the global optimum
            if min_fitness <= 0.0:
                if verbose:
                    print('Fitness reached zero')
                break
            # Stop evolution if the best fitness has not improved for
            # max_gen_without_gain consecutive generations
            if (generation > max_gen_without_gain and
                    past_fitness[-max_gen_without_gain:] ==
                    [min_fitness] * max_gen_without_gain):
                break

        if verbose:
            print('####### Evolution ended #######')

        # Get best solution
        self.best_solution = hall_of_fame['individual']
        return util.genetic_2_predicate(hall_of_fame['individual'])
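The hand-rolled hall_of_fame dict plays the same role as DEAP's tools.HallOfFame used in Example #2; a minimal equivalent sketch (same population variable assumed, minimising fitness as above) looks like this:

from deap import tools

hof = tools.HallOfFame(1)   # keeps the single best individual ever seen
# ... inside the generation loop, once fitnesses have been assigned:
hof.update(population)      # compares by (weighted) fitness, keeps the best
best_individual = hof[0]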
Example #4
    def moop_calculate_genetic_fitness(self, genetic_solution):
        query = util.classification_rule_2_sql(
            util.genetic_2_predicate(genetic_solution))
        return self.base_calculate(query, multi_objective=True)
Example #5
    def calculate_genetic_fitness(self, genetic_solution):
        query = util.classification_rule_2_sql(
            util.genetic_2_predicate(genetic_solution))
        # Trailing comma: return a one-element tuple, as DEAP expects
        # from an evaluation function.
        return self.base_calculate(query),
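These two wrappers are what would typically be registered as the toolbox evaluator; a hedged sketch, assuming an instance named fitness_calculator of the class these methods belong to (the name is illustrative):

# Hypothetical wiring: the trailing comma in calculate_genetic_fitness makes
# the return value a one-element tuple, the shape DEAP expects from
# toolbox.evaluate.
toolbox.register("evaluate", fitness_calculator.calculate_genetic_fitness)
# For the multi-objective variant, base_calculate(..., multi_objective=True)
# would be expected to return a tuple matching the fitness weights.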