Example #1
def evolve_architecture(config: Configuration, population: List[Module] = None):
    update_status("Creating initial population")

    # initializing population
    if not population:
        population_size = config.population_size
        if config.optimize_architectures:
            config.population_size = config.population_size * 10
            print(
                f"Optimizing the architecture of {config.population_size} individuals. Only {population_size} will be kept."
            )

        population = init_population(individs=config.population_size,
                                     in_shape=config.input_format,
                                     network_min_layers=config.min_size,
                                     network_max_layers=config.max_size)
        if config.optimize_architectures:
            from src.ea_nas.evolutionary_operations.optimize_architecture import optimize
            population = optimize(population,
                                  selection.tournament,
                                  steps=30,
                                  config=config)
            config.population_size = population_size
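            # Discard individuals that grew more than 10% beyond max_size, then keep at most the last config.population_size of them: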
            population = [
                x for x in population
                if len(x.children) <= config.max_size * 1.10
            ]
            population = population[-config.population_size:]

        del population_size
        # Training initial population:
        population = workers.start(population, config)

    population.sort(key=weighted_overfit_score(config), reverse=True)
    upload_population(population)
    generation_finished(population, config,
                        f"--> Initialization complete. Leaderboards:")
    best = population[-1]

    # Running EA algorithm:
    for generation in range(config.generations):
        config.generation = generation

        # Preparation:
        print("\nGeneration", generation)
        builtins.generation = generation
        offsprings = []

        # Mutation:
        print("--> Mutations:")
        update_status("Mutating")
        stats = {"init": 0, "single": 0, "multi": 0}
        for i in range(config.population_size):
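            # With probability 0.9, mutate the current best (usually once, occasionally up to three times); otherwise spawn a fresh individual.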
            draw = random.uniform(0, 1)
            mutated = None
            if draw < 0.9:
                mutations = 1 if random.uniform(
                    0, 1) < .90 else random.randint(1, 3)
                for x in range(mutations):
                    mutated = mutate(best, make_copy=(x == mutations - 1))
                if mutations == 1:
                    stats['single'] += 1
                if mutations >= 2:
                    stats['multi'] += 1
            else:
                mutated = init_population(1, config.input_format, 3, 30)[0]
                stats['init'] += 1
            if mutated:
                offsprings += [mutated]
        print(f"\n  Single mutation:     {stats['single']}"
              f"\n  Multiple mutations:  {stats['multi']}"
              f"\n  Spawned individuals: {stats['init']}")

        # Training networks:
        offsprings = list(set(offsprings))  # Remove duplicates to avoid identical offspring ("inbreeding")

        # Elitism:
        population = [best] + offsprings
        population = workers.start(population, config)
        population.sort(key=weighted_overfit_score(config), reverse=True)
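        # Only the top-ranked individual survives; all other individuals are reported as removed.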
        best, removed = population[-1], population[:-1]

        config.results.store_generation([best], generation)

        # User feedback:
        upload_population(population)
        generation_finished([best], config,
                            f"--> Generation {generation} Leaderboards:")
        generation_finished(
            removed, config,
            "--> The following individs were removed by elitism:")

        # Checking for a satisfactory solution
        # if any(ind.test_acc() > config.training.acceptable_scores - 0.10 for ind in population):
        #     population, solved = try_finish(population, config, moo)
        #     if solved:
        #         return population
    return population
Example #2
def main(config: Configuration):
    # 0.1 How many nets can be trained for each generation?
    solved = False
    compute_capacity = config.compute_capacity()

    # 0.2 Initializing multi objective optimisation sorting:
    moo_objectives = moo.classification_objectives(config)
    domination_operator = moo.classification_domination_operator(moo_objectives)

    patterns, nets = initialize_population(config, compute_capacity)

    i = 0
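    # i counts completed evolve-and-test rounds and offsets the generation numbering below.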
    while not solved:

        # 3. Evolve for <x> generations:
        for generation in range(config.generations * i,
                                config.generations * (i + 1)):
            config.generation = generation

            # 3.1 Select some patterns for mutation. Tournament
            selected = selection.tournament(patterns,
                                            size=int(len(patterns) / 2))

            # 3.2 Perform Mutations + Crossover on selected patterns
            mutations, crossovers = selection.divide(selected)
            patterns = (patterns +
                        mutator.apply(mutations) +
                        crossover.apply(crossovers))

            # 3.3 Evaluate new patterns. Fitness calculation
            nets = recombine.combine(patterns,
                                     compute_capacity,
                                     config.min_size,
                                     config.max_size,
                                     include_optimal=True)
            nets = workers.start(nets, config)
            patterns = evaluation.inherit_results(patterns, nets)

            # 3.4 Rank all patterns using MOO. Diversity in position, 2D vs 1D, scores ++
            patterns = nsga_ii(patterns, moo_objectives, domination_operator,
                               config)

            # 3.5 Evolution of the fittest. Elitism
            patterns = patterns[-config.population_size:]

            # 3.6 Feedback:
            print(f"--> Generation {generation} Leaderboards")
            generation_finished(patterns, config, "    - Patterns:")
            generation_finished(nets, config, "    - Neural networks:")
            config.results.store_generation(patterns, generation)
            config.results.store_generation(nets, generation)

        # To finish up, the best combination of patterns needs to be returned and trained for
        # much longer than during fitness evaluation. The previous steps should only be used
        # to verify that the combination of layers is good.
        #
        # This might need to be tried multiple times. Once a good result is obtained, the
        # algorithm should stop and return the final structure with trained weights.

        print("Testing best combination of patterns")

        # Changing settings of training steps:
        original_training_settings = copy.deepcopy(config.training)
        config.training.use_restart = False
        config.training.fixed_epochs = True
        config.training.epochs = 300

        # Finding the best combined network:
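        # Temporarily rank the nets under the "ea-nas" config type, then switch back to "PatternNets".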
        config.type = "ea-nas"
        nets.sort(key=weighted_overfit_score(config), reverse=True)
        config.type = "PatternNets"
        best_net = nets[-1]

        # Performing training step:
        best_net = workers.start([best_net], config)[0]

        # Reset settings and return:
        config.training = original_training_settings

        if best_net.test_acc() >= config.training.acceptable_scores:
            print("Found good network! ")
            solved = True

        config.type = "ea-nas"
        generation_finished([best_net], config, "--> Found final solution:")
        config.type = "PatternNets"
        patterns = evaluation.inherit_results(patterns, nets)
        i += 1
Example #3
def evolve_architecture(selection: callable,
                        config: Configuration,
                        population: List[Module] = None):
    update_status("Creating initial population")

    # initializing population
    if not population:
        population = init_population(individs=config.population_size,
                                     in_shape=config.input_format,
                                     network_min_layers=config.min_size,
                                     network_max_layers=config.max_size)
        # Training initial population:
        population = workers.start(population, config)

    population = nsga_ii(population, moo.objectives(config),
                         moo.domination_operator(moo.objectives(config)),
                         config)
    upload_population(population)
    generation_finished(population, config,
                        f"--> Initialization complete. Leaderboards:")

    # Running EA algorithm:
    for generation in range(config.generations):
        config.generation = generation

        # Preparation:
        print("\nGeneration", generation)

        # Mutation:
        print("--> Mutations:")
        update_status("Mutating")
        children = mutation(population, selection, config)

        # Training networks:
        population += children
        population = workers.start(population, config)

        # Sorting: rank parents and offspring together with NSGA-II
        population = nsga_ii(population, moo.objectives(config),
                             moo.domination_operator(moo.objectives(config)),
                             config)

        # Elitism:
        removed = []  # ensure 'removed' exists even if nothing is culled below
        keep = len(population) - config.population_size
        if keep > 0:
            population, removed = population[keep:], population[:keep]
        generation_finished(
            removed, config,
            "--> The following individs were removed by elitism:")
        generation_finished(population, config,
                            f"--> Generation {generation} Leaderboards:")

        # User feedback:
        upload_population(population)
        config.results.store_generation(population, generation)

        # Checking for a satisfactory solution
        # if any(ind.test_acc() > config.training.acceptable_scores - 0.10 for ind in population):
        #     population, solved = try_finish(population, config, moo)
        #     if solved:
        #         return population
    return population