Example #1
0
def init_island():
    """Build a single evolution Island for the benchmark regression problem."""
    np.random.seed(15)  # fixed seed so runs are reproducible

    # Sample the benchmark equation to create explicit training data.
    x = init_x_vals(START, STOP, NUM_POINTS)
    training_data = ExplicitTrainingData(x, equation_eval(x))

    # Equation building blocks: operator codes 2, 3, 4 (+, -, *).
    component_generator = ComponentGenerator(x.shape[1])
    for op_code in (2, 3, 4):
        component_generator.add_operator(op_code)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)
    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    # Wrap the regression fitness in Levenberg-Marquardt constant tuning.
    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea_algorithm = AgeFitnessEA(evaluator, agraph_generator, crossover,
                                mutation, MUTATION_PROBABILITY,
                                CROSSOVER_PROBABILITY, POP_SIZE)

    return Island(ea_algorithm, agraph_generator, POP_SIZE)
Example #2
0
def execute_generational_steps():
    """Evolve a serial archipelago to convergence and print the best model."""
    # Training data sampled from the benchmark equation on [-10, 10].
    x = init_x_vals(-10, 10, 100)
    training_data = ExplicitTrainingData(x, equation_eval(x))

    # Operator codes 2, 3, 4 correspond to +, -, *.
    component_generator = ComponentGenerator(x.shape[1])
    for op_code in (2, 3, 4):
        component_generator.add_operator(op_code)

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)
    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    # Regression fitness with Levenberg-Marquardt local constant tuning.
    fitness = ExplicitRegression(training_data=training_data)
    evaluator = Evaluation(
        ContinuousLocalOptimization(fitness, algorithm='lm'))

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover,
                      mutation, 0.4, 0.4, POP_SIZE)

    archipelago = SerialArchipelago(
        Island(ea, agraph_generator, POP_SIZE))

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)
    if not opt_result.success:
        print("Failed.")
    else:
        print(archipelago.get_best_individual().get_latex_string())
Example #3
0
def training_function(training_data, ea_choice):
    """Run symbolic regression on *training_data* with the selected EA.

    Returns a tuple of (best individual, optimization result).
    """
    component_generator = \
        ComponentGenerator(input_x_dimension=training_data.x.shape[1])
    for operator in ("+", "-", "*"):
        component_generator.add_operator(operator)

    agraph_generator = AGraphGenerator(agraph_size=32,
                                       component_generator=component_generator)

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    # Levenberg-Marquardt local optimization of equation constants.
    fitness = ExplicitRegression(training_data=training_data)
    evaluator = Evaluation(
        ContinuousLocalOptimization(fitness, algorithm='lm'))

    pop_size = 64
    p_mutation = 0.1
    p_crossover = 0.7

    # "age_fitness" selects AgeFitnessEA; anything else gets
    # deterministic crowding.
    if ea_choice == "age_fitness":
        ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation,
                          p_mutation, p_crossover, pop_size)
    else:
        ea = DeterministicCrowdingEA(evaluator, crossover, mutation,
                                     p_mutation, p_crossover)

    island = Island(ea, agraph_generator, pop_size)
    opt_result = island.evolve_until_convergence(
        max_generations=MAX_GENERATIONS, fitness_threshold=1e-6)

    return island.get_best_individual(), opt_result
def test_optimize_params(opt_individual, reg_individual, algorithm):
    """Local optimization should lower fitness only for the optimizable individual."""
    wrapped = MultipleFloatValueFitnessFunction()
    optimizer = ContinuousLocalOptimization(wrapped, algorithm)

    # NUM_OPT of the values are tunable, so the optimized fitness drops to
    # sqrt(NUM_VALS - NUM_OPT); the regular individual stays at sqrt(NUM_VALS).
    assert optimizer(opt_individual) == pytest.approx(
        np.sqrt(NUM_VALS - NUM_OPT), rel=5.e-6)
    assert optimizer(reg_individual) == pytest.approx(np.sqrt(NUM_VALS))
def test_optimize_fitness_vector(opt_individual, reg_individual, algorithm):
    """Vector fitness: the optimizable individual zeroes its first three values."""
    expected_reg = [1.] * NUM_VALS
    expected_opt = [0., 0., 0.] + [1.] * (NUM_VALS - 3)

    optimizer = ContinuousLocalOptimization(
        FloatVectorFitnessFunction(), algorithm)

    assert optimizer(opt_individual) == pytest.approx(np.mean(expected_opt))
    assert optimizer(reg_individual) == pytest.approx(np.mean(expected_reg))
def execute_generational_steps():
    """Evolve a parallel (MPI) archipelago until convergence.

    Rank 0 generates the training points and broadcasts them, so every
    rank evolves against identical data; rank 0 alone prints the result.
    """
    rank = MPI.COMM_WORLD.Get_rank()

    # NOTE(review): these CSV-derived arrays are unconditionally replaced by
    # the broadcast of rank 0's generated data below, so this load is
    # effectively dead code -- kept for parity, but fixed:
    #   * as_matrix() was removed in pandas 1.0 -> to_numpy()
    #   * the target is column 2, not the first two rows (was data[:2])
    data = pd.read_csv('./data/fp_2var_test9_py.csv').to_numpy()
    x = data[:, 0:2]                   # inputs: first two columns
    y = data[:, 2].reshape((-1, 1))    # target: third column as (n, 1)

    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)

    # Share rank 0's arrays with every rank.
    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    # Operator codes: 2 +, 3 -, 4 *, 5 /, 6 sin, 7 cos, 10 power.
    for op_code in (2, 3, 4, 5, 6, 7, 10):
        component_generator.add_operator(op_code)

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    # RMSE fitness with gradient-based local optimization of constants.
    fitness = ExplicitRegression(training_data=training_data,
                                 metric='root mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness,
                                                    algorithm='L-BFGS-B')
    evaluator = Evaluation(local_opt_fitness)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation, 0.4, 0.4)
    island = Island(ea, agraph_generator, POP_SIZE)

    archipelago = ParallelArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        fitness_threshold=FITNESS_THRESHOLD,
        min_generations=MIN_GENERATIONS,
        stagnation_generations=STAGNATION_LIMIT)

    # Only rank 0 reports, to avoid one line of output per MPI process.
    if opt_result.success and rank == 0:
        print("best: ", archipelago.get_best_individual())
Example #7
0
def main():
    """Show fitness improving on the zero-minimization toy problem."""
    # Assemble the evolutionary algorithm: tournament selection with
    # single-point crossover/mutation over float chromosomes.
    local_opt_fitness = ContinuousLocalOptimization(ZeroMinFitnessFunction())
    ea = MuPlusLambda(Evaluation(local_opt_fitness),
                      Tournament(10),
                      SinglePointCrossover(),
                      SinglePointMutation(get_random_float),
                      0.4, 0.4, 20)

    generator = MultipleFloatChromosomeGenerator(get_random_float, 8)
    island = Island(ea, generator, 25)

    # Report fitness after one generation, then again after 500 more.
    for num_generations in (1, 500):
        island.evolve(num_generations)
        report_max_min_mean_fitness(island)
Example #8
0
def explicit_regression_benchmark():
    """Benchmark explicit regression on an MPI-parallel archipelago."""
    np.random.seed(15)  # reproducible data generation on rank 0
    rank = MPI.COMM_WORLD.Get_rank()

    # Rank 0 builds the data set; all ranks receive it via broadcast.
    x, y = None, None
    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)
    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    # Operator codes 2, 3, 4 correspond to +, -, *.
    component_generator = ComponentGenerator(x.shape[1])
    for op_code in (2, 3, 4):
        component_generator.add_operator(op_code)

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)
    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    # Levenberg-Marquardt local optimization of equation constants.
    fitness = ExplicitRegression(training_data=training_data)
    evaluator = Evaluation(
        ContinuousLocalOptimization(fitness, algorithm='lm'))

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation, 0.4,
                      0.4, POP_SIZE)

    archipelago = ParallelArchipelago(
        Island(ea, agraph_generator, POP_SIZE))

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)

    # Only rank 0 reports the outcome.
    if rank == 0:
        if not opt_result.success:
            print("Failed.")
        else:
            print("print the best indv", archipelago.get_best_individual())
def test_valid_fitness_function():
    """Constructing with algorithm='lm' rejects this fitness function."""
    # NOTE(review): presumably 'lm' requires a vector-residual fitness
    # function, which MultipleFloatValueFitnessFunction is not -- confirm.
    func = MultipleFloatValueFitnessFunction()
    with pytest.raises(TypeError):
        ContinuousLocalOptimization(func, algorithm='lm')
def test_set_training_data_pass_through():
    """Setting training_data on the wrapper forwards to the wrapped function."""
    wrapped = MultipleFloatValueFitnessFunction()
    optimizer = ContinuousLocalOptimization(wrapped, "Powell")
    optimizer.training_data = 123
    assert wrapped.training_data == 123
def test_set_eval_count_pass_through():
    """Setting eval_count on the wrapper forwards to the wrapped function."""
    wrapped = MultipleFloatValueFitnessFunction()
    optimizer = ContinuousLocalOptimization(wrapped, "Powell")
    optimizer.eval_count = 123
    assert wrapped.eval_count == 123
def test_not_valid_algorithm():
    """An unrecognized algorithm name raises KeyError at construction."""
    func = MultipleFloatValueFitnessFunction()
    with pytest.raises(KeyError):
        ContinuousLocalOptimization(
            func, algorithm='Dwayne - The Rock - Johnson')
Example #13
0
from benchmark_data import StatsPrinter, \
                           generate_random_individuals, \
                           copy_to_cpp, \
                           TEST_EXPLICIT_REGRESSION, \
                           TEST_EXPLICIT_REGRESSION_CPP, \
                           TEST_IMPLICIT_REGRESSION, \
                           TEST_IMPLICIT_REGRESSION_CPP, \
                           CLO_TIMING_NUMBER, \
                           CLO_TIMING_REPEATS, \
                           NUM_AGRAPHS_INDVS

import benchmark_data as benchmark_data


# Pre-built local-optimization wrappers around each benchmark regression
# fitness function, for both the Python and C++ backends.
TEST_EXPLICIT_REGRESSION_OPTIMIZATION \
    = ContinuousLocalOptimization(TEST_EXPLICIT_REGRESSION)
TEST_IMPLICIT_REGRESSION_OPTIMIZATION \
    = ContinuousLocalOptimization(TEST_IMPLICIT_REGRESSION)
TEST_EXPLICIT_REGRESSION_OPTIMIZATION_CPP \
    = ContinuousLocalOptimization(TEST_EXPLICIT_REGRESSION_CPP)
TEST_IMPLICIT_REGRESSION_OPTIMIZATION_CPP \
    = ContinuousLocalOptimization(TEST_IMPLICIT_REGRESSION_CPP)

TEST_ITERATION = 0  # NOTE(review): presumably a benchmark-iteration counter -- confirm against callers
DEBUG = False  # NOTE(review): presumably toggles extra diagnostic output -- confirm
# Shared population of random agraph individuals used by the benchmarks,
# plus C++ copies of the same individuals.
TEST_AGRAPHS = generate_random_individuals(benchmark_data.NUM_AGRAPHS_INDVS,
                                           benchmark_data.COMMAND_ARRAY_SIZE,
                                           True)
TEST_AGRAPHS_CPP = copy_to_cpp(TEST_AGRAPHS)
# NOTE(review): presumably accumulate benchmark result rows per backend -- confirm
BENCHMARK_LISTS = []
BENCHMARK_LISTS_CPP = []
def execute_generational_steps():
    """Run MPI-parallel symbolic regression on the combined_clean_data CSV.

    Rank 0 loads and splits the data, then broadcasts the training arrays
    so every rank evolves against the same inputs; rank 0 alone prints the
    result and the Pareto front.
    """
    communicator = MPI.COMM_WORLD  # NOTE(review): unused; rank is fetched directly below
    rank = MPI.COMM_WORLD.Get_rank()

    x = None
    y = None

    if rank == 0:

        df = pd.read_csv('data/combined_clean_data.csv')
        df = df.dropna()  # drop rows with missing values before splitting

        # 80/20 train/test split with a fixed seed for reproducibility.
        train, test = train_test_split(df, test_size=0.2, random_state=42)

        columns = df.columns
        # Inputs: every column except the 'Damage' targets, 'Time', and
        # 'Machine'; converted to a plain ndarray.
        x = train.loc[:, ~columns.str.contains('Damage')]
        x = x.loc[:, x.columns != 'Time']
        x = x.loc[:, x.columns != 'Machine'].values

        # Target: the first 'Damage' column, reshaped to (n, 1).
        y = train.loc[:, columns.str.contains('Damage')]
        y = y.iloc[:, 0].values.reshape((-1, 1))

    # Share rank 0's arrays with every rank.
    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *
    component_generator.add_operator(5)  # /
    #    component_generator.add_operator(6) # sin
    #    component_generator.add_operator(7) # cos
    #    component_generator.add_operator(8) # exponential
    #    component_generator.add_operator(10) # power
    #    component_generator.add_operator(12) # sqrt

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    # MSE fitness with Levenberg-Marquardt local optimization of constants.
    fitness = ExplicitRegression(training_data=training_data,
                                 metric='mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation,
                                 CROSSOVER_PROBABILITY, MUTATION_PROBABILITY)

    # Fitness-predictor island evaluates on a subset of the data (20%)
    # to cut evaluation cost.
    island = FitnessPredictorIsland(ea,
                                    agraph_generator,
                                    POP_SIZE,
                                    predictor_size_ratio=0.2)

    # Hall of fame keyed on complexity, deduplicated by similarity.
    pareto_front = ParetoFront(secondary_key=lambda ag: ag.get_complexity(),
                               similarity_function=agraph_similarity)

    archipelago = ParallelArchipelago(island, hall_of_fame=pareto_front)

    optim_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        FITNESS_THRESHOLD,
        convergence_check_frequency=CHECK_FREQUENCY,
        min_generations=MIN_GENERATIONS,
        checkpoint_base_name='checkpoint',
        num_checkpoints=2)

    if optim_result.success:
        if rank == 0:
            print("best: ", archipelago.get_best_individual())

    # Rank 0 summarizes the run and plots the Pareto front.
    if rank == 0:
        print(optim_result)
        print("Generation: ", archipelago.generational_age)
        print_pareto_front(pareto_front)
        plot_pareto_front(pareto_front)