def test_raises_error_invalid_mutation_probability(prob, expected_error,
                                                   prob_index,
                                                   sample_component_generator):
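    # the four probabilities map, in order, to command, node, parameter, and
    # prune mutation (the same order as the keyword arguments used below)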
    input_probabilities = [0.25] * 4
    input_probabilities[prob_index] = prob
    with pytest.raises(expected_error):
        _ = AGraphMutation(sample_component_generator, *input_probabilities)
Example #2
def execute_generational_steps():
    x = init_x_vals(-10, 10, 100)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover,
                      mutation, 0.4, 0.4, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)
    archipelago = SerialArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)
    if opt_result.success:
        print(archipelago.get_best_individual().get_latex_string())
    else:
        print("Failed.")
def test_mutation_resets_fitness(mutation_parent, sample_component_generator):
    assert mutation_parent.fit_set

    mutation = AGraphMutation(sample_component_generator)
    child = mutation(mutation_parent)
    assert not child.fit_set
    assert child.fitness is None
Example #4
def init_island():
    np.random.seed(15)
    x = init_x_vals(START, STOP, NUM_POINTS)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea_algorithm = AgeFitnessEA(evaluator, agraph_generator, crossover,
                                mutation, MUTATION_PROBABILITY,
                                CROSSOVER_PROBABILITY, POP_SIZE)

    island = Island(ea_algorithm, agraph_generator, POP_SIZE)
    return island
Example #5
def training_function(training_data, ea_choice):
    component_generator = \
        ComponentGenerator(input_x_dimension=training_data.x.shape[1])
    component_generator.add_operator("+")
    component_generator.add_operator("-")
    component_generator.add_operator("*")

    agraph_generator = AGraphGenerator(agraph_size=32,
                                       component_generator=component_generator)

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    POPULATION_SIZE = 64
    MUTATION_PROBABILITY = 0.1
    CROSSOVER_PROBABILITY = 0.7

    if ea_choice == "age_fitness":
        ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation,
                          MUTATION_PROBABILITY, CROSSOVER_PROBABILITY,
                          POPULATION_SIZE)
    else:
        ea = DeterministicCrowdingEA(evaluator, crossover, mutation,
                                     MUTATION_PROBABILITY,
                                     CROSSOVER_PROBABILITY)

    island = Island(ea, agraph_generator, POPULATION_SIZE)
    opt_result = island.evolve_until_convergence(
        max_generations=MAX_GENERATIONS, fitness_threshold=1e-6)

    return island.get_best_individual(), opt_result
def test_param_mutation_constant_graph(constant_only_agraph, manual_constants):
    np.random.seed(10)
    comp_generator = ComponentGenerator(
        input_x_dimension=2,
        num_initial_load_statements=2,
        terminal_probability=1.0,
        constant_probability=1.0,
        automatic_constant_optimization=not manual_constants)
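    # parameter-only mutation: the command stack must be untouched and only
    # the constant values may change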
    mutation = AGraphMutation(comp_generator,
                              command_probability=0.0,
                              node_probability=0.0,
                              parameter_probability=1.0,
                              prune_probability=0.0)

    child = mutation(constant_only_agraph)
    p_stack = constant_only_agraph.command_array
    c_stack = child.command_array
    np.testing.assert_array_equal(p_stack, c_stack)

    if manual_constants:
        _assert_arrays_not_almost_equal(child.constants,
                                        constant_only_agraph.constants)
    else:
        np.testing.assert_array_almost_equal(child.constants,
                                             constant_only_agraph.constants)
def init_island():
    np.random.seed(10)
    x = init_x_vals(START, STOP, NUM_POINTS)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(
        x.shape[1],
        automatic_constant_optimization=False,
        numerical_constant_range=10)
    component_generator.add_operator("+")
    component_generator.add_operator("-")
    component_generator.add_operator("*")

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    evaluator = Evaluation(fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation,
                      MUTATION_PROBABILITY, CROSSOVER_PROBABILITY, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)
    return island
Example #8
def execute_generational_steps():
    communicator = MPI.COMM_WORLD
    rank = communicator.Get_rank()

    data = pd.read_csv('./data/fp_2var_test9_py.csv').to_numpy()
    x = data[:, 0:2]
    y = data[:, 2].reshape((-1, 1))

    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)   # +
    component_generator.add_operator(3)   # -
    component_generator.add_operator(4)   # *
    component_generator.add_operator(5)   # /
    component_generator.add_operator(6)   # sin
    component_generator.add_operator(7)   # cos
    component_generator.add_operator(10)  # power

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data,
                                 metric='root mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness,
                                                    algorithm='L-BFGS-B')
    evaluator = Evaluation(local_opt_fitness)

    #ea = AgeFitnessEA(evaluator, agraph_generator, crossover,
    #          mutation, 0.4, 0.4, POP_SIZE)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation, 0.4, 0.4)
    island = Island(ea, agraph_generator, POP_SIZE)

    archipelago = ParallelArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        fitness_threshold=FITNESS_THRESHOLD,
        min_generations=MIN_GENERATIONS,
        stagnation_generations=STAGNATION_LIMIT)
    if opt_result.success:
        if rank == 0:
            print("best: ", archipelago.get_best_individual())
def test_pruning_mutation_on_unprunable_agraph(terminal_only_agraph,
                                               sample_component_generator):
    np.random.seed(10)
    mutation = AGraphMutation(sample_component_generator,
                              command_probability=0.0,
                              node_probability=0.0,
                              parameter_probability=0.0,
                              prune_probability=1.0)
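    # a terminal-only graph has nothing to prune, so the stack must come back unchanged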
    for _ in range(5):
        child = mutation(terminal_only_agraph)
        p_stack = terminal_only_agraph.command_array
        c_stack = child.command_array
        np.testing.assert_array_equal(p_stack, c_stack)
Example #10
def test_mutation_of_parameters(mutation_parent, sample_component_generator,
                                algo_index):
    np.random.seed(0)
    input_probabilities = [0.0] * 4
    input_probabilities[algo_index] = 1.0
    mutation = AGraphMutation(sample_component_generator, *input_probabilities)
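    # whichever mutation is selected, at least one parameter column (1 or 2) should change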

    for _ in range(5):
        child = mutation(mutation_parent)
        p_stack = mutation_parent.command_array
        c_stack = child.command_array
        changed_columns = np.sum(p_stack != c_stack, axis=0)

        assert sum(changed_columns[1:]) > 0
Example #11
def test_multiple_manual_constant_mutations_for_consistency():
    np.random.seed(0)
    test_graph = AGraph(manual_constants=True)
    test_graph.command_array = np.array([[1, -1, -1], [1, -1, -1], [1, -1, -1],
                                         [1, 0, 0]])
    test_graph.set_local_optimization_params([1.0])
    comp_generator = ComponentGenerator(input_x_dimension=2,
                                        automatic_constant_optimization=False)
    comp_generator.add_operator(2)
    mutation = AGraphMutation(comp_generator)
    for _ in range(20):
        test_graph = mutation(test_graph)
        assert test_graph.num_constants == len(test_graph.constants)
Example #12
def test_mutation_of_nodes(mutation_parent, sample_component_generator,
                           algo_index, expected_node_mutation):
    np.random.seed(0)
    input_probabilities = [0.0] * 4
    input_probabilities[algo_index] = 1.0
    mutation = AGraphMutation(sample_component_generator, *input_probabilities)

    for _ in range(5):
        child = mutation(mutation_parent)
        p_stack = mutation_parent.command_array
        c_stack = child.command_array
        changed_columns = np.sum(p_stack != c_stack, axis=0)

        if expected_node_mutation:
            assert changed_columns[0] == 1
        else:
            assert changed_columns[0] == 0
Example #13
def test_new_manual_constants_added(terminal_only_agraph, command_prob,
                                    node_prob):
    np.random.seed(0)
    comp_generator = ComponentGenerator(input_x_dimension=2,
                                        num_initial_load_statements=2,
                                        terminal_probability=1.0,
                                        constant_probability=1.0,
                                        automatic_constant_optimization=False)
    mutation = AGraphMutation(comp_generator,
                              command_probability=command_prob,
                              node_probability=node_prob,
                              parameter_probability=0.0,
                              prune_probability=0.0)
    child = mutation(terminal_only_agraph)

    assert child.num_constants == 1
    assert len(child.constants) == 1
Example #14
def explicit_regression_benchmark():
    np.random.seed(15)
    communicator = MPI.COMM_WORLD
    rank = communicator.Get_rank()

    x = None
    y = None

    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation, 0.4,
                      0.4, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)

    archipelago = ParallelArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)

    if rank == 0:
        if opt_result.success:
            print("print the best indv", archipelago.get_best_individual())
        else:
            print("Failed.")
Example #15
def test_single_point_mutations(mutation_parent, algo_index,
                                sample_component_generator):
    np.random.seed(0)
    input_probabilities = [0.0] * 4
    input_probabilities[algo_index] = 1.0
    mutation = AGraphMutation(sample_component_generator, *input_probabilities)

    for _ in range(5):
        child = mutation(mutation_parent)
        p_stack = mutation_parent.command_array
        c_stack = child.command_array
        changed_commands = 0
        for p, c in zip(p_stack, c_stack):
            if (p != c).any():
                if p[0] != 1 or c[0] != 1:
                    changed_commands += 1
        if changed_commands != 1:
            print("parent\n", p_stack)
            print("child\n", c_stack)
        assert changed_commands == 1
Example #16
def test_mutation_creates_valid_parameters(sample_agraph_1):
    comp_generator = ComponentGenerator(input_x_dimension=2,
                                        num_initial_load_statements=2,
                                        terminal_probability=0.4,
                                        constant_probability=0.5)
    for operator in range(2, 13):
        comp_generator.add_operator(operator)
    np.random.seed(0)
    mutation = AGraphMutation(comp_generator,
                              command_probability=0.0,
                              node_probability=0.0,
                              parameter_probability=1.0,
                              prune_probability=0.0)
    for _ in range(20):
        child = mutation(sample_agraph_1)
        for row, operation in enumerate(child.command_array):
            if not bingo.symbolic_regression.agraph.maps.IS_TERMINAL_MAP[
                    operation[NODE_TYPE]]:
                assert operation[PARAM_1] < row
                assert operation[PARAM_2] < row
Example #17
def test_pruning_mutation(mutation_parent, sample_component_generator):
    np.random.seed(10)
    mutation = AGraphMutation(sample_component_generator,
                              command_probability=0.0,
                              node_probability=0.0,
                              parameter_probability=0.0,
                              prune_probability=1.0)
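    # pruning redirects references to a command toward an earlier one, so every
    # changed entry should be the same index and the child's index should be smaller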
    for _ in range(5):
        child = mutation(mutation_parent)
        p_stack = mutation_parent.command_array
        c_stack = child.command_array
        changes = p_stack != c_stack

        p_changes = p_stack[changes]
        c_changes = c_stack[changes]
        if p_changes.size > 0:
            np.testing.assert_array_equal(
                p_changes, np.full(p_changes.shape, p_changes[0]))
            np.testing.assert_array_equal(
                c_changes, np.full(c_changes.shape, c_changes[0]))
            assert c_changes[0] < p_changes[0]
Example #18
def execute_generational_steps():
    communicator = MPI.COMM_WORLD
    rank = communicator.Get_rank()

    x = None
    y = None

    if rank == 0:

        df = pd.read_csv('data/combined_clean_data.csv')
        df = df.dropna()

        train, test = train_test_split(df, test_size=0.2, random_state=42)

        columns = df.columns
        x = train.loc[:, ~columns.str.contains('Damage')]
        x = x.loc[:, x.columns != 'Time']
        x = x.loc[:, x.columns != 'Machine'].values

        y = train.loc[:, columns.str.contains('Damage')]
        y = y.iloc[:, 0].values.reshape((-1, 1))

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *
    component_generator.add_operator(5)  # /
    #    component_generator.add_operator(6) # sin
    #    component_generator.add_operator(7) # cos
    #    component_generator.add_operator(8) # exponential
    #    component_generator.add_operator(10) # power
    #    component_generator.add_operator(12) # sqrt

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data,
                                 metric='mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation,
                                 CROSSOVER_PROBABILITY, MUTATION_PROBABILITY)

    island = FitnessPredictorIsland(ea,
                                    agraph_generator,
                                    POP_SIZE,
                                    predictor_size_ratio=0.2)

    pareto_front = ParetoFront(secondary_key=lambda ag: ag.get_complexity(),
                               similarity_function=agraph_similarity)

    archipelago = ParallelArchipelago(island, hall_of_fame=pareto_front)

    optim_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        FITNESS_THRESHOLD,
        convergence_check_frequency=CHECK_FREQUENCY,
        min_generations=MIN_GENERATIONS,
        checkpoint_base_name='checkpoint',
        num_checkpoints=2)

    if optim_result.success:
        if rank == 0:
            print("best: ", archipelago.get_best_individual())

    if rank == 0:
        print(optim_result)
        print("Generation: ", archipelago.generational_age)
        print_pareto_front(pareto_front)
        plot_pareto_front(pareto_front)
Example #19
def test_mutation_genetic_age(mutation_parent, sample_component_generator):
    mutation = AGraphMutation(sample_component_generator)
    child = mutation(mutation_parent)
    assert child.genetic_age == mutation_parent.genetic_age
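
All of the examples above follow the same pattern: build a ComponentGenerator for the input dimension, register operators on it, hand it to AGraphMutation (and usually AGraphCrossover and an AGraphGenerator), then call the mutation on a parent AGraph. Below is a minimal standalone sketch of that pattern; the import paths assume a recent bingo package layout, and the function name and probability values are purely illustrative.

import numpy as np

# import paths are an assumption; older bingo releases used different module names
from bingo.symbolic_regression.agraph.component_generator import ComponentGenerator
from bingo.symbolic_regression.agraph.generator import AGraphGenerator
from bingo.symbolic_regression.agraph.mutation import AGraphMutation


def minimal_mutation_example():
    # two-dimensional random inputs; only the dimensionality matters here
    np.random.seed(0)
    x = np.random.uniform(-10, 10, (100, 2))

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator("+")
    component_generator.add_operator("*")

    # the generator produces random parent equations of the given stack size
    agraph_generator = AGraphGenerator(16, component_generator)

    # the four probabilities are command, node, parameter, and prune mutation
    mutation = AGraphMutation(component_generator,
                              command_probability=0.2,
                              node_probability=0.2,
                              parameter_probability=0.4,
                              prune_probability=0.2)

    parent = agraph_generator()
    child = mutation(parent)
    print("parent:", parent.get_latex_string())
    print("child: ", child.get_latex_string())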