Example #1
def test_poorly_shaped_input_y_of_training_data():
    y = np.zeros((5, 3, 3))
    x = y.flatten()
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        with pytest.raises(ValueError):
            _ = ExplicitTrainingData(x, y)
Example #2
def test_reshaping_of_training_data():
    one_dim_input = np.zeros(5)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        training_data = ExplicitTrainingData(one_dim_input, one_dim_input)
    assert training_data.x.ndim == 2
    assert training_data.y.ndim == 2
Example #3
def execute_generational_steps():
    x = init_x_vals(-10, 10, 100)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover,
                      mutation, 0.4, 0.4, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)
    archipelago = SerialArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)
    if opt_result.success:
        print(archipelago.get_best_individual().get_latex_string())
    else:
        print("Failed.")
Example #4
def init_island():
    np.random.seed(10)
    x = init_x_vals(START, STOP, NUM_POINTS)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(
        x.shape[1],
        automatic_constant_optimization=False,
        numerical_constant_range=10)
    component_generator.add_operator("+")
    component_generator.add_operator("-")
    component_generator.add_operator("*")

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    evaluator = Evaluation(fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation,
                      MUTATION_PROBABILITY, CROSSOVER_PROBABILITY, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)
    return island
Example #5
def init_island():
    np.random.seed(15)
    x = init_x_vals(START, STOP, NUM_POINTS)
    y = equation_eval(x)
    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea_algorithm = AgeFitnessEA(evaluator, agraph_generator, crossover,
                                mutation, MUTATION_PROBABILITY,
                                CROSSOVER_PROBABILITY, POP_SIZE)

    island = Island(ea_algorithm, agraph_generator, POP_SIZE)
    return island
Example #6
def test_getting_subset_of_training_data(python):
    data_input = np.arange(5).reshape((-1, 1))
    training_data = ExplicitTrainingData(data_input, data_input) \
                    if python \
                    else bingocpp.ExplicitTrainingData(data_input, data_input)
    subset_training_data = training_data[[0, 2, 3]]

    expected_subset = np.array([[0], [2], [3]])
    np.testing.assert_array_equal(subset_training_data.x, expected_subset)
    np.testing.assert_array_equal(subset_training_data.y, expected_subset)
Example #7
def execute_generational_steps():
    communicator = MPI.COMM_WORLD
    rank = MPI.COMM_WORLD.Get_rank()

    data = pd.read_csv('./data/fp_2var_test9_py.csv').values
    x = data[:, 0:2]
    y = data[:, 2]

    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)   # +
    component_generator.add_operator(3)   # -
    component_generator.add_operator(4)   # *
    component_generator.add_operator(5)   # /
    component_generator.add_operator(6)   # sin
    component_generator.add_operator(7)   # cos
    component_generator.add_operator(10)  # power

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data,
                                 metric='root mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness,
                                                    algorithm='L-BFGS-B')
    evaluator = Evaluation(local_opt_fitness)

    #ea = AgeFitnessEA(evaluator, agraph_generator, crossover,
    #          mutation, 0.4, 0.4, POP_SIZE)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation, 0.4, 0.4)
    island = Island(ea, agraph_generator, POP_SIZE)

    archipelago = ParallelArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        fitness_threshold=FITNESS_THRESHOLD,
        min_generations=MIN_GENERATIONS,
        stagnation_generations=STAGNATION_LIMIT)
    if opt_result.success:
        if rank == 0:
            print("best: ", archipelago.get_best_individual())
Example #8
def explicit_regression_benchmark():
    np.random.seed(15)
    communicator = MPI.COMM_WORLD
    rank = MPI.COMM_WORLD.Get_rank()

    x = None
    y = None

    if rank == 0:
        x = init_x_vals(-10, 10, 100)
        y = equation_eval(x)

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data)
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation, 0.4,
                      0.4, POP_SIZE)

    island = Island(ea, agraph_generator, POP_SIZE)

    archipelago = ParallelArchipelago(island)

    opt_result = archipelago.evolve_until_convergence(max_generations=500,
                                                      fitness_threshold=1.0e-4)

    if rank == 0:
        if opt_result.success:
            print("print the best indv", archipelago.get_best_individual())
        else:
            print("Failed.")
Example #9
def test_poorly_shaped_input_x_of_training_data():
    x = np.zeros((5, 3, 3))
    y = x.flatten()
    with pytest.raises(ValueError):
        _ = ExplicitTrainingData(x, y)
Example #10
def test_correct_training_data_length(python, input_size):
    data_input = np.arange(input_size).reshape((-1, 1))
    training_data = ExplicitTrainingData(data_input, data_input)
    assert len(training_data) == input_size
Example #11
def explicit_regression():
    training_data = ExplicitTrainingData(TEST_X_PARTIALS, TEST_Y_ZEROS)
    return ExplicitRegression(training_data)
Example #12
def execute_generational_steps():
    communicator = MPI.COMM_WORLD
    rank = MPI.COMM_WORLD.Get_rank()

    x = None
    y = None

    if rank == 0:

        df = pd.read_csv('data/combined_clean_data.csv')
        df = df.dropna()

        train, test = train_test_split(df, test_size=0.2, random_state=42)

        columns = df.columns
        x = train.loc[:, ~columns.str.contains('Damage')]
        x = x.loc[:, x.columns != 'Time']
        x = x.loc[:, x.columns != 'Machine'].values

        y = train.loc[:, columns.str.contains('Damage')]
        y = y.iloc[:, 0].values.reshape((-1, 1))

    x = MPI.COMM_WORLD.bcast(x, root=0)
    y = MPI.COMM_WORLD.bcast(y, root=0)

    training_data = ExplicitTrainingData(x, y)

    component_generator = ComponentGenerator(x.shape[1])
    component_generator.add_operator(2)  # +
    component_generator.add_operator(3)  # -
    component_generator.add_operator(4)  # *
    component_generator.add_operator(5)  # /
    #    component_generator.add_operator(6) # sin
    #    component_generator.add_operator(7) # cos
    #    component_generator.add_operator(8) # exponential
    #    component_generator.add_operator(10) # power
    #    component_generator.add_operator(12) # sqrt

    crossover = AGraphCrossover(component_generator)
    mutation = AGraphMutation(component_generator)

    agraph_generator = AGraphGenerator(STACK_SIZE, component_generator)

    fitness = ExplicitRegression(training_data=training_data,
                                 metric='mean squared error')
    local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
    evaluator = Evaluation(local_opt_fitness)

    ea = DeterministicCrowdingEA(evaluator, crossover, mutation,
                                 CROSSOVER_PROBABILITY, MUTATION_PROBABILITY)

    island = FitnessPredictorIsland(ea,
                                    agraph_generator,
                                    POP_SIZE,
                                    predictor_size_ratio=0.2)

    pareto_front = ParetoFront(secondary_key=lambda ag: ag.get_complexity(),
                               similarity_function=agraph_similarity)

    archipelago = ParallelArchipelago(island, hall_of_fame=pareto_front)

    optim_result = archipelago.evolve_until_convergence(
        MAX_GENERATIONS,
        FITNESS_THRESHOLD,
        convergence_check_frequency=CHECK_FREQUENCY,
        min_generations=MIN_GENERATIONS,
        checkpoint_base_name='checkpoint',
        num_checkpoints=2)

    if optim_result.success:
        if rank == 0:
            print("best: ", archipelago.get_best_individual())

    if rank == 0:
        print(optim_result)
        print("Generation: ", archipelago.generational_age)
        print_pareto_front(pareto_front)
        plot_pareto_front(pareto_front)
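Example #12 passes an agraph_similarity function to the ParetoFront hall of fame but does not show it, and print_pareto_front / plot_pareto_front are likewise reporting helpers defined elsewhere in the original script. A minimal sketch of the similarity predicate, assuming duplicates are equations that tie on both fitness and complexity:

def agraph_similarity(ag_1, ag_2):
    """Treat two equations as equivalent if they match in fitness and complexity."""
    return (ag_1.fitness == ag_2.fitness
            and ag_1.get_complexity() == ag_2.get_complexity())

With a predicate like this, the Pareto front keeps only one representative per (fitness, complexity) pair, which keeps the printed front readable.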