# Example #1
def compare_agraph_explicit(X, Y):
    """Run explicit symbolic regression on (X, Y) and assert convergence."""
    # solution manipulator restricted to Add and Multiply nodes
    sol_manip = agm(X.shape[1], 16, nloads=2)
    for node_type in (AGNodes.Add, AGNodes.Multiply):
        sol_manip.add_node_type(node_type)

    # fitness-predictor manipulator sized to the number of samples
    pred_manip = fpm(32, Y.shape[0])

    # training data -- targets must be a column vector
    Y = Y.reshape([-1, 1])
    training_data = ExplicitTrainingData(X, Y)

    # fitness metric
    explicit_regressor = StandardRegression()

    # evolve on the islands and require convergence below EPSILON
    islmngr = SerialIslandManager(N_ISLANDS,
                                  solution_training_data=training_data,
                                  solution_manipulator=sol_manip,
                                  predictor_manipulator=pred_manip,
                                  fitness_metric=explicit_regressor)
    converged = islmngr.run_islands(MAX_STEPS,
                                    EPSILON,
                                    step_increment=N_STEPS,
                                    make_plots=False)
    assert converged
def compare_agraph_potential(X, Y):
    """Run pairwise-potential regression on (X, Y) and assert convergence."""
    # solution manipulator with one load column, Add and Multiply only
    sol_manip = agm(1, 16, nloads=2)
    for node_type in (AGNodes.Add, AGNodes.Multiply):
        sol_manip.add_node_type(node_type)

    # fitness-predictor manipulator sized to the number of configurations
    pred_manip = fpm(32, X.shape[0])

    # pairwise atomic training data built from configurations and energies
    training_data = PairwiseAtomicTrainingData(potential_energy=Y,
                                               configurations=X)

    # fitness metric for pairwise potentials
    atomic_regressor = PairwiseAtomicPotential()

    # evolve on the islands and require convergence below EPSILON
    islmngr = SerialIslandManager(N_ISLANDS,
                                  solution_training_data=training_data,
                                  solution_manipulator=sol_manip,
                                  predictor_manipulator=pred_manip,
                                  fitness_metric=atomic_regressor)
    assert islmngr.run_islands(MAX_STEPS,
                               EPSILON,
                               step_increment=N_STEPS,
                               make_plots=False)
def compare_ag_implicit(X, Y, operator, params):
    """does const symbolic regression and tests convergence

    Builds a known-true implicit equation (operator applied at command 3,
    minus the appended Y column) and asserts the island search reaches a
    fitness within 5% of that equation's own true fitness.
    """
    # convert to single array: Y becomes the last column of X so the
    # implicit relation can reference it like any other variable
    X = np.hstack((X, Y.reshape([-1, 1])))

    # make solution manipulator with the full node set
    sol_manip = agm(X.shape[1], 16, nloads=2)
    sol_manip.add_node_type(AGNodes.Add)
    sol_manip.add_node_type(AGNodes.Subtract)
    sol_manip.add_node_type(AGNodes.Multiply)
    sol_manip.add_node_type(AGNodes.Divide)
    sol_manip.add_node_type(AGNodes.Exp)
    sol_manip.add_node_type(AGNodes.Log)
    sol_manip.add_node_type(AGNodes.Sin)
    sol_manip.add_node_type(AGNodes.Cos)
    sol_manip.add_node_type(AGNodes.Abs)
    sol_manip.add_node_type(AGNodes.Pow)

    # make true equation: commands 0-2 load the data columns, command 3
    # applies the operator under test (params index earlier commands), and
    # the final command subtracts the appended Y column (command 2)
    equ = sol_manip.generate()
    equ.command_list[0] = (AGNodes.LoadData, (0, ))
    equ.command_list[1] = (AGNodes.LoadData, (1, ))
    equ.command_list[2] = (AGNodes.LoadData, (2, ))
    equ.command_list[3] = (operator, params)
    equ.command_list[-1] = (AGNodes.Subtract, (3, 2))

    # make predictor manipulator sized to the number of samples
    pred_manip = fpm(32, X.shape[0])

    # make training data
    training_data = ImplicitTrainingData(X)

    # make fitness metric
    implicit_regressor = ImplicitRegression()

    # make and run island manager
    islmngr = SerialIslandManager(N_ISLANDS,
                                  solution_training_data=training_data,
                                  solution_manipulator=sol_manip,
                                  predictor_manipulator=pred_manip,
                                  fitness_metric=implicit_regressor)
    # convergence threshold: 5% above the true equation's fitness, plus a
    # tiny absolute floor in case that fitness is exactly zero
    epsilon = 1.05 * islmngr.isles[0].solution_fitness_true(equ) + 1.0e-10
    assert islmngr.run_islands(MAX_STEPS,
                               epsilon,
                               step_increment=N_STEPS,
                               make_plots=False)
# Example #4
def compare_ag_explicit(X, Y, operator, params):
    """Run explicit symbolic regression against a known true equation.

    Asserts the island search converges to within 5% of the true
    equation's own fitness.
    """
    # solution manipulator with the full node set
    sol_manip = agm(X.shape[1], 16, nloads=2)
    for node_type in (AGNodes.Add, AGNodes.Subtract, AGNodes.Multiply,
                      AGNodes.Divide, AGNodes.Exp, AGNodes.Log,
                      AGNodes.Sin, AGNodes.Cos, AGNodes.Abs, AGNodes.Pow):
        sol_manip.add_node_type(node_type)

    # true equation: load the first two columns, then apply the operator
    equ = sol_manip.generate()
    equ.command_list[0] = (AGNodes.LoadData, (0, ))
    equ.command_list[1] = (AGNodes.LoadData, (1, ))
    equ.command_list[-1] = (operator, params)

    # fitness-predictor manipulator sized to the number of samples
    pred_manip = fpm(32, Y.shape[0])

    # training data -- targets must be a column vector
    Y = Y.reshape([-1, 1])
    training_data = ExplicitTrainingData(X, Y)

    # fitness metric
    explicit_regressor = StandardRegression()

    # island manager
    islmngr = SerialIslandManager(N_ISLANDS,
                                  solution_training_data=training_data,
                                  solution_manipulator=sol_manip,
                                  predictor_manipulator=pred_manip,
                                  fitness_metric=explicit_regressor)

    # convergence threshold: 5% above the true equation's fitness, plus a
    # tiny absolute floor in case that fitness is exactly zero
    epsilon = 1.05 * islmngr.isles[0].solution_fitness_true(equ) + 1.0e-10

    assert islmngr.run_islands(MAX_STEPS,
                               epsilon,
                               step_increment=N_STEPS,
                               make_plots=False)
# Example #5
# ag.command_list[-1] = (AGNodes.Subtract, (5, 4))
#
# print(ag)
# print(ag.latexstring())
# # for x, y in zip(X[fp.indices, :], Y[fp.indices, :]):
# #     print(x, y, ag.evaluate_deriv(x), ag.evaluate_deriv(x)*y)
#
# print(fp.fit_func(ag, X, Y, False))

# build a C++-backed agraph manipulator, generate a graph, and dump its
# command list (node-type codes 2 and 3 enabled)
ac = AGraphCpp.AGraphCppManipulator(2, 16, nloads=2)
ac.add_node_type(2)
ac.add_node_type(3)
gc = ac.generate()
gc_list, _ = ac.dump(gc)

# python-side manipulator with constant optimization enabled
a = agm(2, 16, nloads=2, constant_optimization=True)
a.add_node_type(AGNodes.Add)
a.add_node_type(AGNodes.Subtract)

# translate the dumped list for the python manipulator: commands with node
# code 1 and parameter -1 are rewritten with a (None,) parameter --
# presumably unset load-constant slots; confirm against the dump format.
# BUGFIX: compare ints with ==, not `is` -- identity of int literals is an
# implementation detail and raises SyntaxWarning on CPython >= 3.8.
for i, (node, node_params) in enumerate(gc_list):
    if node == 1 and node_params[0] == -1:
        gc_list[i] = (node, (None, ))
g = a.load(gc_list)

#print(g)
#print(gc)

# sample data: 100 points folded into two columns; y linear in column 0
x = np.linspace(1, 100, 100)
x = x.reshape((-1, 2))
y = x[:, 0] + 50
# Example #6
def test_const_opt_agraph_implicit():
    """test optimization code

    Builds a gene whose functional form matches a known quadratic surface,
    lets the implicit regressor fit its constants, and checks the fitted
    values are within 10% (relative) of the true ones.
    """
    print("-----test_const_opt_agraph_implicit-----")
    # get independent vars
    x_true = snake_walk()

    # make solutions: y is a full quadratic in the two input columns with
    # random coefficients drawn from [10, 100)
    consts = np.random.rand(6) * 90 + 10
    print("CONSTANTS: ", consts[1:])
    y = consts[0] + consts[1] * x_true[:, 0] + consts[2] * x_true[:, 1] + \
        consts[3] * x_true[:, 0] * x_true[:, 0] + \
        consts[4] * x_true[:, 1] * x_true[:, 1] + \
        consts[5] * x_true[:, 0] * x_true[:, 1]
    # append y as the last column so the implicit relation f(x0, x1, y) = 0
    # can be expressed over a single array
    x_true = np.hstack((x_true, y.reshape((-1, 1))))

    # create manipulator to set things up
    sol_manip = agm(x_true.shape[1], 23, nloads=2)
    sol_manip.add_node_type(AGNodes.Add)
    sol_manip.add_node_type(AGNodes.Subtract)
    sol_manip.add_node_type(AGNodes.Multiply)

    # create gene with proper functional form
    # commands 0-5: the six constants to be fitted
    sol = sol_manip.generate()
    sol.command_list[0] = (AGNodes.LoadConst, (None, ))
    sol.command_list[1] = (AGNodes.LoadConst, (None, ))
    sol.command_list[2] = (AGNodes.LoadConst, (None, ))
    sol.command_list[3] = (AGNodes.LoadConst, (None, ))
    sol.command_list[4] = (AGNodes.LoadConst, (None, ))
    sol.command_list[5] = (AGNodes.LoadConst, (None, ))

    # commands 6-7: the two input columns; 8-10: x0^2, x1^2, x0*x1
    sol.command_list[6] = (AGNodes.LoadData, (0, ))
    sol.command_list[7] = (AGNodes.LoadData, (1, ))
    sol.command_list[8] = (AGNodes.Multiply, (6, 6))
    sol.command_list[9] = (AGNodes.Multiply, (7, 7))
    sol.command_list[10] = (AGNodes.Multiply, (6, 7))

    # commands 11-15: each non-leading constant times its term
    sol.command_list[11] = (AGNodes.Multiply, (1, 6))
    sol.command_list[12] = (AGNodes.Multiply, (2, 7))
    sol.command_list[13] = (AGNodes.Multiply, (3, 8))
    sol.command_list[14] = (AGNodes.Multiply, (4, 9))
    sol.command_list[15] = (AGNodes.Multiply, (5, 10))

    # commands 16-20: accumulate the sum; 21 loads y; 22 gives the
    # residual (sum - y), which should be identically zero on the data
    sol.command_list[16] = (AGNodes.Add, (0, 11))
    sol.command_list[17] = (AGNodes.Add, (16, 12))
    sol.command_list[18] = (AGNodes.Add, (17, 13))
    sol.command_list[19] = (AGNodes.Add, (18, 14))
    sol.command_list[20] = (AGNodes.Add, (19, 15))
    sol.command_list[21] = (AGNodes.LoadData, (2, ))
    sol.command_list[22] = (AGNodes.Subtract, (20, 21))
    # print(sol.latexstring())
    sol.set_constants(consts)

    # create training data
    training_data = ImplicitTrainingData(x_true)

    # create fitness metric
    implicit_regressor = ImplicitRegression(required_params=3)

    # fit the constants -- evaluating fitness is expected to run the
    # constant optimization (verified by the accuracy checks below)
    t0 = time.time()

    implicit_regressor.evaluate_fitness(sol, training_data)

    t1 = time.time()
    print("fit time: ", t1 - t0, "seconds")

    # make sure constants are close; consts[0] is excluded from the check
    # -- presumably the leading constant is not recoverable by implicit
    # regression (TODO confirm)
    c_fit = sol.constants
    print(
        "average accuracy: ",
        np.mean(
            np.abs(np.array(c_fit[1:]) - np.array(consts[1:])) /
            np.array(consts[1:])))
    print("FITTED:    ", c_fit[1:])
    for tru, fit in zip(consts[1:], c_fit[1:]):
        print(np.abs(tru - fit) / tru)
        assert np.abs(tru - fit) / tru < 0.1
# Example #7
def test_const_opt_agraph_explicit():
    """test optimization code

    Builds a gene matching a known quadratic surface, lets the explicit
    regressor fit its constants, and checks the fit is near-exact.
    """
    print("-----test_const_opt_agraph_explicit-----")
    # independent variables
    x_true = snake_walk()

    # random true coefficients in [10, 100) and the quadratic they define
    consts = np.random.rand(6) * 90 + 10
    print("CONSTANTS: ", consts)
    col0 = x_true[:, 0]
    col1 = x_true[:, 1]
    y = (consts[0] + consts[1] * col0 + consts[2] * col1 +
         consts[3] * col0 * col0 +
         consts[4] * col1 * col1 +
         consts[5] * col0 * col1)

    # manipulator used only to build the template gene
    sol_manip = agm(x_true.shape[1], 21, nloads=2)
    for node_type in (AGNodes.Add, AGNodes.Multiply):
        sol_manip.add_node_type(node_type)

    # gene with the exact functional form of y
    sol = sol_manip.generate()
    # commands 0-5: the six constants to be fitted
    for slot in range(6):
        sol.command_list[slot] = (AGNodes.LoadConst, (None, ))
    # commands 6-7: input columns; 8-10: x0^2, x1^2, x0*x1
    sol.command_list[6] = (AGNodes.LoadData, (0, ))
    sol.command_list[7] = (AGNodes.LoadData, (1, ))
    sol.command_list[8] = (AGNodes.Multiply, (6, 6))
    sol.command_list[9] = (AGNodes.Multiply, (7, 7))
    sol.command_list[10] = (AGNodes.Multiply, (6, 7))
    # commands 11-15: each non-leading constant times its term
    for slot, (const_cmd, term_cmd) in enumerate(
            [(1, 6), (2, 7), (3, 8), (4, 9), (5, 10)], start=11):
        sol.command_list[slot] = (AGNodes.Multiply, (const_cmd, term_cmd))
    # commands 16-20: accumulate the sum of all terms
    sol.command_list[16] = (AGNodes.Add, (0, 11))
    for slot in range(17, 21):
        sol.command_list[slot] = (AGNodes.Add, (slot - 1, slot - 5))
    sol.set_constants(consts)

    # training data -- targets must be a column vector
    y = y.reshape([-1, 1])
    training_data = ExplicitTrainingData(x_true, y)

    # fitness metric; evaluating fitness also fits the constants
    explicit_regressor = StandardRegression()
    explicit_regressor.evaluate_fitness(sol, training_data)

    # fitted constants should match the true ones almost exactly
    c_fit = sol.constants
    print("FITTED:    ", c_fit)
    print("REL DIFF:  ", (consts - c_fit) / consts)
    for tru, fit in zip(consts, c_fit):
        assert np.abs(tru - fit) < 1e-8