Example no. 1
0
 def fit_models(self, estimation_settings=None, pool_map=map):
     """Fit the constants of all candidate models against this task's data.

     Delegates to the module-level ``fit_models`` function, forwarding the
     task's data, target column index, time index and task type.

     Args:
         estimation_settings: Optional dict of parameter-estimation settings.
             When ``None`` (or an empty dict), the instance-level
             ``self.estimation_settings`` is used instead.
         pool_map: A ``map``-like callable used to distribute the fitting
             work (defaults to the built-in ``map``, i.e. sequential).

     Returns:
         The fitted models; also stored on ``self.models``.
     """
     # ``None`` replaces the previous mutable-default ``{}`` argument
     # (a shared-state pitfall). The falsy check below keeps the old
     # behavior for callers that passed an empty dict explicitly.
     if not estimation_settings:
         estimation_settings = self.estimation_settings
     self.models = fit_models(self.models,
                              self.task.data,
                              self.task.target_variable_index,
                              time_index=self.task.time_index,
                              task_type=self.task.task_type,
                              pool_map=pool_map,
                              verbosity=self.verbosity,
                              estimation_settings=estimation_settings)
     return self.models
Example no. 2
0
def test_parameter_estimation():
    """Fit models to a noiseless linear dataset and pin the resulting errors.

    The data is generated by y = 2*(x + 0.3); with a fixed random seed the
    two sampled models must reproduce the recorded fit errors exactly.
    """
    np.random.seed(1)

    def target(points):
        return 2.0 * (points[:, 0] + 0.3)

    X = np.linspace(-1, 1, 20).reshape(-1, 1)
    Y = target(X).reshape(-1, 1)
    data = np.hstack((X, Y))

    grammar = GeneratorGrammar("""S -> S '+' T [0.4] | T [0.6]
                              T -> 'C' [0.6] | T "*" V [0.4]
                              V -> 'x' [0.5] | 'y' [0.5]""")
    symbols = {"x": ['x'], "start": "S", "const": "C"}

    candidates = generate_models(grammar, symbols, strategy_settings={"N": 2})
    candidates = fit_models(candidates, data,
                            target_variable_index=-1,
                            task_type="algebraic")

    # Errors pinned to the values produced under this fixed seed.
    assert np.abs(candidates[0].get_error() - 0.36) < 1e-6
    assert np.abs(candidates[1].get_error() - 1.4736842) < 1e-6
Example no. 3
0
def test_parameter_estimation_ODE():
    """Fit models on a differential task built from an exactly solvable ODE.

    The dataset satisfies dy/dt = a*y + x for a = 0.4 (with x = exp(a*t) and
    y = (t + B)*exp(a*t)), so the best model should recover the constant 0.4.
    """
    B = -2.56
    a = 0.4
    ts = np.linspace(0.45, 0.87, 5)
    xs = np.exp(a * ts)
    ys = (ts + B) * np.exp(a * ts)
    data = np.hstack((ts.reshape(-1, 1), xs.reshape(-1, 1), ys.reshape(-1, 1)))

    grammar = GeneratorGrammar("""S -> S '+' T [0.4] | T [0.6]
                                T -> V [0.6] | 'C' "*" V [0.4]
                                V -> 'x' [0.5] | 'y' [0.5]""")
    symbols = {"x": ['y', 'x'], "start": "S", "const": "C"}
    np.random.seed(2)
    candidates = generate_models(grammar, symbols, strategy_settings={"N": 5})
    candidates = fit_models(candidates, data, target_variable_index=-1,
                            time_index=0, task_type="differential")

    print("\n", candidates, "\n\nFinal score:")
    for model in candidates:
        print(f"model: {str(model.get_full_expr()):<30}; error: {model.get_error():<15}")

    def check(index, expected_expr, expected_error, tol=1e-9, n=100):
        # Compare only the first n characters of the expression to tolerate
        # small differences in the printed constant's precision.
        assert str(candidates[index].get_full_expr())[:n] == expected_expr[:n]
        assert abs(candidates[index].get_error() - expected_error) < tol

    check(0, "y", 0.7321678286712089)
    check(1, "x", 0.06518775248116751)
    check(2, "x + 0.40026612522043*y", 2.5265334439915307e-09, n=8)
    return
Example no. 4
0
                            V -> 'x' [0.5] | 'y' [0.5]""")
# Grammar symbol mapping: variables, start symbol and constant placeholder.
symbols = {"x": ['y', 'x'], "start": "S", "const": "C"}
# models = generate_models(grammar, symbols, strategy_settings={"N":20})
models = generate_models(grammar, symbols, strategy_settings={"N": 4})

# 3.) discover the right equation
# NOTE(review): T, X, Y are defined earlier in the file — presumably the time
# vector and the state columns; columns are stacked as [time, x, y].
data = np.hstack((T.reshape(-1, 1), X, Y))
models = fit_models(
    models,
    data,
    target_variable_index=-1,
    time_index=0,
    task_type="differential",
    # Alternative optimizer configurations, kept for reference:
    # estimation_settings={"optimizer": 'metamodel', "verbosity": 4}
    # estimation_settings={"optimizer": 'differential_evolution', "verbosity": 1}
    # hyperopt:
    estimation_settings={
        "optimizer": 'hyperopt',
        # "hyperopt_space_fn": hp.quniform,
        "hyperopt_space_fn": hp.qnormal,
        # "hyperopt_space_args": (-13, 13, 1/100),  # step 1/100 does not distinguish 0.123 from 0.124
        "hyperopt_space_args":
        # (mu, sigma, q) for hp.qnormal; step 1/1000 because a 1/100 step
        # does not distinguish e.g. 0.123 from 0.124.
        (0.4, 2, 1 / 1000),
    })

# 4.) print models' results
print("\n", models, "\n\nFinal score:")
for m in models:
    print(f"model: {str(m.get_full_expr()):<30}; error: {m.get_error():<15}")
print("\n While dataset was generated by diff. eq. ẏ = 0.4·y + x   ")