def generate_models(self, strategy_settings=None):
    """Generate candidate models for this task and cache them on the instance.

    Parameters
    ----------
    strategy_settings : dict, optional
        Settings forwarded to the module-level ``generate_models`` helper.
        When falsy (``None`` or an empty dict), the instance-level
        ``self.strategy_settings`` is used instead.

    Returns
    -------
    The freshly generated models; also stored in ``self.models``.
    """
    # Fall back to the instance defaults when no settings were supplied.
    settings = strategy_settings if strategy_settings else self.strategy_settings
    self.models = generate_models(
        self.generator,
        self.task.symbols,
        self.strategy,
        settings,
        verbosity=self.verbosity,
    )
    return self.models
def test_generate_models():
    """Model generation from a polynomial grammar template is reproducible.

    With a fixed RNG seed, generating N models must yield exactly the
    expected expression strings, in order.
    """
    np.random.seed(0)
    generator = grammar_from_template(
        "polynomial",
        {"variables": ["'x'", "'y'"], "p_vars": [0.3, 0.7]},
    )
    symbols = {"x": ['x', 'y'], "start": "S", "const": "C"}
    samples = ["C0*y", "C0*x*y**2", "C0*x**2 + C1"]

    models = generate_models(generator, symbols,
                             strategy_settings={"N": len(samples)})

    # Guard against a vacuous pass: the original index loop silently
    # succeeded when fewer models than expected were produced.
    assert len(models) == len(samples)
    for model, expected in zip(models, samples):
        assert str(model) == expected
def test_parameter_estimation():
    """Fitting algebraic models to a linear dataset reproduces known errors.

    Builds a dataset from the ground truth y = 2*(x + 0.3), generates two
    models from a small PCFG, fits them, and checks the resulting errors
    against reference values obtained with this fixed seed.
    """
    np.random.seed(1)

    def target(x):
        # Ground truth: y = 2*(x + 0.3), evaluated on the first column.
        return 2.0 * (x[:, 0] + 0.3)

    X = np.linspace(-1, 1, 20).reshape(-1, 1)
    Y = target(X).reshape(-1, 1)
    data = np.hstack((X, Y))

    grammar = GeneratorGrammar("""S -> S '+' T [0.4] | T [0.6]
T -> 'C' [0.6] | T "*" V [0.4]
V -> 'x' [0.5] | 'y' [0.5]""")
    symbols = {"x": ['x'], "start": "S", "const": "C"}

    models = generate_models(grammar, symbols, strategy_settings={"N": 2})
    models = fit_models(models, data,
                        target_variable_index=-1, task_type="algebraic")

    # Reference errors for seed 1, in generation order.
    expected_errors = [0.36, 1.4736842]
    for model, expected in zip(models, expected_errors):
        assert abs(model.get_error() - expected) < 1e-6
def test_parameter_estimation_ODE():
    """Fitting differential models recovers the expected ODE candidates.

    Constructs the analytic dataset ys = (t+B)*exp(a*t), xs = exp(a*t),
    generates five models from a small PCFG, fits them as differential
    equations (time in column 0), and checks expressions and errors
    against reference values obtained with this fixed seed.
    """
    B = -2.56
    a = 0.4
    ts = np.linspace(0.45, 0.87, 5)
    ys = (ts + B) * np.exp(a * ts)
    xs = np.exp(a * ts)
    # Columns: time, x, y (target).
    data = np.hstack((ts.reshape(-1, 1), xs.reshape(-1, 1), ys.reshape(-1, 1)))

    grammar = GeneratorGrammar("""S -> S '+' T [0.4] | T [0.6]
T -> V [0.6] | 'C' "*" V [0.4]
V -> 'x' [0.5] | 'y' [0.5]""")
    symbols = {"x": ['y', 'x'], "start": "S", "const": "C"}

    np.random.seed(2)
    models = generate_models(grammar, symbols, strategy_settings={"N": 5})
    models = fit_models(models, data, target_variable_index=-1, time_index=0,
                        task_type="differential")

    print("\n", models, "\n\nFinal score:")
    for m in models:
        print(f"model: {str(m.get_full_expr()):<30}; error: {m.get_error():<15}")

    def assert_line(models, i, expr, error, tol=1e-9, n=100):
        # Compare only the first n characters of the expression so that
        # trailing fitted-constant digits do not break the check.
        assert str(models[i].get_full_expr())[:n] == expr[:n]
        assert abs(models[i].get_error() - error) < tol

    assert_line(models, 0, "y", 0.7321678286712089)
    assert_line(models, 1, "x", 0.06518775248116751)
    assert_line(models, 2, "x + 0.40026612522043*y", 2.5265334439915307e-09, n=8)
    # (Removed the redundant trailing bare `return` — a test function
    # implicitly returns None.)
print("Random number for log filename:", random) # Tee("logfile_demo_" + random + ".txt") np.random.seed(2) # 1.) construct dataset T, Ys, Xs, _, a = example_tB_data() # dataset constructed in odes.py X = np.array([Xs]).T Y = np.array([Ys]).T # convert in conformed shape # 2.) generate grammar and models: grammar = GeneratorGrammar("""S -> S '+' T [0.4] | T [0.6] T -> V [0.6] | 'C' "*" V [0.4] V -> 'x' [0.5] | 'y' [0.5]""") symbols = {"x": ['y', 'x'], "start": "S", "const": "C"} # models = generate_models(grammar, symbols, strategy_settings={"N":20}) models = generate_models(grammar, symbols, strategy_settings={"N": 4}) # 3.) discover the right equation data = np.hstack((T.reshape(-1, 1), X, Y)) models = fit_models( models, data, target_variable_index=-1, time_index=0, task_type="differential", # estimation_settings={"optimizer": 'metamodel', "verbosity": 4} # estimation_settings={"optimizer": 'differential_evolution', "verbosity": 1} # hyperopt: estimation_settings={ "optimizer": 'hyperopt', # "hyperopt_space_fn": hp.quniform,