# Example #1
# 0
def get_symbolic_model(f, dim_x, n_points=100, x_range=(0, 1)):
    """Grid-search hyperparameter pairs for a faithful symbolic model of ``f``.

    Runs ``symbolic_modeling`` for every ordered pair (k, l) of configurations
    in the hyperparameter dictionary returned by ``load_H``, stopping the
    whole search as soon as a fitted model reaches the loss tolerance, then
    scores the best model on a fresh uniform random sample.

    Parameters
    ----------
    f : callable
        Target function; must accept an ``(n_points, dim_x)`` array and
        return something reshapeable to ``(n_points, 1)``.
    dim_x : int
        Input dimensionality of ``f``.
    n_points : int, optional
        Number of random evaluation points (default 100).
    x_range : sequence of two numbers, optional
        Sampling interval bounds (default ``(0, 1)``; tuple instead of the
        original mutable list default).

    Returns
    -------
    (model, float)
        The lowest-loss symbolic model and its R-squared on the fresh sample.
    """
    H = load_H()
    loss_tol = load_hyperparameters()['loss_tol']
    faithful_models = []
    losses = []

    for k in range(len(H)):
        for l in range(len(H)):
            print("===================================================================================================")
            print("Testing Hyperparameter Configuration k =  ", k + 1 , " ; l = ", l+1)
            faithful_model, loss = symbolic_modeling(f, dim_x,
                                                      H['hyper_' + str(k + 1)][1], H['hyper_' + str(l + 1)][1],
                                                      H['hyper_' + str(k + 1)][0], H['hyper_' + str(l + 1)][0],
                                                      n_points=n_points, x_range=x_range)

            faithful_models.append(faithful_model)
            losses.append(loss)

            if losses[-1] <= loss_tol:
                print("==========The desired loss was achieved so the algorithm stopped==========")
                break
        else:
            continue
        # BUG FIX: the original `break` only exited the inner loop, so the
        # outer k-loop kept testing configurations after the tolerance was
        # already met. The for/else above propagates the break outward.
        break

    # Score the best (lowest-loss) model on a fresh random sample.
    best_model = np.argmin(np.array(losses))
    X = np.random.uniform(x_range[0], x_range[1], dim_x * n_points)
    X = X.reshape((n_points, dim_x))
    Y_true = f(X).reshape((-1, 1))
    Y_est = faithful_models[best_model].evaluate(X).reshape((-1, 1))
    R2_perf = compute_Rsquared(Y_true, Y_est)

    return faithful_models[best_model], R2_perf
# Example #2
# 0
def symbolic_regressor(f, npoints, xrange):
    """Fit a gplearn ``SymbolicRegressor`` to ``f`` on a 1-D grid and return
    the simplified sympy expression plus its R-squared fit quality.

    Parameters
    ----------
    f : callable
        Target function; evaluated on an ``(npoints, 1)`` column of inputs.
    npoints : int
        Number of evenly spaced sample points.
    xrange : sequence of two numbers
        Lower/upper bound of the sampling interval.

    Returns
    -------
    (sympy expression, float)
        The fitted expression (in the symbol ``x``) and its R-squared score.
    """
    X = np.linspace(xrange[0], xrange[1], npoints).reshape((-1, 1))
    y = f(X)

    est_gp = SymbolicRegressor(population_size=5000,
                               generations=20, stopping_criteria=0.01,
                               p_crossover=0.7, p_subtree_mutation=0.1,
                               p_hoist_mutation=0.05, p_point_mutation=0.1,
                               max_samples=0.9, verbose=1,
                               parsimony_coefficient=0.01, random_state=0)

    est_gp.fit(X, y)

    # gplearn stores the winning program as a prefix-notation string.
    sym_expr = str(est_gp._program)

    # Map gplearn's operator names onto sympy-evaluable callables.
    converter = {
        'sub': lambda x, y: x - y,
        'div': lambda x, y: x / y,
        'mul': lambda x, y: x * y,
        'add': lambda x, y: x + y,
        'neg': lambda x: -x,
        'pow': lambda x, y: x ** y
    }

    x, X0 = symbols('x X0')
    sym_reg = simplify(sympify(sym_expr, locals=converter))
    # gplearn names the single feature X0; rename it to the conventional x.
    sym_reg = sym_reg.subs(X0, x)

    Y_true = y.reshape((-1, 1))
    # FIX: the original re-parsed the expression with sympify(str(sym_reg))
    # on every loop iteration — a redundant, costly round-trip. Substituting
    # into the already-built expression is equivalent.
    Y_est = np.array([sym_reg.subs(x, X[k]) for k in range(len(X))]).reshape((-1, 1))

    R2_perf = compute_Rsquared(Y_true, Y_est)

    return sym_reg, R2_perf
def get_symbolic_model(f, npoints, xrange):
    """Sweep the hyperparameter configurations one by one, keep every fitted
    symbolic model, and return the lowest-loss one with its R-squared score
    on an evenly spaced grid.

    Parameters
    ----------
    f : callable
        Target function; evaluated on an ``(npoints, 1)`` column of inputs.
    npoints : int
        Number of evenly spaced evaluation points.
    xrange : sequence of two numbers
        Lower/upper bound of the sampling interval.

    Returns
    -------
    (model, float)
        Best symbolic model and its R-squared performance.
    """
    space = load_hyperparameter_config()
    # NOTE(review): 10e-5 equals 1e-4 — confirm 1e-5 was not intended.
    loss_threshold = 10e-5

    candidates = []
    candidate_losses = []

    for idx in range(1, len(space) + 1):
        config = space['hyper_' + str(idx)]
        model, loss = symbolic_modeling(f, config[1], config[0], npoints, xrange)

        candidates.append(model)
        candidate_losses.append(loss)

        # Stop early once a fit is good enough.
        if loss <= loss_threshold:
            break

    winner = np.argmin(np.array(candidate_losses))

    # Score the winning model on a fresh evenly spaced grid.
    X = np.linspace(xrange[0], xrange[1], npoints).reshape((-1, 1))
    Y_true = f(X).reshape((-1, 1))
    Y_est = candidates[winner].evaluate(X).reshape((-1, 1))

    R2_perf = compute_Rsquared(Y_true, Y_est)

    return candidates[winner], R2_perf