Code example #1
0
def symbolicRegr(funcs):
    """Fit a gplearn symbolic regressor on the module-level data set.

    Builds a ``SymbolicRegressor`` from the module-level hyperparameters
    (``popSize``, ``noGens``, ``crossoverProb``, ``mutationProb``, ``nsae``),
    fits it on ``depVar``/``yVar``, prints the best evolved program, and
    returns the fitted model.

    Parameters
    ----------
    funcs : list
        Function set handed to gplearn (primitive names and/or function
        objects created with ``make_function``).

    Returns
    -------
    genetic.SymbolicRegressor
        The fitted regressor.
    """
    settings = {
        'population_size': popSize,
        'generations': noGens,
        'tournament_size': 20,
        'const_range': None,          # evolve expressions without constants
        'function_set': funcs,
        'metric': nsae,               # custom fitness defined at module level
        'p_crossover': crossoverProb,
        'p_subtree_mutation': mutationProb,
        'p_hoist_mutation': mutationProb,
        'p_point_mutation': mutationProb,
        'verbose': 0,
    }
    regressor = genetic.SymbolicRegressor(**settings)
    # gplearn expects a 2-D feature matrix, hence the (-1, 1) reshape.
    regressor.fit(np.array(depVar).reshape(-1, 1), np.array(yVar))
    best = regressor._program
    print(f'Expression: {best}\nFitness: {best.fitness_}\nLength: {best.length_}')
    return regressor
Code example #2
0
def get_predictor(train_x, train_y, predictor_parameter_type=None):
    """Train and return a fitted gplearn ``SymbolicRegressor``.

    Parameters
    ----------
    train_x, train_y : array-like
        Training features and targets, passed straight to ``fit``.
    predictor_parameter_type : str, optional
        Key identifying the GP parameter set.  When omitted, the
        ``[GP] setting`` configuration value is used.  The original code
        evaluated ``get_setting_cfg().get('GP', 'setting')`` as the
        parameter default, which Python evaluates once at import time —
        freezing the config value and failing at import if the config is
        unavailable.  Resolving it at call time fixes both issues.

    Returns
    -------
    gp.SymbolicRegressor
        The fitted regressor.
    """
    if predictor_parameter_type is None:
        # Read the configured default lazily, at call time.
        predictor_parameter_type = get_setting_cfg().get('GP', 'setting')
    parameter = get_gp_parameter(predictor_parameter_type)
    mltd_predictor = gp.SymbolicRegressor(
        function_set=cf.get_custom_function_list(), **parameter['predictor_parameter'])
    return mltd_predictor.fit(train_x, train_y)
Code example #3
0
# Number of training samples; the commented-out lines below show how the
# training set was originally sampled from a uniform distribution.
N = 20

# X_train = rng.uniform(xmin, xmax, N).reshape(N, 1)
# y_train = np.ravel(X_train)

def protexp(x):
    """Protected exponential: exp(-|x|), bounded in (0, 1] for all real x."""
    magnitude = np.absolute(x)
    return np.exp(-magnitude)

# Wrap the protected exponential so gplearn can use it as a unary primitive.
nexp = gf.make_function(protexp, 'negabsexp', 1)
# Function set: built-in primitives plus the custom negabsexp.
f_s = ['add', 'sub', 'mul', 'div', 'inv', 'abs', nexp, 'log']

est_gp = gl.SymbolicRegressor(init_depth=(3, 6), population_size=4000,
                              tournament_size=20,
                              generations=30, stopping_criteria=0.01,
                              p_crossover=0.7, p_subtree_mutation=0.1,
                              p_hoist_mutation=0.05, p_point_mutation=0.1,
                              max_samples=0.9, verbose=1,
                              parsimony_coefficient=0.01, random_state=0,
                              function_set=f_s)
est_gp.fit(X_train, y_train)

# Predict on the evaluation grid (column vector in, original shape out).
y_gp = est_gp.predict(np.c_[X_traintemp.ravel()]).reshape(X_traintemp.shape)

# The fitted best program is stored in the private attribute `_program`;
# `est_gp.program` does not exist and raises AttributeError.
print(est_gp._program)

plt.plot(X_traintemp, y_traintemp, '.')
plt.plot(X_traintemp, y_gp)
plt.title('SR on raw data')

plt.show()
Code example #4
0
    with np.errstate(over='ignore'):
        return np.where(np.abs(x) < 100, np.exp(-x), 0.)


# Wrap the protected exponentials as gplearn unary primitives (arity 1).
pexp = gf.make_function(_protected_exponent, 'exp', 1)
pnexp = gf.make_function(_protected_negexponent, 'nexp', 1)
# Function set: arithmetic built-ins plus the two custom exponentials.
f_s = ['add', 'sub', 'mul', 'div', pexp, pnexp, 'neg']

est_gp = gl.SymbolicRegressor(init_depth=(2, 4),
                              population_size=3000,
                              tournament_size=20,
                              const_range=(-40, 40),
                              generations=20,
                              stopping_criteria=0.01,
                              p_crossover=0.7,
                              p_subtree_mutation=0.1,
                              warm_start=True,
                              p_hoist_mutation=0.05,
                              p_point_mutation=0.1,
                              max_samples=0.9,
                              verbose=1,
                              random_state=0,
                              function_set=f_s)
est_gp.fit(inTrain, outTrain)

# Predict on the evaluation grid (column vector in, original shape out).
y_gp = est_gp.predict(np.c_[domaingrid.ravel()]).reshape(domaingrid.shape)

# The fitted best program lives in the private attribute `_program`;
# `est_gp.program` does not exist on SymbolicRegressor and raises
# AttributeError.
print(est_gp._program)

plt.plot(inTrain, outTrain)
plt.plot(domaingrid, y_gp)
Code example #5
0
rng = check_random_state(0)

# Training samples: 50 points in [-1, 1]^2 with a known ground-truth target.
X_train = rng.uniform(-1, 1, 100).reshape(50, 2)
y_train = X_train[:, 0]**2 - X_train[:, 1]**2 + X_train[:, 1] - 1

# Testing samples drawn from the same distribution / target function.
X_test = rng.uniform(-1, 1, 100).reshape(50, 2)
y_test = X_test[:, 0]**2 - X_test[:, 1]**2 + X_test[:, 1] - 1

est_gp = genetic.SymbolicRegressor(population_size=50,
                                   generations=200,
                                   stopping_criteria=0.01,
                                   p_crossover=0.7,
                                   p_subtree_mutation=0.1,
                                   p_hoist_mutation=0.05,
                                   p_point_mutation=0.1,
                                   max_samples=0.9,
                                   verbose=1,
                                   parsimony_coefficient=0.01,
                                   random_state=0)
est_gp.fit(X_train, y_train)

print(est_gp._program)

# Dump every program of the final generation, one per line.
with open("../Data/programs_test", "w") as f:
    for i in est_gp._programs[-1]:
        # A gplearn _Program is not a str — file.write(i) raises TypeError.
        # str() renders the expression tree, which is what we want to save.
        f.write(str(i))
        f.write("\n")
for i in est_gp._programs[-1]:
Code example #6
0
File: gp.py  Project: thaije/Natural-Computing
# Function set for the GP run.  NOTE(review): `exp` is unquoted, so it must
# be a custom gplearn function object defined elsewhere in this file —
# gplearn has no built-in 'exp' primitive; confirm it exists in scope.
function_set = ['add', 'sub', 'mul', 'div', 'log', 'sin', 'cos', exp]

# Create summed absolute error as the fitness metric (lower is better;
# the `w` sample-weight argument is required by gplearn but unused here).
_sae = lambda y, t, w: np.sum(np.abs(y - t))
sae = fitness.make_fitness(_sae, False)

n_generations = 50
# Initialize the genetic-programming regressor.  It is fitted for a single
# generation first; the loop below then warm-starts one generation at a
# time so that per-generation statistics can be recorded.
est_gp = genetic.SymbolicRegressor(population_size=1000,
                                   generations=1,
                                   stopping_criteria=0.01,
                                   p_crossover=0.7,
                                   p_subtree_mutation=0,
                                   p_hoist_mutation=0,
                                   p_point_mutation=0,
                                   max_samples=0.9,
                                   verbose=1,
                                   parsimony_coefficient=0,
                                   random_state=0,
                                   metric=sae,
                                   function_set=function_set)
est_gp.fit(x, y)

# Generate generations and record the best program's fitness and size.
# NOTE(review): rebinding `fitness` here shadows the gplearn `fitness`
# module used above (fitness.make_fitness); consider renaming this list
# (e.g. fitness_history) if the module is needed again later in the file.
fitness = []
size = []
for i in range(2, n_generations + 1):
    # Warm start: keep the evolved population and run one more generation.
    est_gp.set_params(generations=i, warm_start=True)
    est_gp.fit(x, y)
    fitness.append(est_gp._program.raw_fitness_)