Example 1
File: lccb.py  Project: jmmcd/PODI
def LCEB(fitness_fn, ngens, popsize, st_maxdepth):
    """Linear combination of evolved bases. There is a single
    individual composed of many randomly-generated trees. At each
    step, we take them as bases for a GLM, fit, and look at their
    coefficients. Any tree which has a small coefficient is not
    helping much: replace it with a new randomly-generated tree, and
    repeat.

    FIXME problem: it rewards trees which require huge coefficients,
    ie hardly do anything."""

    y = fitness_fn.train_y

    # make initial population
    pop = [gp.grow(st_maxdepth, random) for i in range(popsize)]
    
    for gen in xrange(ngens):

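        # build the design matrix X: one column of semantics (tree outputs) per tree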
        X = None
        for ind in pop:
            fit, col = fitness_fn.get_semantics(gp.make_fn(ind))
            if (fit != sys.maxint
                and all(np.isfinite(col))):
                pass
            else:
                print("Omitting a column")
                col = np.zeros(len(y))
            if X is None:
                X = col
            else:
                X = np.c_[X, col]

        print("X")
        print(X.shape)
        print(X)
        model = LinearRegression()
        model.fit(X, y)
        coefs = model.coef_
        output = model.predict(X)
        rmse = fitness_fn.rmse(y, output)
        print("rmse", rmse)
        print("coefs", coefs)
        
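        # replace the least useful tree (coefficient closest to zero) with a fresh random tree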
        worst_idx, worst_val = argabsmin(coefs)
        print("worst tree")
        print(pop[worst_idx])
        pop[worst_idx] = gp.grow(st_maxdepth, random)
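The helper argabsmin used at the end of Example 1 is not shown in the snippet. A minimal sketch of what it could look like, inferred only from the call site, assuming it returns the index and value of the coefficient with the smallest absolute value, i.e. the basis tree contributing least to the fitted linear model:

import numpy as np

def argabsmin(xs):
    # Hypothetical helper, inferred from the call site above: find the
    # entry with the smallest absolute value and return (index, value).
    xs = np.asarray(xs)
    idx = int(np.argmin(np.abs(xs)))
    return idx, xs[idx]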
Example 2
def test_grow():
    for i in range(7):
        print(gp.grow(i, random))
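A slightly stricter variant of this smoke test, assuming only that gp.grow returns a non-None tree at every depth (gp and random are imported at module level, as in the snippets above; any deeper structural checks would depend on PODI internals not shown here):

def test_grow_returns_tree():
    # Hedged sketch: asserts only what the smoke test above implies.
    for depth in range(7):
        t = gp.grow(depth, random)
        assert t is not None, "gp.grow produced no tree at depth %d" % depth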
Example 3
def generate_grow_tree_and_fn_maxd(rng, max_depth=3):
    t = gp.grow(max_depth, rng)
    return t, gp.make_fn(t)
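A possible way to call this helper, assuming gp.grow accepts any object exposing the random-module interface (Example 1 passes the random module itself, so a seeded random.Random instance should behave the same way) and that gp.make_fn returns an ordinary callable:

import random

rng = random.Random(42)  # seeded source of randomness for reproducible trees
tree, fn = generate_grow_tree_and_fn_maxd(rng, max_depth=4)
print(tree)              # the grown tree, depth at most 4
# fn is the callable built from the tree by gp.make_fn; the arguments it
# expects depend on PODI's fitness functions and are not shown here.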