Code Example #1
    def evolve(self, popsize=300, ngen=40):
        """run back test with genious algorithm to optimize

        :param popsize: population size of genious algorithm
        :param ngen: number of generations"""
        low = -1
        high = 1
        prototype = [
            np.random.randint(low, high) for _ in range(self.index_rows_number)
        ]
        bounds = [[low, high] for _ in range(self.index_rows_number)]
        # self.test(prototype)

        p = Population(prototype=prototype,
                       gene_bounds=bounds,
                       fitness_func=self.test)
        p.populate(popsize=popsize)
        p.evolve(ngen=ngen)
        print(p.best.genes)
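The method above depends on class attributes (self.index_rows_number and self.test) that are not shown here. For reference, this is a minimal standalone sketch of the same Population workflow, with an assumed number of genes and a dummy fitness function standing in for self.test:

import numpy as np
from tinyevolver import Population

N_ROWS = 10  # stand-in for self.index_rows_number


def dummy_fitness(ind):
    # Placeholder for self.test: here, individuals with larger gene sums are fitter.
    return sum(ind)


# Note: np.random.randint(-1, 1) draws from {-1, 0} (the upper bound is exclusive).
prototype = [np.random.randint(-1, 1) for _ in range(N_ROWS)]
bounds = [[-1, 1] for _ in range(N_ROWS)]

p = Population(prototype=prototype, gene_bounds=bounds, fitness_func=dummy_fitness)
p.populate(popsize=50)
p.evolve(ngen=10)
print(p.best.genes)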
Code Example #2
File: test.py  Project: olliemath/Python-TinyEvolver
from tinyevolver import Population
import random

random.seed(1234)
proto = [1.0 for _ in range(100)]
bounds = [(0, random.uniform(0, 10)) for _ in range(100)]

pop = Population(proto, bounds, sum)
pop.populate(500)
pop.evolve(100)
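test.py stops after evolving. A hypothetical follow-up (not part of the project file) to inspect the result, reusing the pop.best accessor shown in the other examples:

best = pop.best
print(sum(best))                    # fitness of the best individual (the fitness here is just sum)
print(sum(hi for _, hi in bounds))  # theoretical maximum if every gene hit its upper bound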
Code Example #3
from tinyevolver import Population

"""
    In this example we consider individuals consisting entirely of random
    booleans. The more 'True's the better (fitter) the individual.
"""

# Our prototype individual is a list of (arbitrary) boolean genes
prototype = [False for _ in range(100)]

# This statement initialises a Population instance with the appropriate methods.
p = Population(prototype=prototype, gene_bounds=None, fitness_func=sum)

# Create the desired number of individuals in the population:
p.populate(popsize=300)

# Evolve! It's as simple as that.
p.evolve(verbose=True)
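Because the genes are booleans and the fitness is sum, the outcome is easy to check. A short, assumed follow-up:

# Count how many of the 100 genes the evolver managed to flip to True.
print(sum(p.best.genes))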
Code Example #4
import random

from tinyevolver import Population

# matplotlib is optional; fall back to None so the plotting block below is skipped cleanly.
try:
    import matplotlib.pyplot as plt
except ImportError:
    plt = None

# Three genes: amplitude, integer exponent and offset of the model a * t**b + c.
# (The prototype values are assumed here so that the snippet runs standalone.)
prototype = [0.5, 1, 1.0]
bounds = [(0.0, 1.0), (0, 3), (0, 5.0)]

# How fit an individual is will depend on how well it approximates the
# data. So let's cook up some data:
times = range(20)
data = [0.5 * time ** 2 + 1.0 + random.uniform(0, 10) for time in times]


def fitness(ind):
    curve = [ind[0] * time ** ind[1] + ind[2] for time in times]
    square_error = [(f - d) ** 2 for f, d in zip(curve, data)]
    # More error = less fit
    try:
        return 20.0 / sum(square_error)
    except ZeroDivisionError:
        return float('inf')


# Now to populate and evolve:
p = Population(prototype, bounds, fitness)
p.populate()
p.evolve()

# Let's see how we did:
if plt:
    best_ind = p.best
    best_fit = [best_ind[0] * time ** best_ind[1] + best_ind[2] for time in times]
    plt.plot(times, data)
    plt.plot(times, best_fit)
    plt.show()
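The evolved genes are the parameters of the model a * t**b + c, so they can also be printed and compared with the generating curve 0.5 * t**2 + 1.0. A brief, assumed follow-up:

a, b, c = p.best[0], p.best[1], p.best[2]
print("fitted model: {:.3f} * t**{} + {:.3f}".format(a, b, c))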
Code Example #5
import numpy as np

from tinyevolver import Population

prototype = [0.0 for _ in range(6)]  # this goes up to x^5
bounds = [(-1.0, 1.0) for _ in range(6)]

# Construct the noisy sine curve to be approximated
xs = np.linspace(0, 3)
ys = np.sin(xs) + np.random.normal(0.0, 0.1, 50)


# An individual with a greater mean-absolute-error is less fit:
def fitness(ind):
    return -np.mean(np.abs(ys - sum(ind[n] * xs ** n for n in range(6))))


# Now we're good to go:
p = Population(prototype, bounds, fitness)
p.populate(500)
p.evolve(ngen=50)


# If you have matplotlib installed, we can plot the best result:
try:
    import matplotlib.pyplot as plt
    best_ind = p.best
    plt.plot(xs, ys)
    plt.plot(xs, sum(best_ind[n] * xs ** n for n in range(6)))
    plt.show()

except ImportError:
    pass
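As a rough sanity check (not part of the original example), the evolved coefficients can be compared with an ordinary least-squares polynomial fit; np.polyfit returns coefficients from the highest power down, so the result is reversed to match the ascending order used above:

print("evolved coefficients:", p.best.genes)
print("least-squares fit:   ", list(np.polyfit(xs, ys, 5)[::-1]))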