Example #1
def test_sus_selection_offset():
    ''' Test of SUS selection with a non-default offset '''
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    # evaluate population and negate fitness of second individual
    pop = Individual.evaluate_population(pop)
    pop[1].fitness = -pop[1].fitness

    # now we try to select normally; this should raise a ValueError
    # due to the negative fitness
    with pytest.raises(ValueError):
        selector = ops.sus_selection(pop)
        selected = next(selector)
    # setting the offset to +3 makes selection work:
    # it adds 3 to each fitness value, bringing the second
    # individual's fitness up to 0.
    selector = ops.sus_selection(pop, offset=3)

    # we expect the first individual to always be selected
    # since the new zero point is now -3.
    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])

    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])
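
The offset arithmetic is easy to check by hand. Below is a minimal, standalone sketch (plain numpy, not leap_ec's implementation) of how a constant offset shifts fitness-proportionate selection weights; shifted_weights is a hypothetical helper added for illustration:

import numpy as np

def shifted_weights(fitnesses, offset=0.0):
    """Hypothetical helper: selection weights after adding a constant offset."""
    shifted = np.asarray(fitnesses, dtype=float) + offset
    if np.any(shifted < 0):
        raise ValueError("negative fitness after offset")
    return shifted / shifted.sum()

# Fitnesses from the test above: 0 for [0, 0, 0], -3 after negation.
print(shifted_weights([0, -3], offset=3))  # [1. 0.] -> first always selected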
Example #2
def test_tournament_selection2():
    """If there are just two individuals in the population, and we set select_worst=True,
    then binary tournament selection will select the worse one with 75% probability."""
    # Make a population where binary tournament_selection has an obvious
    # reproducible choice
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]
    # Assign a unique identifier to each individual
    pop[0].id = 0
    pop[1].id = 1

    # We first need to evaluate all the individuals so that
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)
    selected = ops.tournament_selection(pop, select_worst=True)

    N = 1000
    p_thresh = 0.1
    observed_dist = statistical_helpers.collect_distribution(
        lambda: next(selected).id, samples=N)
    expected_dist = {pop[0].id: 0.75 * N, pop[1].id: 0.25 * N}
    print(f"Observed: {observed_dist}")
    print(f"Expected: {expected_dist}")
    assert (statistical_helpers.stochastic_equals(expected_dist,
                                                  observed_dist,
                                                  p=p_thresh))
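
The 75% expectation follows from drawing two competitors uniformly with replacement: the worse individual loses only when both draws happen to be the better one. A quick standalone check (a sketch of the probability, not leap_ec's operator):

import random

# Closed form: P(worse wins) = 1 - P(both draws are the better individual)
print(1 - 0.5 ** 2)  # 0.75

def binary_tournament_select_worst(fitnesses):
    """Sketch: draw two indices with replacement, return the worse one."""
    a, b = random.choices(range(len(fitnesses)), k=2)
    return a if fitnesses[a] <= fitnesses[b] else b

samples = [binary_tournament_select_worst([0, 3]) for _ in range(100_000)]
print(samples.count(0) / len(samples))  # approximately 0.75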
Example #3
def test_random_selection1():
    """If there are just two individuals in the population, then random
    selection will select the better one with 50% probability."""
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]
    # Assign a unique identifier to each individual
    pop[0].id = 0
    pop[1].id = 1

    # We first need to evaluate all the individuals so that
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)
    selected = ops.random_selection(pop)

    N = 1000
    p_thresh = 0.1
    observed_dist = statistical_helpers.collect_distribution(
        lambda: next(selected).id, samples=N)
    expected_dist = {pop[0].id: 0.5 * N, pop[1].id: 0.5 * N}
    print(f"Observed: {observed_dist}")
    print(f"Expected: {expected_dist}")
    assert (statistical_helpers.stochastic_equals(expected_dist,
                                                  observed_dist,
                                                  p=p_thresh))
Example #4
def test_sus_selection_shuffle():
    ''' Test of a stochastic case of SUS selection '''
    # Make a population where sus_selection has an obvious
    # reproducible choice
    # Proportions here should be 1/4 and 3/4, respectively
    pop = [
        Individual(np.array([0, 1, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    # Assign a unique identifier to each individual
    pop[0].id = 0
    pop[1].id = 1

    # We first need to evaluate all the individuals so that
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)
    selected = ops.sus_selection(pop)

    N = 1000
    p_thresh = 0.1
    observed_dist = statistical_helpers.collect_distribution(
        lambda: next(selected).id, samples=N)
    expected_dist = {pop[0].id: 0.25 * N, pop[1].id: 0.75 * N}
    print(f"Observed: {observed_dist}")
    print(f"Expected: {expected_dist}")
    assert (statistical_helpers.stochastic_equals(expected_dist,
                                                  observed_dist,
                                                  p=p_thresh))
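
The expected 1/4 and 3/4 proportions fall straight out of the raw fitnesses, which are the ones counts of the two genomes:

fitnesses = [1, 3]                 # ones counts of [0, 1, 0] and [1, 1, 1]
total = sum(fitnesses)
print([f / total for f in fitnesses])  # [0.25, 0.75]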
Example #5
def test_proportional_selection1():
    ''' Test of a deterministic case of proportional selection '''
    # Make a population where proportional_selection has an obvious
    # reproducible choice
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    parents = Individual.evaluate_population(pop)
    # This selection operator will always select the [1, 1, 1] individual since
    # [0, 0, 0] has zero fitness
    selector = ops.proportional_selection(parents)

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
Example #6
def test_naive_cyclic_selection():
    """ Test of the naive deterministic cyclic selection """
    pop = [
        Individual(np.array([0, 0]), problem=MaxOnes()),
        Individual(np.array([0, 1]), problem=MaxOnes())
    ]

    # This selection operator will deterministically cycle through the
    # given population
    selector = ops.naive_cyclic_selection(pop)

    selected = next(selector)
    assert np.all(selected.genome == [0, 0])

    selected = next(selector)
    assert np.all(selected.genome == [0, 1])

    # And now we cycle back to the first individual
    selected = next(selector)
    assert np.all(selected.genome == [0, 0])
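
Behaviorally, this operator is an infinite round-robin over the population in its given order. A standalone equivalent using itertools.cycle (a sketch of the behavior, not leap_ec's code):

from itertools import cycle

pop = [[0, 0], [0, 1]]
selector = cycle(pop)  # deterministic, repeats the population in order
print([next(selector) for _ in range(3)])  # [[0, 0], [0, 1], [0, 0]]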
Example #7
def test_truncation_selection():
    """ Basic truncation selection test"""
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([0, 0, 1]), problem=MaxOnes()),
        Individual(np.array([1, 1, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    # We first need to evaluate all the individuals so that truncation
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)

    truncated = ops.truncation_selection(pop, 2)

    assert len(truncated) == 2

    # Just to make sure, check that the two best individuals from the
    # original population are in the selected population
    assert pop[2] in truncated
    assert pop[3] in truncated
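
Plain truncation is just "sort by fitness, keep the top size". A standalone sketch over bare genomes, using the ones count as fitness:

def truncate(population, size, key):
    """Sketch of truncation selection: keep the `size` best under `key`."""
    return sorted(population, key=key, reverse=True)[:size]

pop = [[0, 0, 0], [0, 0, 1], [1, 1, 0], [1, 1, 1]]
print(truncate(pop, 2, key=sum))  # [[1, 1, 1], [1, 1, 0]]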
Example #8
def test_proportional_selection_custom_key():
    ''' Test of proportional selection with custom evaluation '''
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    def custom_key(individual):
        ''' Returns fitness based on MaxZeros '''
        return np.count_nonzero(individual.genome == 0)

    pop = Individual.evaluate_population(pop)
    selector = ops.proportional_selection(pop, key=custom_key)

    # we expect the first individual to always be selected
    # since its genome is all 0s
    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])

    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])
Example #9
def test_truncation_parents_selection():
    """ Test (mu + lambda), i.e., parents competing with offspring

    Create parent and offspring populations such that each has a "best" individual that will be selected by
    truncation selection.
    """
    parents = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 0]), problem=MaxOnes())
    ]

    parents = Individual.evaluate_population(parents)

    offspring = [
        Individual(np.array([0, 0, 1]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]
    offspring = Individual.evaluate_population(offspring)

    truncated = ops.truncation_selection(offspring, 2, parents=parents)

    assert len(truncated) == 2

    assert parents[1] in truncated
    assert offspring[1] in truncated
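
Passing parents= makes the selection pool the union of parents and offspring, i.e. (mu + lambda) survival. In terms of the truncation sketch from Example #7:

parents   = [[0, 0, 0], [1, 1, 0]]
offspring = [[0, 0, 1], [1, 1, 1]]
# (mu + lambda): parents compete with offspring in one truncation pass
pool = parents + offspring
print(sorted(pool, key=sum, reverse=True)[:2])  # [[1, 1, 1], [1, 1, 0]]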
Example #10
def test_truncation_selection_with_nan1():
    """If truncation selection encounters a NaN and non-NaN fitness
    while maximizing, the non-NaN wins.
    """
    # Make a population where truncation selection has an obvious
    # reproducible choice
    problem = MaxOnes()
    pop = [
        Individual(np.array([0, 0, 0]), problem=problem),
        Individual(np.array([1, 1, 1]), problem=problem)
    ]

    # We first need to evaluate all the individuals so that truncation
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)

    # Now set the "best" to NaN
    pop[1].fitness = nan

    best = ops.truncation_selection(pop, size=1)

    assert pop[0] == best[0]
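
NaN never compares greater (or less) than anything, so a naive max over raw fitnesses can silently misbehave. One way to get the "non-NaN wins" behavior shown above is a key that maps NaN to -inf while maximizing (a sketch, not leap_ec's actual comparison logic):

from math import inf, isnan, nan

def nan_safe_key(fitness):
    """Treat NaN as worse than any real fitness when maximizing."""
    return -inf if isnan(fitness) else fitness

fitnesses = [0, nan]
best = max(range(len(fitnesses)), key=lambda i: nan_safe_key(fitnesses[i]))
print(best)  # 0: the individual with a real fitness wins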
Example #11
def test_proportional_selection_pop_min():
    ''' Test of proportional selection with pop-min offset '''
    # Create a population of positive-fitness individuals;
    # offsetting fitness by the population minimum ensures the
    # least fit member never gets selected.
    pop = [
        Individual(np.array([0, 1, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    pop = Individual.evaluate_population(pop)

    selector = ops.proportional_selection(pop, offset='pop-min')

    # we expect that the second individual is always selected
    # since the new zero point will be at the minimum fitness
    # of the population
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
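
With offset='pop-min', the population minimum becomes the new zero point, so the least fit individual ends up with zero selection weight. The arithmetic for this population:

import numpy as np

fitnesses = np.array([1.0, 3.0])       # ones counts of [0, 1, 0] and [1, 1, 1]
shifted = fitnesses - fitnesses.min()  # 'pop-min': zero point at the minimum
print(shifted / shifted.sum())         # [0. 1.] -> least fit is never selected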
Example #12
def test_sus_selection_num_points():
    ''' Test of SUS selection with varying `n` random points '''
    # the second individual should always be selected
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    pop = Individual.evaluate_population(pop)
    # a negative number of points should raise a ValueError
    with pytest.raises(ValueError):
        selector = ops.sus_selection(pop, n=-1)
        selected = next(selector)

    # with n = None (default)
    selector = ops.sus_selection(pop, n=None)
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    # with n less than len(population)
    selector = ops.sus_selection(pop, n=1)
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    # with n greater than len(population)
    selector = ops.sus_selection(pop, n=3)
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
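
Under the hood, SUS lays n evenly spaced pointers over the cumulative fitness with a single random starting offset, which is why any n yields the same result when one individual holds all of the fitness mass. A standalone sketch of the pointer placement (not leap_ec's implementation):

import random
import numpy as np

def sus_sketch(fitnesses, n):
    """Sketch of SUS: n evenly spaced pointers, one random start offset."""
    cumulative = np.cumsum(np.asarray(fitnesses, dtype=float))
    step = cumulative[-1] / n
    start = random.uniform(0, step)     # single random offset in (0, step)
    pointers = start + step * np.arange(n)
    return np.searchsorted(cumulative, pointers).tolist()

print(sus_sketch([0, 3], n=3))  # [1, 1, 1]: all mass on the second individual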
Example #13
def test_truncation_selection_with_nan2():
    """If truncation selection encounters a NaN and non-NaN fitness
    while minimizing, the non-NaN wins.
    """
    problem = SpheroidProblem(maximize=False)

    pop = []

    pop.append(Individual(np.array([0]), problem=problem))
    pop.append(Individual(np.array([1]), problem=problem))

    pop = Individual.evaluate_population(pop)

    # First, *normal* selection should yield the [0] genome as the "best"
    best = ops.truncation_selection(pop, size=1)
    assert pop[0] == best[0]

    # But now let's set that best to a NaN, which *should* force the other
    # individual to be selected.
    pop[0].fitness = nan

    best = ops.truncation_selection(pop, size=1)
    assert pop[1] == best[0]
Example #14
def test_sus_selection1():
    ''' Test of a deterministic case of stochastic universal sampling '''
    # Make a population where sus_selection has an obvious
    # reproducible choice
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    pop = Individual.evaluate_population(pop)
    # This selection operator will always choose the [1, 1, 1] individual
    # since [0, 0, 0] has zero fitness
    selector = ops.sus_selection(pop)

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    # run one more time to test shuffle
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
Example #15
import os

from toolz import pipe

from leap_ec import Individual, context, test_env_var
from leap_ec import ops, probe, util
from leap_ec.decoder import IdentityDecoder
from leap_ec.binary_rep.problems import MaxOnes
from leap_ec.binary_rep.initializers import create_binary_sequence
from leap_ec.binary_rep.ops import mutate_bitflip

##############################
# Entry point
##############################
if __name__ == '__main__':
    parents = Individual.create_population(
        5,
        initialize=create_binary_sequence(4),
        decoder=IdentityDecoder(),
        problem=MaxOnes())

    # Evaluate initial population
    parents = Individual.evaluate_population(parents)

    # print initial, random population
    util.print_population(parents, generation=0)

    # When running under the test harness, just run for two generations
    # (we use this to quickly ensure our examples don't suffer bit-rot)
    if os.environ.get(test_env_var, False) == 'True':
        max_generation = 2
    else:
        max_generation = 6
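
The snippet ends before the evolutionary loop itself. LEAP's binary-representation examples typically continue with a toolz.pipe over the operators imported above; the sketch below follows that shape, but the mutate_bitflip keyword is an assumption (its signature has changed across leap_ec versions), so treat it as illustrative rather than the example's actual continuation:

    # Hedged sketch of the usual continuation; mutate_bitflip's keyword
    # argument is an assumption and varies across leap_ec releases.
    for generation in range(1, max_generation + 1):
        offspring = pipe(parents,
                         ops.tournament_selection,
                         ops.clone,
                         mutate_bitflip(expected_num_mutations=1),
                         ops.evaluate,
                         ops.pool(size=len(parents)))
        parents = offspring
        util.print_population(parents, generation=generation)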
Example #16
# This snippet begins mid-file: the statement below is the tail of the
# anneal_std callback registered with inc_generation() further down (its
# parameter here is an assumption, added so the fragment parses).
def anneal_std(generation):
    # this was in the context of the 1/5 success rule, which we've not
    # implemented here.
    # Handbook of EC, B1.3:2
    context['leap']['std'] *= .85


if __name__ == '__main__':
    # Define the real-value bounds for initializing the population. In this
    # case, we define one bounds pair for each of the four genes.

    # the range (-5.12, 5.12) is what Ken De Jong originally used for this
    # problem in his 1975 dissertation, so it is kept for historical reasons.
    bounds = [(-5.12, 5.12), (-5.12, 5.12), (-5.12, 5.12), (-5.12, 5.12)]
    parents = Individual.create_population(
        5,
        initialize=create_real_vector(bounds),
        decoder=IdentityDecoder(),
        problem=SpheroidProblem(maximize=False))

    # Evaluate initial population
    parents = Individual.evaluate_population(parents)

    context['leap']['std'] = 2

    # We use the provided context, but we could roll our own if we
    # wanted to keep separate contexts.  E.g., island models may want to have
    # their own contexts.
    generation_counter = util.inc_generation(context=context,
                                             callbacks=(anneal_std, ))

    # print initial, random population