Example 1
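These snippets appear to come from the test suite of the LEAP evolutionary computation library (leap_ec) and omit their imports. A plausible import block, assuming the standard leap_ec package layout, would be:

import numpy as np
import pytest

from leap_ec import ops, statistical_helpers
from leap_ec.binary_rep.problems import MaxOnes
from leap_ec.individual import Individual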
def test_proportional_selection_offset():
    ''' Test of proportional selection with a non-default offset '''
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    # evaluate population and negate fitness of second individual
    pop = Individual.evaluate_population(pop)
    pop[1].fitness = -pop[1].fitness

    # now we try to select normally (this should raise a ValueError)
    # due to the negative fitness
    with pytest.raises(ValueError):
        selector = ops.proportional_selection(pop)
        selected = next(selector)
    # it should work by setting the offset to +3
    # this adds 3 to each fitness value, making the second
    # individual's fitness 0.
    selector = ops.proportional_selection(pop, offset=3)

    # we expect the first individual to always be selected, since the
    # offset shifts the zero point to -3 and leaves the second
    # individual with zero selection weight
    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])

    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])
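For intuition on why the all-zeros genome dominates here: with raw fitnesses [0, -3] and offset=3, the selection weights become [3, 0], so all probability mass lands on the first individual. A minimal sketch of that arithmetic in plain NumPy (not the leap_ec internals):

import numpy as np

fitnesses = np.array([0.0, -3.0])  # after negating the second individual's fitness
offset = 3.0
weights = fitnesses + offset       # -> [3., 0.]
probs = weights / weights.sum()    # -> [1., 0.]: only the first individual can be drawn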
Example 2
def test_proportional_selection2():
    ''' Test of a stochastic proportional selection '''
    # Make a population where fitness proportional selection has an obvious
    # reproducible choice
    # Proportions here should be 1/4 and 3/4, respectively
    pop = [
        Individual(np.array([0, 1, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]
    # Assign a unique identifier to each individual
    pop[0].id = 0
    pop[1].id = 1

    # We first need to evaluate all the individuals so that
    # selection has fitnesses to compare
    pop = Individual.evaluate_population(pop)
    selected = ops.proportional_selection(pop)

    N = 1000
    p_thresh = 0.1
    observed_dist = statistical_helpers.collect_distribution(
        lambda: next(selected).id, samples=N)
    expected_dist = {pop[0].id: 0.25 * N, pop[1].id: 0.75 * N}
    print(f"Observed: {observed_dist}")
    print(f"Expected: {expected_dist}")
    assert (statistical_helpers.stochastic_equals(expected_dist,
                                                  observed_dist,
                                                  p=p_thresh))
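The expected 1/4 vs 3/4 split falls straight out of normalizing the two MaxOnes fitnesses (1 one vs 3 ones). A quick check of that arithmetic, independent of leap_ec:

import numpy as np

fitnesses = np.array([1.0, 3.0])     # MaxOnes fitness of [0, 1, 0] and [1, 1, 1]
probs = fitnesses / fitnesses.sum()  # -> [0.25, 0.75]
N = 1000
expected_counts = probs * N          # -> [250., 750.], matching expected_dist above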
Example 3
def test_proportional_selection1():
    ''' Test of a deterministic case of proportional selection '''
    # Make a population where proportional_selection has an obvious
    # reproducible choice
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    parents = Individual.evaluate_population(pop)
    # This selection operator will always select the [1, 1, 1] individual since
    # [0, 0, 0] has zero fitness
    selector = ops.proportional_selection(parents)

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
Example 4
def test_proportional_selection_custom_key():
    ''' Test of proportional selection with a custom key function '''
    pop = [
        Individual(np.array([0, 0, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    def custom_key(individual):
        ''' Returns fitness based on MaxZeros '''
        return np.count_nonzero(individual.genome == 0)

    pop = Individual.evaluate_population(pop)
    selector = ops.proportional_selection(pop, key=custom_key)

    # we expect the first individual to always be selected
    # since its genome is all 0s
    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])

    selected = next(selector)
    assert np.all(selected.genome == [0, 0, 0])
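Because the key replaces MaxOnes fitness with a count of zeros, the selection weights become [3, 0] and the all-zeros genome is the only possible pick. A standalone sketch of the weights this key function would produce (assuming the key is applied to each individual in place of its fitness):

import numpy as np

genomes = [np.array([0, 0, 0]), np.array([1, 1, 1])]
weights = np.array([float(np.count_nonzero(g == 0)) for g in genomes])  # -> [3., 0.]
probs = weights / weights.sum()                                         # -> [1., 0.]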
Example 5
def test_proportional_selection_pop_min():
    ''' Test of proportional selection with pop-min offset '''
    # Create a population of individuals with positive fitness.
    # Offsetting each fitness by the population minimum ensures that the
    # least fit member never gets selected.
    pop = [
        Individual(np.array([0, 1, 0]), problem=MaxOnes()),
        Individual(np.array([1, 1, 1]), problem=MaxOnes())
    ]

    pop = Individual.evaluate_population(pop)

    selector = ops.proportional_selection(pop, offset='pop-min')

    # we expect that the second individual is always selected
    # since the new zero point will be at the minimum fitness
    # of the population
    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])

    selected = next(selector)
    assert np.all(selected.genome == [1, 1, 1])
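With offset='pop-min' the population's minimum fitness is subtracted from every fitness, so the least-fit member ends up with zero selection weight. A sketch of that shift for this population (assuming 'pop-min' simply means subtracting min(fitness)):

import numpy as np

fitnesses = np.array([1.0, 3.0])       # MaxOnes fitness of [0, 1, 0] and [1, 1, 1]
shifted = fitnesses - fitnesses.min()  # -> [0., 2.]
probs = shifted / shifted.sum()        # -> [0., 1.]: the second individual is always chosen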