Example #1
def test_genalg_problems():
    # Attempt to solve various problems
    # Assert that the optimizer can find the solutions
    optimizer = GenAlg(32)
    optimizer.optimize(problems.ackley_binary,
                       logging_func=lambda *args: optimize._print_fitnesses(
                           *args, frequency=100))
    assert optimizer.solution_found
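The logging_func argument above wraps the library's internal _print_fitnesses to change its reporting frequency; any callable with the same calling convention can be substituted. A minimal sketch, assuming only that the optimizer invokes the callback with positional arguments each iteration (the silencing behavior is illustrative, not part of the library):

def quiet_logging(*args):
    """Drop-in logging_func that suppresses per-iteration output."""
    pass

optimizer = GenAlg(32)
optimizer.optimize(problems.ackley_binary, logging_func=quiet_logging)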
Example #2
def test_Optimizer_optimize_cache_encoded_True_cache_solution_False():
    """Should only cache decoded solutions if True."""
    # After calling Optimizer._get_fitnesses
    # __encoded_cache should not be empty
    # __solution_cache should be empty
    optimizer = GenAlg(2)

    # Get fitnesses
    optimizer.optimize(SIMPLE_PROBLEM,
                       max_iterations=1,
                       cache_encoded=True,
                       cache_solution=False,
                       clear_cache=False)

    # Assert caches as expected
    assert optimizer._Optimizer__encoded_cache != {}
    assert optimizer._Optimizer__solution_cache == {}
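The _Optimizer__encoded_cache lookups above rely on Python's name mangling: a double-underscore attribute __x inside class C is stored as _C__x. A standalone illustration of the mechanism (this stub class is not the library's Optimizer):

class Optimizer(object):
    def __init__(self):
        self.__encoded_cache = {}  # actually stored as _Optimizer__encoded_cache

stub = Optimizer()
assert stub._Optimizer__encoded_cache == {}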
Example #3
def test_metaoptimize_genalg():
    optimizer = GenAlg(32)
    prev_hyperparameters = optimizer._get_hyperparameters()

    # Test without metaoptimize, save iterations to solution
    optimizer.optimize(problems.sphere_binary)
    iterations_to_solution = optimizer.iteration

    # Test with metaoptimize; assert that the hyperparameters changed
    optimizer.optimize_hyperparameters(
        problems.sphere_binary,
        smoothing=1,
        max_iterations=1,
        _meta_optimizer=GenAlg(None, population_size=2))
    optimizer.optimize(problems.sphere_binary)

    assert optimizer._get_hyperparameters() != prev_hyperparameters
Example #4
def test_Optimizer_meta_optimize_parameter_locks():
    # Run meta optimize with locks
    # assert that locked parameters did not change

    # Only optimize mutation chance
    parameter_locks = [
        '_population_size', '_crossover_chance', '_selection_function',
        '_crossover_function'
    ]

    my_genalg = GenAlg(2)
    original = copy.deepcopy(my_genalg)

    # Low smoothing for faster performance
    my_genalg.optimize_hyperparameters(
        SIMPLE_PROBLEM, parameter_locks=parameter_locks, smoothing=1)

    # Check that mutation chance changed
    assert my_genalg._mutation_chance != original._mutation_chance

    # And all others stayed the same
    for parameter in parameter_locks:
        assert getattr(my_genalg, parameter) == getattr(original, parameter)
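The comparison loop above generalizes to any set of locked attribute names; a small helper sketch in the same spirit (not part of the library):

def attributes_unchanged(before, after, attribute_names):
    """Return True if every named attribute compares equal on both objects."""
    return all(
        getattr(before, name) == getattr(after, name)
        for name in attribute_names)

# Mirrors the loop in the test above:
# assert attributes_unchanged(original, my_genalg, parameter_locks)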
Example #5
def test_metaoptimize_gsa():
    optimizer = GSA(2, [-5.0] * 2, [5.0] * 2)
    prev_hyperparameters = optimizer._get_hyperparameters()

    # Test without metaoptimize, save iterations to solution
    optimizer.optimize(problems.sphere_real)
    iterations_to_solution = optimizer.iteration

    # Test with metaoptimize; assert that the hyperparameters changed
    optimizer.optimize_hyperparameters(problems.sphere_real,
                                       smoothing=1,
                                       max_iterations=1,
                                       _meta_optimizer=GenAlg(
                                           None, population_size=2))
    optimizer.optimize(problems.sphere_real)

    assert optimizer._get_hyperparameters() != prev_hyperparameters
Example #6
def test_metaoptimize_crossentropy():
    optimizer = crossentropy.CrossEntropy(32)
    prev_hyperparameters = optimizer._get_hyperparameters()

    # Test without metaoptimize, save iterations to solution
    optimizer.optimize(problems.sphere_binary)
    iterations_to_solution = optimizer.iteration

    # Test with metaoptimize; assert that the hyperparameters changed
    optimizer.optimize_hyperparameters(problems.sphere_binary,
                                       smoothing=1,
                                       max_iterations=1,
                                       _meta_optimizer=GenAlg(
                                           None, population_size=2))
    optimizer.optimize(problems.sphere_binary)

    assert optimizer._get_hyperparameters() != prev_hyperparameters
Example #7
def test_genalg_sphere_tournament_with_diversity():
    _check_optimizer(
        GenAlg(32,
               selection_function=functools.partial(
                   gaoperators.tournament_selection, diversity_weight=1.0)))
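functools.partial is used above to pre-bind diversity_weight, so the resulting callable still matches the selection-function interface expected by GenAlg. The mechanism in isolation:

import functools

def power(base, exponent):
    return base ** exponent

square = functools.partial(power, exponent=2)
assert square(3) == 9  # equivalent to power(3, exponent=2)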
Example #8
def test_genalg_chromosome_size_eq_1():
    """Regression test for chromosome_size == 1 edge case."""
    optimizer = GenAlg(1)
    optimizer.optimize(VERY_SIMPLE_PROBLEM)
    assert optimizer.solution_found
Example #9
def test_genalg_sphere_defaults():
    _check_optimizer(GenAlg(32))
Example #10
def test_Optimizer_optimize_sphere_max_seconds():
    optimizer = GenAlg(32, population_size=10)
    optimizer.optimize(problems.sphere_binary,
                       max_iterations=float('inf'),
                       max_seconds=10)
    assert optimizer.solution_found
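Setting max_iterations=float('inf') leaves max_seconds as the only stopping criterion. The wall-clock check presumably reduces to a loop like this sketch (illustrative, not the library's internals):

import time

def run_until(max_seconds):
    """Iterate until a wall-clock budget is exhausted; return the iteration count."""
    start = time.time()
    iteration = 0
    while time.time() - start < max_seconds:
        iteration += 1  # one optimization step would go here
    return iteration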
Example #11
    def optimize_hyperparameters(self,
                                 problems,
                                 parameter_locks=None,
                                 smoothing=20,
                                 max_iterations=100,
                                 _meta_optimizer=None,
                                 _low_memory=True):
        """Optimize hyperparameters for a given problem.

        Args:
            problems: Either a single problem, or a list of problem instances,
                      allowing optimization based on multiple similar problems.
            parameter_locks: a list of strings, each corresponding to a hyperparameter
                             that should not be optimized.
            smoothing: int; number of runs to average over for each set of hyperparameters.
            max_iterations: The number of iterations to optimize before stopping.
            _low_memory: disable performance enhancements to save memory
                         (they use a lot of memory otherwise).
        """
        if smoothing <= 0:
            raise ValueError('smoothing must be > 0')

        # problems supports either one or many problem instances
        if isinstance(problems, collections.abc.Iterable):
            for problem in problems:
                if not isinstance(problem, Problem):
                    raise TypeError(
                        'problem must be Problem instance or list of Problem instances'
                    )
        elif isinstance(problems, Problem):
            problems = [problems]
        else:
            raise TypeError(
                'problem must be Problem instance or list of Problem instances'
            )

        # Copy to avoid permanent modification
        meta_parameters = copy.deepcopy(self._hyperparameters)

        # First, handle parameter locks, since it will modify our
        # meta_parameters dict
        locked_values = _parse_parameter_locks(self, meta_parameters,
                                               parameter_locks)

        # We need to know the size of our chromosome,
        # based on the hyperparameters to optimize
        solution_size = _get_hyperparameter_solution_size(meta_parameters)

        # We also need to create a decode function to transform the binary solution
        # into parameters for the metaheuristic
        decode = _make_hyperparameter_decode_func(locked_values,
                                                  meta_parameters)

        # A master fitness dictionary can be stored for use between calls
        # to meta_fitness
        if _low_memory:
            master_fitness_dict = None
        else:
            master_fitness_dict = {}

        additional_parameters = {
            '_optimizer': self,
            '_problems': problems,
            '_runs': smoothing,
            '_master_fitness_dict': master_fitness_dict,
        }
        META_FITNESS = Problem(
            _meta_fitness_func,
            decode_function=decode,
            fitness_kwargs=additional_parameters)
        if _meta_optimizer is None:
            # Initialize default meta optimizer
            # GenAlg is used because it supports both discrete and continuous
            # attributes
            from optimal import GenAlg

            # Create metaheuristic with computed decode function and solution
            # size
            _meta_optimizer = GenAlg(solution_size)
        else:
            # Adjust supplied metaheuristic for this problem
            _meta_optimizer._solution_size = solution_size

        # Determine the best hyperparameters with a metaheuristic
        best_parameters = _meta_optimizer.optimize(
            META_FITNESS, max_iterations=max_iterations)

        # Set the hyperparameters inline
        self._set_hyperparameters(best_parameters)

        # And return
        return best_parameters
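A hedged usage sketch for the method above, combining its signature with the lock names seen in the earlier tests (the specific values are illustrative):

optimizer = GenAlg(32)
best = optimizer.optimize_hyperparameters(
    problems.sphere_binary,
    parameter_locks=['_population_size'],  # keep population size fixed
    smoothing=1,                           # one run per candidate, for speed
    max_iterations=10)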
Example #12
    # Ackley function: the value this fitness function minimizes
    output = -20 * math.exp(-0.2 * math.sqrt(0.5 * (x1**2 + x2**2))) - math.exp(
        0.5 * (math.cos(2 * math.pi * x1) + math.cos(2 * math.pi * x2))) + 20 + math.e

    # You can prematurely stop the metaheuristic by returning True
    # as the second return value
    # Here, we consider the problem solved if the output is <= 0.01
    finished = output <= 0.01

    # Because this function is trying to minimize the output,
    # a smaller output has a greater fitness
    fitness = 1 / output

    # First return argument must be a real number
    # The higher the number, the better the solution
    # Second return argument is a boolean, and optional
    return fitness, finished


# Define a problem instance to optimize
# We can optionally include a decode function
# The optimizer will pass the decoded solution into your fitness function
# Additional fitness function and decode function parameters can also be added
ackley = Problem(ackley_fitness, decode_function=decode_ackley)

# Create a genetic algorithm with a chromosome size of 32,
# and use it to solve our problem
my_genalg = GenAlg(32)
best_solution = my_genalg.optimize(ackley)

print(best_solution)
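decode_ackley is referenced above but not shown. A minimal sketch of a compatible decode function; the even split of the chromosome and the [-5, 5] range are assumptions for illustration:

def decode_ackley(binary):
    """Map a binary chromosome to two real-valued coordinates."""
    half = len(binary) // 2

    def to_real(bits, lower=-5.0, upper=5.0):
        integer = int(''.join(map(str, bits)), 2)
        return lower + (upper - lower) * integer / float(2**len(bits) - 1)

    return to_real(binary[:half]), to_real(binary[half:])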
Example #13
def test_Optimizer_optimize_solution_correct():
    optimizer = GenAlg(2)
    assert optimizer.optimize(SIMPLE_PROBLEM) == [1, 1]
Example #14
def ANFISGA():
    anfisga = Problem(anfisga_fitness,
                      decode_function=decode_anfisga_search_space)
    my_genalg = GenAlg(32)
    best_solution = my_genalg.optimize(anfisga)
    print(best_solution)
Example #15
def SVMGA():
    svmga = Problem(svmga_fitness, decode_function=decode_svmga_search_space)
    my_genalg = GenAlg(32)
    best_solution = my_genalg.optimize(svmga, max_iterations=1000000)
    print(best_solution)
Example #16
PROBLEMS = [
    problems.ackley_binary, problems.levis_binary, problems.eggholder_binary,
    problems.table_binary, problems.shaffer_binary, problems.cross_binary
]


def benchmark_multi(optimizer):
    """Benchmark an optimizer configuration on multiple functions."""
    # Get our benchmark stats
    all_stats = benchmark.compare(optimizer, PROBLEMS, runs=100)
    return benchmark.aggregate(all_stats)


# Create the genetic algorithm configurations to compare
# In reality, we would also want to optimize other hyperparameters
ga_onepoint = GenAlg(32, crossover_function=gaoperators.one_point_crossover)
ga_uniform = GenAlg(32, crossover_function=gaoperators.uniform_crossover)

# Run a benchmark for multiple problems, for robust testing
onepoint_stats = benchmark_multi(ga_onepoint)
uniform_stats = benchmark_multi(ga_uniform)

print()
print('One Point')
pprint.pprint(onepoint_stats)
print()
print('Uniform')
pprint.pprint(uniform_stats)

# We can obtain an easier comparison by performing another aggregate step
aggregate_stats = benchmark.aggregate({
    # The keys are arbitrary labels for each configuration
    'One Point': onepoint_stats,
    'Uniform': uniform_stats
})
pprint.pprint(aggregate_stats)
Example #17
    print('lr : ' + str(learning_rate) + ', hidden neuron : ' + str(neuron))
    output = rmse

    finished = output <= err_threshold
    #finished = output <= 0.01
    fitness = 1 / output
    print(finished)
    print(fitness)
    return fitness, finished


# In[10]:

# Pipeline 3 - FS->ANN->GA
ann_ml = Problem(ann_fs_fitness, decode_function=decode_param)
my_genalg = GenAlg(32, mutation_chance=0.1, crossover_chance=0.8)
start_time = time.time()
best_solution = my_genalg.optimize(ann_ml, max_iterations=15, n_processes=1)
elapsed_time = time.time() - start_time
print(time.strftime("%H:%M:%S", time.gmtime(elapsed_time)))

print(best_solution)

# In[15]:

print(best_solution)

# In[11]:

# Pipeline 4 - ANN->GA
ann_ml = Problem(ann_fitness, decode_function=decode_param)
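One caveat shared by the fitness functions in this listing: fitness = 1 / output raises ZeroDivisionError if the minimized value reaches exactly zero. A common hedge (the epsilon value is arbitrary):

def safe_inverse_fitness(output, epsilon=1e-12):
    """Invert a minimized value without risking division by zero."""
    return 1.0 / (output + epsilon)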
Example #18
def test_genalg_sphere_stochastic_selection():
    # Needs higher population size to consistently succeed
    _check_optimizer(
        GenAlg(32,
               population_size=40,
               selection_function=gaoperators.stochastic_selection))
Example #19
def test_Optimizer_optimize_parallel():
    optimizer = GenAlg(2)
    optimizer.optimize(SIMPLE_PROBLEM, n_processes=random.randint(2, 4))
    assert optimizer.solution_found
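If n_processes is implemented with the multiprocessing module (an assumption, not confirmed by this listing), the problem's fitness and decode functions must be picklable, i.e. defined at module level rather than as lambdas or closures:

# Assumption: n_processes spawns worker processes, so SIMPLE_PROBLEM's
# functions must be importable by those workers (no lambdas or closures).
optimizer = GenAlg(2)
optimizer.optimize(SIMPLE_PROBLEM, n_processes=4)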