Example 1
def test_mutation_perm(name):
    mut = get_mutation(name, prob=0.95)
    method = GA(pop_size=20,
                crossover=get_crossover('perm_erx'),
                mutation=mut,
                sampling=PermutationRandomSampling())
    minimize(create_random_tsp_problem(10), method, ("n_gen", 20))
    assert True
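This test (like the similar ones in later examples) takes a `name` argument, which in the original suite presumably comes from a pytest parametrization that the excerpt omits. A minimal sketch of that pattern, with an illustrative operator name:

import pytest

# hypothetical parametrization; the operator list used by the original suite is
# not shown in the excerpt ("perm_inv" is one permutation mutation registered
# in pymoo's factory)
@pytest.mark.parametrize("name", ["perm_inv"])
def test_mutation_perm(name):
    # body as in the example above
    ...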
Example 2
def test_crossover_perm(name):
    crossover = get_crossover(name, prob=0.95)
    method = GA(pop_size=20,
                crossover=crossover,
                mutation=InversionMutation(),
                sampling=PermutationRandomSampling())
    minimize(create_random_tsp_problem(10), method, ("n_gen", 20))
    assert True
Example 3
def genetic_algorithm():
    # pymoo's GA takes `pop_size`; the number of decision variables is set by
    # the Problem instance, not by the algorithm.
    algorithm = GA(
        pop_size=100,
        eliminate_duplicates=True)

    res = minimize(problem,
                   algorithm,
                   seed=1,
                   verbose=False)

    print("Best solution found: \nX = %s\nF = %s" % (res.X, res.F))
Example 4
    def _do(self):
        pop_size, n_gen = self.pop_size, self.n_gen
        n_points, n_dim = self.n_points, self.n_dim
        fun = self.fun

        class MyProblem(Problem):
            def __init__(self):
                self.n_points = n_points
                self.n_dim = n_dim
                self.n_partitions = get_partition_closest_to_points(
                    n_points, n_dim)

                super().__init__(n_var=n_points * n_dim,
                                 n_obj=1,
                                 n_constr=0,
                                 xl=0.0,
                                 xu=1.0,
                                 elementwise_evaluation=True)

            def get_points(self, x):
                _x = x.reshape((self.n_points, self.n_dim))**2
                _x = _x / _x.sum(axis=1)[:, None]
                return _x

            def _evaluate(self, x, out, *args, **kwargs):
                out["F"] = fun(self.get_points(x))

        problem = MyProblem()

        algorithm = GA(pop_size=pop_size, eliminate_duplicates=True)

        res = minimize(problem,
                       algorithm,
                       termination=('n_gen', n_gen),
                       verbose=True)

        ref_dirs = problem.get_points(res.X)
        return ref_dirs
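Inside `MyProblem`, `get_points` squares the flat decision vector and normalizes each row, so every resulting point lies on the unit simplex. A quick standalone check of that transformation (the sizes and random input are chosen purely for illustration):

import numpy as np

x = np.random.random(3 * 2)           # n_points = 3, n_dim = 2 -> 6 variables
_x = x.reshape((3, 2)) ** 2           # squaring keeps entries non-negative
_x = _x / _x.sum(axis=1)[:, None]     # row-wise normalization
print(_x.sum(axis=1))                 # -> approximately [1. 1. 1.]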
Example 5
    def test_ga(self):
        moo_algorithm = GA(pop_size=20, eliminate_duplicates=True)
        self.moo(moo_algorithm)
Example 6
def main():
    # Define search algorithms
    algorithms = list()
    # 1: GA
    algorithm = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=SudoRandomInitialization(),
        # crossover=AdaptiveSinglePointCrossover(prob=0.8),
        crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(),
        eliminate_duplicates=RefactoringSequenceDuplicateElimination()
    )
    algorithms.append(algorithm)

    # 2: NSGA II
    algorithm = NSGA2(pop_size=config.POPULATION_SIZE,
                      sampling=SudoRandomInitialization(),
                      # crossover=AdaptiveSinglePointCrossover(prob=0.8),
                      crossover=get_crossover("real_k_point", n_points=2),
                      mutation=BitStringMutation(),
                      eliminate_duplicates=RefactoringSequenceDuplicateElimination()
                      )
    algorithms.append(algorithm)

    # 3: NSGA III
    # Todo: Ask for best practices in determining ref_dirs
    ref_dirs = get_reference_directions("energy", 8, 90, seed=1)
    algorithm = NSGA3(ref_dirs=ref_dirs,
                      pop_size=config.POPULATION_SIZE,
                      sampling=SudoRandomInitialization(),
                      # crossover=AdaptiveSinglePointCrossover(prob=0.8),
                      crossover=get_crossover("real_k_point", n_points=2),
                      mutation=BitStringMutation(),
                      eliminate_duplicates=RefactoringSequenceDuplicateElimination()
                      )
    algorithms.append(algorithm)

    # Define problems
    problems = list()
    problems.append(
        ProblemSingleObjective(n_refactorings_lowerbound=config.LOWER_BAND, n_refactorings_upperbound=config.UPPER_BAND)
    )
    problems.append(
        ProblemMultiObjective(n_refactorings_lowerbound=config.LOWER_BAND, n_refactorings_upperbound=config.UPPER_BAND)
    )
    problems.append(
        ProblemManyObjective(n_refactorings_lowerbound=config.LOWER_BAND, n_refactorings_upperbound=config.UPPER_BAND)
    )

    # Do optimization for various problems with various algorithms
    res = minimize(problem=problems[2],
                   algorithm=algorithms[2],
                   termination=('n_gen', config.MAX_ITERATIONS),
                   seed=1,
                   verbose=True)
    logger.info("** FINISHED **")

    logger.info("Best Individual:")
    logger.info(res.X)
    logger.info("Objective Values:")
    logger.info(res.F)

    logger.info("==================")
    logger.info("Other Solutions:")
    for ind in res.opt:
        logger.info(ind.X)
        logger.info(ind.F)
        logger.info("==================")

    logger.info(f"Start Time: {res.start_time}")
    logger.info(f"End Time: {res.end_time}")
    logger.info(f"Execution Time in Seconds: {res.exec_time}")
Example 7
def test_mutation_real(name):
    mut = get_mutation(name)
    method = GA(pop_size=20, mutation=mut)
    minimize(get_problem("sphere"), method, ("n_gen", 20))
    assert True
Example 8
def test_crossover_real(name):
    crossover = get_crossover(name, prob=0.95)
    method = GA(pop_size=20, crossover=crossover)
    minimize(get_problem("sphere"), method, ("n_gen", 20))
    assert True
Example 9
        self.mutation = random.choice(self.mutations)
        self.repair = random.choice(self.repairs)

        off = super().do(problem, pop, n_offsprings, **kwargs)
        return off


selections = [RandomSelection()]

# define all the crossovers to be tried
crossovers = [
    SimulatedBinaryCrossover(10.0),
    SimulatedBinaryCrossover(30.0),
    DifferentialEvolutionCrossover()
]
# UNCOMMENT the following line to use only the SBX crossover with a single eta value
# crossovers = [SimulatedBinaryCrossover(30)]

mutations = [NoMutation(), PolynomialMutation(10.0), PolynomialMutation(30.0)]
repairs = []

ensemble = EnsembleMating(selections, crossovers, mutations, repairs)

problem = Rastrigin(n_var=30)

algorithm = GA(pop_size=100, mating=ensemble, eliminate_duplicates=True)

res = minimize(problem, algorithm, seed=1, verbose=True)

print("Best solution found: \nX = %s\nF = %s" % (res.X, res.F))
Example 10
def test_sphere_with_constraints():
    problem = SphereWithConstraints()
    for algorithm in [GA(), NelderMead(), PatternSearch()]:
        # `run` is a test helper (not shown in this excerpt) returning the best
        # objective value found and the known optimum of the problem
        f, f_opt = run(problem, algorithm)
        np.testing.assert_almost_equal(f, f_opt, decimal=5)
        print(problem.__class__.__name__, algorithm.__class__.__name__, "Yes")
Example 11
def main():
    """

    Optimization module main driver

    """

    # Define initialization objects
    initializer_class = SmellInitialization if config.WARM_START else RandomInitialization
    initializer_object = initializer_class(
        udb_path=config.UDB_PATH,
        population_size=config.POPULATION_SIZE,
        lower_band=config.LOWER_BAND,
        upper_band=config.UPPER_BAND
    )

    # -------------------------------------------
    # Define optimization problems
    problems = list()  # 0: Genetic (Single), 1: NSGA-II (Multi), 2: NSGA-III (Many) objectives problems
    problems.append(
        ProblemSingleObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemMultiObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemManyObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
            verbose_design_metrics=True,
        )
    )

    # Define search algorithms
    algorithms = list()
    # 1: GA
    alg1 = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg1)

    # 2: NSGA-II
    alg2 = NSGA2(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg2)

    # 3: NSGA-III
    # pop_size must be equal to or larger than the number of reference directions
    number_of_references_points = config.POPULATION_SIZE - int(config.POPULATION_SIZE * 0.20)
    ref_dirs = get_reference_directions(
        'energy',  # algorithm
        config.NUMBER_OBJECTIVES,  # number of objectives
        number_of_references_points,  # number of reference directions
        seed=1
    )
    alg3 = NSGA3(
        ref_dirs=ref_dirs,
        pop_size=config.POPULATION_SIZE,  # 200
        sampling=PopulationInitialization(initializer_object),
        selection=TournamentSelection(func_comp=binary_tournament),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY, ),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg3)

    # Termination of algorithms
    my_termination = MultiObjectiveDefaultTermination(
        x_tol=None,
        cv_tol=None,
        f_tol=0.0015,
        nth_gen=5,
        n_last=5,
        n_max_gen=config.MAX_ITERATIONS,  # about 1000 - 1400
        n_max_evals=1e6
    )

    # Do optimization for various problems with various algorithms
    res = minimize(
        problem=problems[config.PROBLEM],
        algorithm=algorithms[config.PROBLEM],
        termination=my_termination,
        seed=1,
        verbose=False,
        copy_algorithm=True,
        copy_termination=True,
        save_history=False,
        callback=LogCallback(),
    )
    # np.save('checkpoint', res.algorithm)

    # Log results
    logger.info(f"***** Algorithm was finished in {res.algorithm.n_gen + config.NGEN} generations *****")
    logger.info(" ")
    logger.info("============ time information ============")
    logger.info(f"Start time: {datetime.fromtimestamp(res.start_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"End time: {datetime.fromtimestamp(res.end_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"Execution time in seconds: {res.exec_time}")
    logger.info(f"Execution time in minutes: {res.exec_time / 60}")
    logger.info(f"Execution time in hours: {res.exec_time / (60 * 60)}")
    # logger.info(f"Number of generations: {res.algorithm.n_gen}")
    # logger.info(f"Number of generations", res.algorithm.termination)

    # Log optimum solutions
    logger.info("============ All opt solutions ============")
    for i, ind in enumerate(res.opt):
        logger.info(f'Opt refactoring sequence {i}:')
        logger.info(ind.X)
        logger.info(f'Opt refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind.F)
        logger.info("-" * 75)

    # Log best refactorings
    logger.info("============ Best refactoring sequences (a set of non-dominated solutions) ============")
    for i, ind in enumerate(res.X):
        logger.info(f'Best refactoring sequence {i}:')
        logger.info(ind)
        logger.info("-" * 75)
    logger.info("============ Best objective values (a set of non-dominated solutions) ============")
    for i, ind_objective in enumerate(res.F):
        logger.info(f'Best refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind_objective)
        logger.info("-" * 75)

    # Save best refactorings
    population_trimmed = []
    objective_values_content = ''
    for chromosome in res.X:
        chromosome_new = []
        if config.PROBLEM == 0:  # i.e., single objective problem
            for gene_ in chromosome:
                chromosome_new.append((gene_.name, gene_.params))
        else:
            for gene_ in chromosome[0]:
                chromosome_new.append((gene_.name, gene_.params))
        population_trimmed.append(chromosome_new)

    for objective_vector in res.F:
        objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
        if config.PROBLEM == 0:
            objective_values_content += f'{objective_vector},'
        else:
            for objective_ in objective_vector:
                objective_values_content += f'{objective_},'
        objective_values_content += '\n'

    best_refactoring_sequences_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_after_{res.algorithm.n_gen + config.NGEN}gens.json'
    )
    with open(best_refactoring_sequences_path, mode='w', encoding='utf-8') as fp:
        json.dump(population_trimmed, fp, indent=4)

    best_refactoring_sequences_objectives_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_objectives_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
    )
    with open(best_refactoring_sequences_objectives_path, mode='w', encoding='utf-8') as fp:
        fp.write(objective_values_content)

    try:
        pf = res.F
        # dm = HighTradeoffPoints()
        dm = get_decision_making("high-tradeoff")
        I = dm.do(pf)

        logger.info("============ High-tradeoff points refactoring sequences ============")
        for i, ind in enumerate(res.X[I]):
            logger.info(f'High tradeoff points refactoring sequence {i}:')
            logger.info(ind)
            logger.info("-" * 75)
        logger.info("============ High-tradeoff points objective values  ============")
        for i, ind_objective in enumerate(pf[I]):
            logger.info(f'High-tradeoff points refactoring sequence corresponding objectives vector {i}:')
            logger.info(ind_objective)
            logger.info("-" * 75)

        logger.info("High-tradeoff points mean:")
        logger.info(np.mean(pf[I], axis=0))
        logger.info("High-tradeoff points median:")
        logger.info(np.median(pf[I], axis=0))

        # Save high-tradeoff refactorings
        population_trimmed = []
        objective_values_content = ''
        for chromosome in res.X[I]:
            chromosome_new = []
            if config.PROBLEM == 0:  # i.e., single objective problem
                for gene_ in chromosome:
                    chromosome_new.append((gene_.name, gene_.params))
            else:
                for gene_ in chromosome[0]:
                    chromosome_new.append((gene_.name, gene_.params))
            population_trimmed.append(chromosome_new)

        for objective_vector in pf[I]:
            objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
            if config.PROBLEM == 0:
                objective_values_content += f'{objective_vector},'
            else:
                for objective_ in objective_vector:
                    objective_values_content += f'{objective_},'
            objective_values_content += '\n'

        high_tradeoff_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_refactoring_after_{res.algorithm.n_gen + config.NGEN}gens.json'
        )
        with open(high_tradeoff_path, mode='w', encoding='utf-8') as fp:
            json.dump(population_trimmed, fp, indent=4)

        high_tradeoff_path_objectives_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
        )
        with open(high_tradeoff_path_objectives_path, mode='w', encoding='utf-8') as fp:
            fp.write(objective_values_content)

    except Exception as e:
        logger.error(f"No multi-optimal solutions (error in computing high tradeoff points): {e}")
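For reference, the high-tradeoff decision making used in the try block above can be exercised in isolation. A minimal sketch on a synthetic two-objective front (the front values are made up purely for illustration and may yield few or no high-tradeoff points):

import numpy as np
from pymoo.factory import get_decision_making

# synthetic two-objective front (illustrative data only)
f1 = np.linspace(0.01, 1.0, 50)
front = np.column_stack([f1, 1.0 / f1])

dm = get_decision_making("high-tradeoff")
I = dm.do(front)       # indices of the points with a high local tradeoff
print(front[I])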
Example 12
def main():
    # Define search algorithms
    algorithms = list()
    # 1: GA
    algorithm = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=PureRandomInitialization(),
        crossover=AdaptiveSinglePointCrossover(prob=0.9),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # 2: NSGA II
    algorithm = NSGA2(
        pop_size=config.POPULATION_SIZE,
        sampling=PureRandomInitialization(),
        crossover=AdaptiveSinglePointCrossover(prob=0.9),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # 3: NSGA III
    # pop_size must be equal to or larger than the number of reference directions
    number_of_references_points = config.POPULATION_SIZE - int(
        config.POPULATION_SIZE * 0.20)
    ref_dirs = get_reference_directions(
        'energy',  # algorithm
        8,  # number of objectives
        number_of_references_points,  # number of reference directions
        seed=1)
    algorithm = NSGA3(
        ref_dirs=ref_dirs,
        pop_size=config.POPULATION_SIZE,  # 200
        sampling=PureRandomInitialization(),
        selection=TournamentSelection(func_comp=binary_tournament),
        crossover=AdaptiveSinglePointCrossover(prob=0.8),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # -------------------------------------------
    # Define problems
    problems = list()
    problems.append(
        ProblemSingleObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                               n_refactorings_upperbound=config.UPPER_BAND))
    problems.append(
        ProblemMultiObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                              n_refactorings_upperbound=config.UPPER_BAND))
    problems.append(
        ProblemManyObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                             n_refactorings_upperbound=config.UPPER_BAND,
                             evaluate_in_parallel=True))

    # Termination of algorithms
    my_termination = MultiObjectiveDefaultTermination(
        x_tol=None,
        cv_tol=None,
        f_tol=0.0015,
        nth_gen=10,
        n_last=20,
        n_max_gen=config.MAX_ITERATIONS,  # about 1000 - 1400
        n_max_evals=1e6)

    # Do optimization for various problems with various algorithms
    res = minimize(
        problem=problems[2],
        algorithm=algorithms[2],
        termination=my_termination,
        seed=1,
        verbose=False,
        copy_algorithm=True,
        copy_termination=True,
        save_history=False,
    )
    # np.save('checkpoint', res.algorithm)

    # Log results
    logger.info("\n** FINISHED **\n")
    logger.info(
        "Best refactoring sequences (a set of non-dominated solutions):")
    logger.info(res.X)
    logger.info("Best objective values (a set of non-dominated solutions):")
    logger.info(res.F)

    logger.info("=" * 75)
    logger.info("Other solutions:")
    for ind in res.opt:
        logger.info(ind.X)
        logger.info(ind.F)
        logger.info("-" * 50)
    logger.info("=" * 75)

    logger.info(f"Start time: {res.start_time}")
    logger.info(f"End time: {res.end_time}")
    logger.info(f"Execution time in seconds: {res.exec_time}")
    logger.info(f"Execution time in minutes: {res.exec_time / 60}")
    logger.info(f"Execution time in hours: {res.exec_time / (60 * 60)}")
    logger.info(f"Number of generations: {res.algorithm.n_gen}")
    # logger.info(f"Number of generations", res.algorithm.termination)

    pf = res.F
    # dm = HighTradeoffPoints()
    dm = get_decision_making("high-tradeoff")
    try:
        I = dm.do(pf)
        logger.info(f"High tradeoff points: {pf[I][0]}")
        logger.info(
            f"High tradeoff points corresponding refactorings: {res.X[I]}")
        logger.info(
            f"The mean improvement of quality attributes: {np.mean(pf[I], axis=0)}"
        )
        logger.info(
            f"The median improvement of quality attributes: {np.median(pf[I], axis=0)}"
        )
    except Exception as e:
        logger.info(
            f"No multi-optimal solutions (error in computing high tradeoff points): {e}"
        )
Example 13
            for k in range(len(X)):
                i = I[k]
                x = X[k]
                _x = np.concatenate([x[i:], x[:i]])
                pop[k].set("X", _x)

            return pop

    from pymoo.optimize import minimize
    from pymoo.factory import get_crossover, get_mutation, get_sampling
    from pymoo.problems.single.traveling_salesman import create_random_tsp_problem
    from pymoo.util.termination.default import SingleObjectiveDefaultTermination

    algorithm = GA(pop_size=20,
                   sampling=get_sampling("perm_random"),
                   crossover=get_crossover("perm_erx"),
                   mutation=get_mutation("perm_inv"),
                   repair=StartFromZeroRepair(),
                   eliminate_duplicates=True)

    # terminate once the algorithm has not improved over the last 200 generations
    # (the maximum-generation limit is effectively disabled)
    termination = SingleObjectiveDefaultTermination(n_last=200,
                                                    n_max_gen=np.inf)

    # res = minimize(
    #     problem,
    #     algorithm,
    #     termination,
    #     seed=1,
    # )
    #
    # print("Traveling Time:", np.round(res.F[0], 3))
Example 14
    def search(self, data: Data, models: Collection[Model], tid: int,
               **kwargs) -> np.ndarray:

        kwargs = kwargs['kwargs']

        # print ("SEARCH!")

        prob = SurrogateProblem(self.problem, self.computer, data, models,
                                self.options, tid, self.models_transfer)
        prob_pymoo = MyProblemPyMoo(self.problem.DP, self.problem.DO, prob)

        if (kwargs['verbose']):
            print("prob: ", prob)
        bestX = []

        if (self.problem.DO == 1):  # single objective optimizer
            if ('ga' == kwargs['search_algo']):
                from pymoo.algorithms.soo.nonconvex.ga import GA
                from pymoo.optimize import minimize
                algo = GA(pop_size=kwargs["search_pop_size"])
            elif ('pso' == kwargs['search_algo']):
                from pymoo.algorithms.soo.nonconvex.pso import PSO
                from pymoo.optimize import minimize
                algo = PSO(pop_size=kwargs["search_pop_size"])
            else:
                raise Exception(
                    f'Unknown optimization algorithm "{kwargs["search_algo"]}"'
                )

            bestX = []
            res = minimize(prob_pymoo, algo, verbose=kwargs['verbose'], seed=1)
            bestX.append(np.array(res.X).reshape(1, self.problem.DP))

        else:  # multi objective
            if ('nsga2' == kwargs['search_algo']):
                from pymoo.algorithms.moo.nsga2 import NSGA2
                from pymoo.optimize import minimize
                algo = NSGA2(pop_size=kwargs["search_pop_size"])
            elif ('moead' == kwargs['search_algo']):
                from pymoo.algorithms.moo.moead import MOEAD
                from pymoo.optimize import minimize
                from pymoo.factory import get_reference_directions
                ref_dirs = get_reference_directions("das-dennis",
                                                    self.problem.DO,
                                                    n_partitions=12)
                algo = MOEAD(ref_dirs,
                             n_neighbors=15,
                             prob_neighbor_mating=0.7)
            else:
                raise Exception(
                    f'Unknown optimization algorithm "{kwargs["search_algo"]}"'
                )
            bestX = []
            res = minimize(prob_pymoo,
                           algo, ("n_gen", kwargs["search_gen"]),
                           verbose=kwargs['verbose'],
                           seed=1)
            firstn = min(int(kwargs['search_more_samples']),
                         np.shape(res.X)[0])
            xss = res.X[0:firstn]
            bestX.append(xss)

        if (kwargs['verbose']):
            # `cond` is not defined in this excerpt; it is presumably set
            # earlier in the original method
            print(tid, 'OK' if cond else 'KO')
            sys.stdout.flush()
            print("bestX", bestX)
        return (tid, bestX)
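In the MOEAD branch above, the population size is implied by the number of Das-Dennis reference directions, which for M objectives and H partitions is C(H + M - 1, M - 1). A quick check of that count (the objective count is illustrative; the original uses self.problem.DO):

import math
from pymoo.factory import get_reference_directions

M, H = 3, 12
ref_dirs = get_reference_directions("das-dennis", M, n_partitions=H)

# Das-Dennis yields C(H + M - 1, M - 1) directions: C(14, 2) = 91 for M=3, H=12
assert len(ref_dirs) == math.comb(H + M - 1, M - 1)
print(len(ref_dirs))   # -> 91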
Example 15
    # benchmark.add_problem("himmelblau", Himmelblau(), ("n_evals", 400))
    # benchmark.add_problem("rosenbrock", Rosenbrock(), ("n_evals", 500))
    # benchmark.add_problem("ackley-10d", Ackley(n_var=10), ("n_evals", 1000))
    # benchmark.add_problem("rastrigin-5d", Rastrigin(n_var=5), ("n_evals", 1000))

    instance = 1
    n_evals = 1000
    for n_var in [10, 20, 40]:
        for function in range(1, 25):
            label = f"bbob-f{function:02d}-{instance}"
            benchmark.add_problem(label + "-" + str(n_var),
                                  get_problem(label, n_var=n_var),
                                  ("n_evals", n_evals))

    benchmark.add_algorithm("de", DE())
    benchmark.add_algorithm("ga", GA())
    benchmark.add_algorithm("pso", PSO())
    benchmark.add_algorithm("cmaes", SimpleCMAES())
    benchmark.add_algorithm("ps", PatternSearch())
    benchmark.add_algorithm("nm", NelderMead())

    loader = DefaultLoader(FOLDER)
    # loader = None

    writer = DefaultWriter(FOLDER)

    results = benchmark.run(writer=writer,
                            loader=loader,
                            run_if_loading_fails=True)

    # _ = SingleObjectiveAnalyzer().run(results, benchmark=benchmark, inplace=True)