Exemplo n.º 1
0
def test_min_vs_loop_vs_infill():
    """The one-shot `minimize` call, the `next()`-driven loop, and the
    explicit infill/evaluate/advance loop must all produce identical
    results for the same problem and seed."""
    problem = get_problem("zdt1")
    n_gen = 30

    # Reference run: functional interface.
    res_minimize = minimize(problem, NSGA2(pop_size=100), ('n_gen', n_gen), seed=1)

    # Same run, driven manually one generation at a time.
    algo = NSGA2(pop_size=100)
    algo.setup(problem, ('n_gen', n_gen), seed=1)
    while algo.has_next():
        algo.next()
    algo.finalize()
    res_loop = algo.result()

    np.testing.assert_allclose(res_minimize.X, res_loop.X)

    # Same run again, but we ask for infill solutions and evaluate them ourselves.
    algo = NSGA2(pop_size=100)
    algo.setup(problem, ('n_gen', n_gen), seed=1)
    while algo.has_next():
        candidates = algo.infill()
        Evaluator().eval(problem, candidates)
        algo.advance(infills=candidates)
    algo.finalize()
    res_infill = algo.result()

    np.testing.assert_allclose(res_minimize.X, res_infill.X)
Exemplo n.º 2
0
def test_no_feasible_solution_found():
    """When a problem has no feasible solution, the result is empty unless
    `return_least_infeasible` is set, in which case the least infeasible
    solution (CV == 1.0 here) is returned instead."""
    class MyProblem(Problem):
        def __init__(self):
            # NOTE(review): n_constr=36 is declared, but _evaluate only fills a
            # single constraint column below -- looks inconsistent; confirm
            # that pymoo tolerates this mismatch.
            super().__init__(n_var=2,
                             n_obj=1,
                             n_constr=36,
                             xl=np.array([0, 0]),
                             xu=np.array([100, 100]))

        def _evaluate(self, x, out, *args, **kwargs):
            f1 = x[:, 0] + x[:, 1]
            out["F"] = np.column_stack([f1])
            # Constraint value 1 > 0 for every solution -> always infeasible.
            out["G"] = np.ones(len(x))

    res = minimize(MyProblem(), NSGA2(), ("n_gen", 10), seed=1)

    # Infeasible and return_least_infeasible not requested -> everything is None.
    assert res.X is None
    assert res.F is None
    assert res.G is None

    res = minimize(MyProblem(),
                   NSGA2(), ("n_gen", 10),
                   seed=1,
                   verbose=True,
                   return_least_infeasible=True,
                   save_history=True)

    # The least infeasible solution violates the constraint by exactly 1.
    assert res.CV[0] == 1.0
Exemplo n.º 3
0
def test_mutation_bin(name):
    """Smoke test: the given binary mutation operator must run on ZDT5."""
    algorithm = NSGA2(pop_size=20,
                      crossover=get_crossover('bin_ux'),
                      mutation=get_mutation(name))
    minimize(get_problem("zdt5"), algorithm, ("n_gen", 20))
    assert True
Exemplo n.º 4
0
    def test_pymoo_nsgaii(self):
        """Run NSGA-II through the Pymoo wrapper on the constrained problem
        and sanity-check the spread of the final population's objectives."""
        moo_algorithm = NSGA2(
            pop_size=40,
            n_offsprings=10,
            sampling=get_sampling("real_random"),
            crossover=get_crossover("real_sbx", prob=0.9, eta=15),
            mutation=get_mutation("real_pm", eta=20),
            eliminate_duplicates=True,
        )

        problem = ProblemConstraint()

        algorithm = Pymoo(problem)
        algorithm.options['verbose_level'] = 0
        algorithm.options['n_iterations'] = 40
        algorithm.options['algorithm'] = moo_algorithm
        algorithm.run()

        # Collect both objective values of the final population in one pass.
        f_1, f_2 = [], []
        for individual in problem.last_population():
            f_1.append(individual.costs[0])
            f_2.append(individual.costs[1])

        self.assertLess(min(f_1), 1.5)
        self.assertGreater(max(f_1), 74)
        # NOTE(review): both bounds below are checked against max(f_2); the
        # f_1 pattern suggests min(f_2) may have been intended -- confirm.
        self.assertLess(max(f_2), 1.5)
        self.assertGreater(max(f_2), 0.75)
Exemplo n.º 5
0
 def test_nsga2(self):
     """Exercise the shared multi-objective driver with an NSGA-II setup."""
     self.moo(NSGA2(pop_size=10,
                    n_offsprings=10,
                    sampling=get_sampling("real_random"),
                    crossover=get_crossover("real_sbx", prob=0.9, eta=15),
                    mutation=get_mutation("real_pm", eta=20),
                    eliminate_duplicates=True))
Exemplo n.º 6
0
def test_same_seed_same_result():
    """Two minimize() runs with the same seed must be fully reproducible,
    even if the global numpy RNG is disturbed in between."""
    algorithm = NSGA2(pop_size=100, eliminate_duplicates=True)
    problem = get_problem("zdt3")

    first = minimize(problem, algorithm, ('n_gen', 20), seed=1)
    np.random.seed(200)  # perturb the global RNG; must not affect the seeded run
    second = minimize(problem, algorithm, ('n_gen', 20), seed=1)

    np.testing.assert_almost_equal(first.X, second.X)
    np.testing.assert_almost_equal(first.F, second.F)
Exemplo n.º 7
0
def test_no_pareto_front_given():
    """minimize() must run (with verbose output) even when the problem does
    not provide a known Pareto front."""
    class ZDT1NoPF(ZDT):
        # ZDT1-style objectives, but without overriding the pareto-front helper.
        def _evaluate(self, x, out, *args, **kwargs):
            f1 = x[:, 0]
            g = 1 + 9.0 / (self.n_var - 1) * np.sum(x[:, 1:], axis=1)
            f2 = g * (1 - np.power((f1 / g), 0.5))
            out["F"] = np.column_stack([f1, f2])

    minimize(ZDT1NoPF(),
             NSGA2(pop_size=100, eliminate_duplicates=True),
             ('n_gen', 20),
             seed=1,
             verbose=True)
    assert True
Exemplo n.º 8
0
def test_thread_pool():
    """Smoke test: an elementwise problem evaluated through a thread pool."""
    class MyThreadedProblem(ElementwiseProblem):
        def __init__(self):
            super().__init__(n_var=2,
                             n_obj=1,
                             n_constr=0,
                             parallelization=("threads", 4),  # 4 worker threads
                             xl=np.array([0, 0]),
                             xu=np.array([100, 100]))

        def _evaluate(self, x, out, *args, **kwargs):
            # Elementwise evaluation: x is a single solution vector here.
            out["F"] = x[0] + x[1]

    minimize(MyThreadedProblem(),
             NSGA2(),
             ("n_gen", 10),
             seed=1,
             save_history=False)
Exemplo n.º 9
0
def main():
    """Configure GA, NSGA-II and NSGA-III for the refactoring problems,
    run the many-objective problem with NSGA-III, and log the results."""
    bounds = dict(n_refactorings_lowerbound=config.LOWER_BAND,
                  n_refactorings_upperbound=config.UPPER_BAND)

    # Search algorithms -- each gets its own operator instances.
    # 1: single-objective GA, 2: NSGA-II.
    algorithms = [
        GA(pop_size=config.POPULATION_SIZE,
           sampling=SudoRandomInitialization(),
           crossover=get_crossover("real_k_point", n_points=2),
           mutation=BitStringMutation(),
           eliminate_duplicates=RefactoringSequenceDuplicateElimination()),
        NSGA2(pop_size=config.POPULATION_SIZE,
              sampling=SudoRandomInitialization(),
              crossover=get_crossover("real_k_point", n_points=2),
              mutation=BitStringMutation(),
              eliminate_duplicates=RefactoringSequenceDuplicateElimination()),
    ]

    # 3: NSGA-III additionally needs reference directions.
    # TODO: ask for best practices in determining ref_dirs.
    ref_dirs = get_reference_directions("energy", 8, 90, seed=1)
    algorithms.append(
        NSGA3(ref_dirs=ref_dirs,
              pop_size=config.POPULATION_SIZE,
              sampling=SudoRandomInitialization(),
              crossover=get_crossover("real_k_point", n_points=2),
              mutation=BitStringMutation(),
              eliminate_duplicates=RefactoringSequenceDuplicateElimination())
    )

    # Problems: single-, multi- and many-objective variants with shared bounds.
    problems = [
        ProblemSingleObjective(**bounds),
        ProblemMultiObjective(**bounds),
        ProblemManyObjective(**bounds),
    ]

    # Optimize the many-objective problem (index 2) with NSGA-III (index 2).
    res = minimize(problem=problems[2],
                   algorithm=algorithms[2],
                   termination=('n_gen', config.MAX_ITERATIONS),
                   seed=1,
                   verbose=True)
    logger.info("** FINISHED **")

    logger.info("Best Individual:")
    logger.info(res.X)
    logger.info("Objective Values:")
    logger.info(res.F)

    logger.info("==================")
    logger.info("Other Solutions:")
    for ind in res.opt:
        logger.info(ind.X)
        logger.info(ind.F)
        logger.info("==================")

    logger.info(f"Start Time: {res.start_time}")
    logger.info(f"End Time: {res.end_time}")
    logger.info(f"Execution Time in Seconds: {res.exec_time}")
Exemplo n.º 10
0
from pymoo.factory import get_problem
from pymoo.optimize import minimize

from pymoo.algorithms.moo.nsga2 import NSGA2

# Run NSGA-II on ZDT2 and inspect both the final population and the
# non-dominated set it contains.
problem = get_problem("zdt2")
algorithm = NSGA2(pop_size=10)
res = minimize(problem,
               algorithm,
               ('n_gen', 100),
               verbose=True,
               return_least_infeasible=False,
               seed=1)

# decision variables of every individual in the final population
print('\nall decision variables:')
print(res.pop.get("X"))
print('\n')

# objective values of every individual in the final population
print('\nall objective variables:')
print(res.pop.get("F"))
print('\n')

# the non-dominated subset only
print('\nnon-dominated set - decision variables:')
print(res.X)
print('\n')
print('\nnon-dominated set - objective variables:')
print(res.F)
Exemplo n.º 11
0
def test_crossover_bin(name):
    """Smoke test: the given binary crossover operator must run on ZDT5."""
    algorithm = NSGA2(pop_size=20, crossover=get_crossover(name, prob=0.95))
    minimize(get_problem("zdt5"), algorithm, ("n_gen", 20))
    assert True
Exemplo n.º 12
0
from pymoo.factory import get_problem, ZDT1, ZDT2, ZDT3
from pymoo.util.termination.max_eval import MaximumFunctionCallTermination

if __name__ == "__main__":
    FOLDER = "/Users/blankjul/moo_benchmark"

    # Record the full multi-objective run history.
    recorder = DefaultMultiObjectiveRecorder()

    benchmark = Benchmark(n_runs=11, recorder=recorder)

    benchmark.add_problem("zdt1", ZDT1(), termination=("n_gen", 200))

    benchmark.add_algorithm("nsga2", NSGA2())
    benchmark.add_algorithm("gde3", GDE3())

    # Reuse cached runs from FOLDER when possible; rerun on a cache miss.
    results = benchmark.run(writer=DefaultWriter(FOLDER),
                            loader=DefaultLoader(FOLDER),
                            run_if_loading_fails=True)

    # set the igd values for each of the problems
    MultiObjectiveAnalyzer().run(results, benchmark=benchmark, inplace=True)

    # Aggregation spec: the raw igd values plus their mean and std per group.
    attrs = [("igd", np.array, "igd"),
             ("igd", np.mean, "avg"),
             ("igd", np.std, "std")]
Exemplo n.º 13
0
def main():
    """Optimization module main driver.

    Builds the single-, multi- and many-objective refactoring problems,
    configures GA / NSGA-II / NSGA-III, runs the search selected by
    ``config.PROBLEM``, then logs and saves the optimum refactoring
    sequences, their objective values, and the high-tradeoff points.
    """

    # Define initialization objects
    # Warm start seeds the initial population from detected code smells,
    # otherwise a purely random initialization is used.
    initializer_class = SmellInitialization if config.WARM_START else RandomInitialization
    initializer_object = initializer_class(
        udb_path=config.UDB_PATH,
        population_size=config.POPULATION_SIZE,
        lower_band=config.LOWER_BAND,
        upper_band=config.UPPER_BAND
    )

    # -------------------------------------------
    # Define optimization problems
    problems = list()  # 0: Genetic (Single), 1: NSGA-II (Multi), 2: NSGA-III (Many) objectives problems
    problems.append(
        ProblemSingleObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemMultiObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemManyObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
            verbose_design_metrics=True,
        )
    )

    # Define search algorithms
    algorithms = list()
    # 1: GA
    alg1 = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg1)

    # 2: NSGA-II
    alg2 = NSGA2(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg2)

    # 3: NSGA-III
    # pop_size must be equal or larger than the number of reference directions
    number_of_references_points = config.POPULATION_SIZE - int(config.POPULATION_SIZE * 0.20)
    ref_dirs = get_reference_directions(
        'energy',  # algorithm
        config.NUMBER_OBJECTIVES,  # number of objectives
        number_of_references_points,  # number of reference directions
        seed=1
    )
    alg3 = NSGA3(
        ref_dirs=ref_dirs,
        pop_size=config.POPULATION_SIZE,  # 200
        sampling=PopulationInitialization(initializer_object),
        selection=TournamentSelection(func_comp=binary_tournament),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY, ),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg3)

    # Termination of algorithms
    my_termination = MultiObjectiveDefaultTermination(
        x_tol=None,
        cv_tol=None,
        f_tol=0.0015,
        nth_gen=5,
        n_last=5,
        n_max_gen=config.MAX_ITERATIONS,  # about 1000 - 1400
        n_max_evals=1e6
    )

    # Do optimization for various problems with various algorithms
    # config.PROBLEM selects both the problem and its matching algorithm.
    res = minimize(
        problem=problems[config.PROBLEM],
        algorithm=algorithms[config.PROBLEM],
        termination=my_termination,
        seed=1,
        verbose=False,
        copy_algorithm=True,
        copy_termination=True,
        save_history=False,
        callback=LogCallback(),
    )
    # np.save('checkpoint', res.algorithm)

    # Log results
    # NOTE(review): config.NGEN is added to the generation count everywhere
    # below -- presumably the run continues from NGEN earlier generations;
    # confirm this offset is intended.
    logger.info(f"***** Algorithm was finished in {res.algorithm.n_gen + config.NGEN} generations *****")
    logger.info(" ")
    logger.info("============ time information ============")
    logger.info(f"Start time: {datetime.fromtimestamp(res.start_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"End time: {datetime.fromtimestamp(res.end_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"Execution time in seconds: {res.exec_time}")
    logger.info(f"Execution time in minutes: {res.exec_time / 60}")
    logger.info(f"Execution time in hours: {res.exec_time / (60 * 60)}")
    # logger.info(f"Number of generations: {res.algorithm.n_gen}")
    # logger.info(f"Number of generations", res.algorithm.termination)

    # Log optimum solutions
    logger.info("============ All opt solutions ============")
    for i, ind in enumerate(res.opt):
        logger.info(f'Opt refactoring sequence {i}:')
        logger.info(ind.X)
        logger.info(f'Opt refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind.F)
        logger.info("-" * 75)

    # Log best refactorings
    logger.info("============ Best refactoring sequences (a set of non-dominated solutions) ============")
    for i, ind in enumerate(res.X):
        logger.info(f'Best refactoring sequence {i}:')
        logger.info(ind)
        logger.info("-" * 75)
    logger.info("============ Best objective values (a set of non-dominated solutions) ============")
    for i, ind_objective in enumerate(res.F):
        logger.info(f'Best refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind_objective)
        logger.info("-" * 75)

    # Save best refactorings
    # Flatten each chromosome into (name, params) tuples for JSON dumping.
    population_trimmed = []
    objective_values_content = ''
    for chromosome in res.X:
        chromosome_new = []
        if config.PROBLEM == 0:  # i.e., single objective problem
            for gene_ in chromosome:
                chromosome_new.append((gene_.name, gene_.params))
        else:
            # NOTE(review): multi/many-objective X rows appear to wrap the
            # gene sequence in an extra dimension (chromosome[0]) -- confirm.
            for gene_ in chromosome[0]:
                chromosome_new.append((gene_.name, gene_.params))
        population_trimmed.append(chromosome_new)

    # Build one CSV row per objective vector, prefixed with the generation count.
    for objective_vector in res.F:
        objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
        if config.PROBLEM == 0:
            objective_values_content += f'{objective_vector},'
        else:
            for objective_ in objective_vector:
                objective_values_content += f'{objective_},'
        objective_values_content += '\n'

    best_refactoring_sequences_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_after_{res.algorithm.n_gen + config.NGEN}gens.json'
    )
    with open(best_refactoring_sequences_path, mode='w', encoding='utf-8') as fp:
        json.dump(population_trimmed, fp, indent=4)

    best_refactoring_sequences_objectives_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_objectives_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
    )
    with open(best_refactoring_sequences_objectives_path, mode='w', encoding='utf-8') as fp:
        fp.write(objective_values_content)

    try:
        pf = res.F
        # dm = HighTradeoffPoints()
        dm = get_decision_making("high-tradeoff")
        I = dm.do(pf)

        logger.info("============ High-tradeoff points refactoring sequences ============")
        for i, ind in enumerate(res.X[I]):
            logger.info(f'High tradeoff points refactoring sequence {i}:')
            logger.info(ind)
            logger.info("-" * 75)
        logger.info("============ High-tradeoff points objective values  ============")
        for i, ind_objective in enumerate(pf[I]):
            logger.info(f'High-tradeoff points refactoring sequence corresponding objectives vector {i}:')
            logger.info(ind_objective)
            logger.info("-" * 75)

        logger.info("High-tradeoff points mean:")
        logger.info(np.mean(pf[I], axis=0))
        logger.info("High-tradeoff points median:")
        logger.info(np.median(pf[I], axis=0))

        # Save high-tradeoff refactorings
        population_trimmed = []
        objective_values_content = ''
        for chromosome in res.X[I]:
            chromosome_new = []
            if config.PROBLEM == 0:  # i.e., single objective problem
                for gene_ in chromosome:
                    chromosome_new.append((gene_.name, gene_.params))
            else:
                for gene_ in chromosome[0]:
                    chromosome_new.append((gene_.name, gene_.params))
            population_trimmed.append(chromosome_new)

        for objective_vector in pf[I]:
            objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
            if config.PROBLEM == 0:
                objective_values_content += f'{objective_vector},'
            else:
                for objective_ in objective_vector:
                    objective_values_content += f'{objective_},'
            objective_values_content += '\n'

        high_tradeoff_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_refactoring_after_{res.algorithm.n_gen + config.NGEN}gens.json'
        )
        with open(high_tradeoff_path, mode='w', encoding='utf-8') as fp:
            json.dump(population_trimmed, fp, indent=4)

        high_tradeoff_path_objectives_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
        )
        with open(high_tradeoff_path_objectives_path, mode='w', encoding='utf-8') as fp:
            fp.write(objective_values_content)

    # NOTE(review): bare except also swallows unrelated errors (I/O, typos);
    # narrowing to the exception dm.do actually raises would be safer.
    except:
        logger.error("No multi-optimal solutions (error in computing high tradeoff points)!")
Exemplo n.º 14
0
def main():
    """Configure GA / NSGA-II / NSGA-III for the refactoring problems, run
    the many-objective problem with NSGA-III, and log the non-dominated
    solutions plus the high-tradeoff points."""
    # Define search algorithms
    algorithms = list()
    # 1: GA
    algorithm = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=PureRandomInitialization(),
        crossover=AdaptiveSinglePointCrossover(prob=0.9),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # 2: NSGA II
    algorithm = NSGA2(
        pop_size=config.POPULATION_SIZE,
        sampling=PureRandomInitialization(),
        crossover=AdaptiveSinglePointCrossover(prob=0.9),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # 3: NSGA III
    # pop_size must be equal or larger than the number of reference directions
    number_of_references_points = config.POPULATION_SIZE - int(
        config.POPULATION_SIZE * 0.20)
    ref_dirs = get_reference_directions(
        'energy',  # algorithm
        8,  # number of objectives
        number_of_references_points,  # number of reference directions
        seed=1)
    algorithm = NSGA3(
        ref_dirs=ref_dirs,
        pop_size=config.POPULATION_SIZE,  # 200
        sampling=PureRandomInitialization(),
        selection=TournamentSelection(func_comp=binary_tournament),
        crossover=AdaptiveSinglePointCrossover(prob=0.8),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=0.1),
        eliminate_duplicates=ElementwiseDuplicateElimination(
            cmp_func=is_equal_2_refactorings_list))
    algorithms.append(algorithm)

    # -------------------------------------------
    # Define problems
    # 0: single-, 1: multi-, 2: many-objective variants.
    problems = list()
    problems.append(
        ProblemSingleObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                               n_refactorings_upperbound=config.UPPER_BAND))
    problems.append(
        ProblemMultiObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                              n_refactorings_upperbound=config.UPPER_BAND))
    problems.append(
        ProblemManyObjective(n_refactorings_lowerbound=config.LOWER_BAND,
                             n_refactorings_upperbound=config.UPPER_BAND,
                             evaluate_in_parallel=True))

    # Termination of algorithms
    my_termination = MultiObjectiveDefaultTermination(
        x_tol=None,
        cv_tol=None,
        f_tol=0.0015,
        nth_gen=10,
        n_last=20,
        n_max_gen=config.MAX_ITERATIONS,  # about 1000 - 1400
        n_max_evals=1e6)

    # Do optimization for various problems with various algorithms
    # Here: the many-objective problem (index 2) with NSGA-III (index 2).
    res = minimize(
        problem=problems[2],
        algorithm=algorithms[2],
        termination=my_termination,
        seed=1,
        verbose=False,
        copy_algorithm=True,
        copy_termination=True,
        save_history=False,
    )
    # np.save('checkpoint', res.algorithm)

    # Log results
    logger.info("\n** FINISHED **\n")
    logger.info(
        "Best refactoring sequences (a set of non-dominated solutions):")
    logger.info(res.X)
    logger.info("Best objective values (a set of non-dominated solutions):")
    logger.info(res.F)

    logger.info("=" * 75)
    logger.info("Other solutions:")
    for ind in res.opt:
        logger.info(ind.X)
        logger.info(ind.F)
        logger.info("-" * 50)
    logger.info("=" * 75)

    logger.info(f"Start time: {res.start_time}")
    logger.info(f"End time: {res.end_time}")
    logger.info(f"Execution time in seconds: {res.exec_time}")
    logger.info(f"Execution time in minutes: {res.exec_time / 60}")
    logger.info(f"Execution time in hours: {res.exec_time / (60 * 60)}")
    logger.info(f"Number of generations: {res.algorithm.n_gen}")
    # logger.info(f"Number of generations", res.algorithm.termination)

    # High-tradeoff ("knee") points of the obtained front.
    pf = res.F
    # dm = HighTradeoffPoints()
    dm = get_decision_making("high-tradeoff")
    try:
        I = dm.do(pf)
        logger.info(f"High tradeoff points: {pf[I][0]}")
        logger.info(
            f"High tradeoff points corresponding refactorings: {res.X[I]}")
        logger.info(
            f"The mean improvement of quality attributes: {np.mean(pf[I][0], axis=0)}"
        )
        logger.info(
            f"The median improvement of quality attributes: {np.median(pf[I][0], axis=0)}"
        )
    # NOTE(review): bare except hides unrelated failures (e.g. indexing bugs);
    # catching the specific exception dm.do raises would be safer.
    except:
        logger.info(
            "No multi optimal solutions (error in computing high tradeoff points)!"
        )
Exemplo n.º 15
0
    def search(self, data: Data, models: Collection[Model], tid: int,
               **kwargs) -> np.ndarray:
        """Optimize the surrogate model(s) for task `tid` with a pymoo algorithm.

        Single-objective problems (DO == 1) use GA or PSO; multi-objective
        problems use NSGA-II or MOEA/D, selected by kwargs['search_algo'].

        Returns:
            A (tid, bestX) tuple where bestX is a list holding the candidate
            decision vectors found by the search.

        Raises:
            Exception: if kwargs['search_algo'] names an unknown algorithm.
        """
        # The caller packs the real options under the 'kwargs' key.
        kwargs = kwargs['kwargs']

        prob = SurrogateProblem(self.problem, self.computer, data, models,
                                self.options, tid, self.models_transfer)
        prob_pymoo = MyProblemPyMoo(self.problem.DP, self.problem.DO, prob)

        if kwargs['verbose']:
            print("prob: ", prob)

        bestX = []
        if self.problem.DO == 1:  # single objective optimizer
            if kwargs['search_algo'] == 'ga':
                from pymoo.algorithms.soo.nonconvex.ga import GA
                from pymoo.optimize import minimize
                algo = GA(pop_size=kwargs["search_pop_size"])
            elif kwargs['search_algo'] == 'pso':
                from pymoo.algorithms.soo.nonconvex.pso import PSO
                from pymoo.optimize import minimize
                algo = PSO(pop_size=kwargs["search_pop_size"])
            else:
                raise Exception(
                    f'Unknown optimization algorithm "{kwargs["search_algo"]}"'
                )

            res = minimize(prob_pymoo, algo, verbose=kwargs['verbose'], seed=1)
            # Single best point, reshaped to one row of DP decision variables.
            bestX.append(np.array(res.X).reshape(1, self.problem.DP))
        else:  # multi objective
            if kwargs['search_algo'] == 'nsga2':
                from pymoo.algorithms.moo.nsga2 import NSGA2
                from pymoo.optimize import minimize
                algo = NSGA2(pop_size=kwargs["search_pop_size"])
            elif kwargs['search_algo'] == 'moead':
                from pymoo.algorithms.moo.moead import MOEAD
                from pymoo.optimize import minimize
                from pymoo.factory import get_reference_directions
                ref_dirs = get_reference_directions("das-dennis",
                                                    self.problem.DO,
                                                    n_partitions=12)
                algo = MOEAD(ref_dirs,
                             n_neighbors=15,
                             prob_neighbor_mating=0.7)
            else:
                raise Exception(
                    f'Unknown optimization algorithm "{kwargs["search_algo"]}"'
                )

            res = minimize(prob_pymoo,
                           algo, ("n_gen", kwargs["search_gen"]),
                           verbose=kwargs['verbose'],
                           seed=1)
            # Keep at most 'search_more_samples' of the non-dominated points.
            firstn = min(int(kwargs['search_more_samples']),
                         np.shape(res.X)[0])
            bestX.append(res.X[0:firstn])

        if kwargs['verbose']:
            # BUG FIX: the original printed `'OK' if cond else 'KO'` with
            # `cond` undefined, raising NameError whenever verbose was on.
            # Report success based on whether candidates were produced.
            print(tid, 'OK' if len(bestX) > 0 else 'KO')
            sys.stdout.flush()
            print("bestX", bestX)
        return (tid, bestX)
Exemplo n.º 16
0
        # any input reference direction function
        pass
    except Exception as e:
        print(e)
        sys.exit()

    if alg_name == "NSGA2":
        sampling_func = MOO_CONFIG["sampling_func"]
        pop_size = alg_specific_args["pop_size"]
        #################
        # set algorithm #
        #################
        algorithm = NSGA2(
            pop_size=pop_size,
            sampling=get_sampling(sampling_func),
            crossover=get_crossover(crossover_func, **crossover_func_args),
            mutation=get_mutation(mutation_func, **mutation_func_args),
            eliminate_duplicates=True
        )
        #####################
        # algorithm logging #
        #####################
        MOO_log(
            msg="algorithm = {}(\n"
            "pop_size={},\n"
            "sampling=get_sampling({}),\n"
            "crossover=get_crossover({},{}),\n"
            "mutation=get_mutation({},{}),\n"
            "eliminate_duplicates=True\n"
            ")".format(
                alg_name,
Exemplo n.º 17
0
    ES()
]


@pytest.mark.parametrize('problem', SINGLE_OBJECTIVE_PROBLEMS)
@pytest.mark.parametrize('algorithm', SINGLE_OBJECTIVE_ALGORITHMS)
def test_singe_obj(problem, algorithm):
    # NOTE(review): name has a typo ("singe" -> "single"); kept unchanged to
    # avoid altering the collected test id.
    res = minimize(problem, algorithm, seed=1, verbose=True)
    expected_min = problem.pareto_front().flatten()[0]
    np.testing.assert_almost_equal(expected_min, res.F[0], decimal=3)


# Fixtures for the multi-objective parametrized tests below.
MULTI_OBJECTIVE_PROBLEMS = [ZDT1()]

# 2-objective reference directions shared by the decomposition-based algorithms.
ref_dirs = get_reference_directions("das-dennis", 2, n_partitions=99)
MULTI_OBJECTIVE_ALGORITHMS = [
    NSGA2(),
    RVEA(ref_dirs),
    MOEAD(ref_dirs),
    ParallelMOEAD(ref_dirs),
    AGEMOEA()
]


@pytest.mark.parametrize('problem', MULTI_OBJECTIVE_PROBLEMS)
@pytest.mark.parametrize('algorithm', MULTI_OBJECTIVE_ALGORITHMS)
def test_multi_obj(problem, algorithm):
    # Each algorithm must converge close to the known front: IGD below 0.05.
    res = minimize(problem, algorithm, ('n_gen', 300), seed=1, verbose=True)
    front = problem.pareto_front()
    assert IGD(front).do(res.F) < 0.05