Code Example #1
def project():
    conn = sqlite3.connect('products.db')
    companies = get_companies(conn)

    problem = Problem(len(companies), 2)
    #problem.types[:] = [Integer(1, nlojas) for _ in range(nprodutos)]

    problem.function = schaffer

    algorithm = NSGAII(problem)
    algorithm.run(10000)

    for company in companies:
        result = schaffer(company[0], conn)
        print(result)

    conn.close()

    plt.scatter([s.objectives[0] for s in algorithm.result],
                [s.objectives[1] for s in algorithm.result])
    plt.xlim([0, 1.1])
    plt.ylim([0, 1.1])
    plt.xlabel("Preço")
    plt.ylabel("Distancia")
    plt.show()
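
Note: the excerpt above never assigns problem.types (the assignment is commented out), so it will not run as shown. Below is a minimal, self-contained sketch of the integer-typed setup the commented-out line appears to aim for; nprodutos and nlojas are placeholders taken from that line (in the original they would presumably come from products.db), and the objective is a placeholder as well, not the original model.

from platypus import NSGAII, Problem, Integer

nprodutos = 10  # placeholder: number of products (read from the database in the original)
nlojas = 5      # placeholder: number of stores (read from the database in the original)

problem = Problem(nprodutos, 2)  # nprodutos decision variables, 2 objectives
problem.types[:] = [Integer(1, nlojas) for _ in range(nprodutos)]
problem.function = lambda x: [sum(x), max(x)]  # placeholder objectives

algorithm = NSGAII(problem)
algorithm.run(10000)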
Code Example #2
def _to_robust_problem(model, SOWs, obj_aggregate, constr_aggregate):
    variables = []
    levers = []

    for lever in model.levers:
        vars = lever.to_variables()
        variables.extend(vars)
        levers.append((lever, len(vars)))

    nvars = len(variables)
    nobjs = sum([
        1 if r.dir == Response.MINIMIZE or r.dir == Response.MAXIMIZE else 0
        for r in model.responses
    ])
    nconstrs = len(model.constraints)

    function = functools.partial(_robust_evaluation_function,
                                 model=model,
                                 SOWs=SOWs,
                                 nvars=nvars,
                                 nobjs=nobjs,
                                 nconstrs=nconstrs,
                                 levers=levers,
                                 obj_aggregate=obj_aggregate,
                                 constr_aggregate=constr_aggregate)

    problem = Problem(nvars, nobjs, nconstrs, function)
    problem.types[:] = variables
    problem.directions[:] = [
        Problem.MINIMIZE if r.dir == Response.MINIMIZE else Problem.MAXIMIZE
        for r in model.responses
        if r.dir == Response.MINIMIZE or r.dir == Response.MAXIMIZE
    ]
    problem.constraints[:] = "==0"
    return (problem, levers)
Code Example #3
def _to_robust_problem(model, SOWs, obj_aggregate, constr_aggregate):
    variables = []
    levers = []

    for lever in model.levers:
        vars = lever.to_variables()
        variables.extend(vars)
        levers.append((lever, len(vars)))

    nvars = len(variables)
    nobjs = sum([1 if r.type == Response.MINIMIZE or r.type == Response.MAXIMIZE else 0 for r in model.responses])
    nconstrs = len(model.constraints)

    function = functools.partial(
        _robust_evaluation_function,
        model=model,
        SOWs=SOWs,
        nvars=nvars,
        nobjs=nobjs,
        nconstrs=nconstrs,
        levers=levers,
        obj_aggregate=obj_aggregate,
        constr_aggregate=constr_aggregate,
    )

    problem = Problem(nvars, nobjs, nconstrs, function)
    problem.types[:] = variables
    problem.directions[:] = [
        Problem.MINIMIZE if r.type == Response.MINIMIZE else Problem.MAXIMIZE
        for r in model.responses
        if r.type == Response.MINIMIZE or r.type == Response.MAXIMIZE
    ]
    problem.constraints[:] = "==0"
    return problem
Code Example #4
File: platypuscube.py  Project: jomorlier/humpday
def platypus_cube(objective, n_trials, n_dim, with_count=False, method=None):
    global feval_count
    feval_count = 0

    def _objective(vars):
        global feval_count
        feval_count += 1
        return float(objective(
            list(vars)))  # Avoid np.array as Platypus may puke

    problem = Problem(n_dim, 1, 0)
    problem.types[:] = [Real(0.0, 1.0)] * n_dim
    problem.constraints[:] = "<=0"
    problem.function = _objective

    strategy_and_args = PLATYPUS_ALGORITHMS[method]
    if isinstance(strategy_and_args, tuple):
        strategy = strategy_and_args[0]
        strategy_args = strategy_and_args[1]
        algorithm = strategy(problem, **strategy_args)
    else:
        strategy = strategy_and_args
        algorithm = strategy(problem)

    algorithm.run(n_trials)
    feasible_solution_obj = sorted([(s.objectives[0], s.variables)
                                    for s in algorithm.result if s.feasible],
                                   reverse=False)
    best_obj, best_x = feasible_solution_obj[0]
    if isinstance(best_x, FixedLengthArray):
        best_x = best_x._data  # CMA-ES returns it this way for some reason
    return (best_obj, best_x, feval_count) if with_count else (best_obj,
                                                               best_x)
Code Example #5
    def fit(self, x, y, bound=None, name=None):

        self.y = np.array(y)
        self.x = x

        TS = 1
        ND = len(y) - 1

        y = y / self.N

        t_start = 0.0
        t_end = ND
        t_inc = TS
        t_range = np.arange(t_start, t_end + t_inc, t_inc)

        Model_Input = (self.S0, self.I0, self.R0)

        # GA Parameters
        number_of_generations = 1000
        ga_population_size = 100
        number_of_objective_targets = 1  # The MSE
        number_of_constraints = 0
        number_of_input_variables = 2  # beta and gamma
        problem = Problem(number_of_input_variables,
                          number_of_objective_targets, number_of_constraints)
        problem.function = functools.partial(self.fitness_function,
                                             y=y,
                                             Model_Input=Model_Input,
                                             t_range=t_range)

        algorithm = NSGAII(problem, population_size=ga_population_size)

        problem.types[0] = Real(0, 1)  # beta initial range
        problem.types[1] = Real(1 / 14, 1 / 5)  # gamma initial range

        # Running the GA
        algorithm.run(number_of_generations)

        feasible_solutions = [s for s in algorithm.result if s.feasible]

        self.beta = feasible_solutions[0].variables[0]
        self.gamma = feasible_solutions[0].variables[1]

        input_variables = ['beta', 'gamma']
        file_address = 'optimised_coefficients/'
        filename = "ParametrosAjustados_Modelo_{}_{}_{}_Dias.txt".format(
            'SIR_EDO', name, len(x))

        if not os.path.exists(file_address):
            os.makedirs(file_address)

        with open(file_address + filename, "w") as file_optimised_parameters:
            for i in range(len(input_variables)):
                message = '{}:{:.4f}\n'.format(
                    input_variables[i], feasible_solutions[0].variables[i])
                file_optimised_parameters.write(message)
Code Example #6
def Pareto_Front_Strategy(Probs, odds, Total_BetMax, Max_individual_bet,
                          Min_individual_bet):

    #NOTE: The order of bets and Probs is as follows: 1st (1) is AWAY win and 2nd (2) is HOME win

    Bets = np.argmax(Probs, 1) + 1

    n = len(Probs)  #number of games

    # Calculate probabilities for ALL the events together (i.e. each outcome). This will be used as weights for our optimisation
    k = n
    num_events = int(pow(2, n))
    Event_Probs = np.zeros(num_events)

    outc = outcomes_HACK(n)

    for i in range(num_events):
        individual_probs = np.diag(Probs[:, outc[i, :] - 1])
        Event_Probs[i] = PoissonBinomialPDF(k, n, individual_probs)

    #Pareto optimization
    problem = Problem(n, 2, 1)  #decision variables, objectives, constraints
    problem.types[:] = Real(
        Min_individual_bet,
        Max_individual_bet)  #lower and upper bounds for all decision variables
    problem.function = functools.partial(ExpectationRisk,
                                         arg1=n,
                                         arg2=odds,
                                         arg3=Event_Probs,
                                         arg4=Bets,
                                         arg5=outc)
    problem.constraints[:] = "<=" + str(Total_BetMax)  #inequality constraint: sum of bets no more than Total_BetMax
    algorithm = NSGAII(problem)
    algorithm.run(5000)

    plt.scatter([s.objectives[0] for s in algorithm.result],
                [s.objectives[1] for s in algorithm.result])
    plt.show()

    Stakes = []

    Ratio = 0

    for i in range(len(algorithm.result)):
        #objectives are: -data[0] = Expectation, data[1] = Variance

        #Exp/Variance ratio threshold
        Exp = algorithm.result[i].objectives[0]
        Var = algorithm.result[i].objectives[1]
        curRatio = Exp / Var

        if curRatio > Ratio:
            Ratio = curRatio
            Stakes = algorithm.result[i].variables
            ExpOut = Exp
            VarOut = Var

    return (Stakes, Bets, ExpOut, VarOut)
Code Example #7
    def update(self, **kwargs):
        """
        Rewrite update
        """
        assert 'X' in kwargs and 'Y' in kwargs
        assert 'eta' in kwargs and 'num_data' in kwargs
        super(USeMO, self).update(**kwargs)

        self.X_dim = self.X.shape[1]
        self.Y_dim = self.Y.shape[1]
        assert self.Y_dim > 1

        # update single acquisition function
        for i in range(self.Y_dim):
            self.single_acq[i].update(model=self.model[i],
                                      eta=self.eta[i],
                                      num_data=self.num_data)
            self.uncertainty_acq[i].update(model=self.model[i],
                                           eta=self.eta[i],
                                           num_data=self.num_data)

        def CMO(x):
            x = np.asarray(x)
            # minimize negative acq
            return [
                -self.single_acq[i](x, convert=False)[0][0]
                for i in range(self.Y_dim)
            ]

        problem = Problem(self.X_dim, self.Y_dim)
        set_problem_types(self.config_space, problem)
        problem.function = CMO

        variator = get_variator(self.config_space)
        algorithm = NSGAII(problem, population_size=100, variator=variator)
        algorithm.run(2500)
        # decode
        for s in algorithm.result:
            s.variables[:] = [
                problem.types[i].decode(s.variables[i])
                for i in range(problem.nvars)
            ]
        cheap_pareto_set = [
            solution.variables for solution in algorithm.result
        ]
        # cheap_pareto_set_unique = []
        # for i in range(len(cheap_pareto_set)):
        #     if not any((cheap_pareto_set[i] == x).all() for x in self.X):
        #         cheap_pareto_set_unique.append(cheap_pareto_set[i])
        cheap_pareto_set_unique = cheap_pareto_set

        single_uncertainty = np.array([
            self.uncertainty_acq[i](np.asarray(cheap_pareto_set_unique),
                                    convert=False) for i in range(self.Y_dim)
        ])  # shape=(Y_dim, N, 1)
        single_uncertainty = single_uncertainty.reshape(self.Y_dim,
                                                        -1)  # shape=(Y_dim, N)
        self.uncertainties = np.prod(single_uncertainty,
                                     axis=0)  # shape=(Y_dim,) todo normalize?
        self.candidates = np.array(cheap_pareto_set_unique)
Code Example #8
def platypus_cube(objective,
                  scale,
                  n_trials,
                  n_dim,
                  strategy,
                  with_count=False):

    global feval_count
    feval_count = 0

    def _objective(vars):
        global feval_count
        feval_count += 1
        return objective(list(vars))[0]

    problem = Problem(n_dim, 1, 0)
    problem.types[:] = [Real(-scale, scale)] * n_dim
    problem.constraints[:] = "<=0"
    problem.function = _objective

    algorithm = strategy(problem)
    algorithm.run(n_trials)
    feasible_solution_obj = [
        s.objectives[0] for s in algorithm.result if s.feasible
    ]
    best_obj = min(feasible_solution_obj)
    return (best_obj, feval_count) if with_count else best_obj
Code Example #9
File: input.py  Project: quantum-dan/raspy-cal
def autoIterate(model,
                river,
                reach,
                rs,
                flow,
                stage,
                nct,
                plot,
                outf,
                metrics,
                correctDatum,
                evals=None,
                si=False):
    """
    Automatically iterate with NSGA-II
    """
    keys = metrics  # ensure same order
    evalf = evaluator(stage, useTests=keys, correctDatum=correctDatum)
    evals = int(
        input("How many evaluations to run? ")) if evals is None else evals
    plotpath = ".".join(outf.split(".")[:-1]) + ".png"
    count = 1
    print("Running automatic calibration")

    def manningEval(vars):
        n = vars[0]
        metrics = minimized(
            nstageSingleRun(model, river, reach, rs, stage, n, keys,
                            correctDatum))
        values = [metrics[key] for key in keys]
        constraints = [-n, n - 1]
        nonlocal count
        print("Completed %d evaluations" % count)
        count += 1
        return values, constraints

    c_type = "<0"
    problem = Problem(
        1, len(keys),
        2)  # 1 decision variable, len(keys) objectives, and 2 constraints
    problem.types[:] = Real(0.001, 1)  # range of decision variable
    problem.constraints[:] = c_type
    problem.function = manningEval

    algorithm = NSGAII(problem, population_size=nct)
    algorithm.run(evals)
    nondom = nondominated(
        algorithm.result
    )  # nondom: list of Solutions - wanted value is variables[0]
    nondomNs = [sol.variables[0] for sol in nondom]
    results = runSims(model, nondomNs, river, reach, len(stage), range=[rs])
    resultPts = [(nondomNs[ix],
                  [results[ix][rs][jx] for jx in range(1,
                                                       len(stage) + 1)])
                 for ix in range(len(nondomNs))]
    metrics = [(res[0], evalf(res[1]), res[1]) for res in resultPts]
    nDisplay(metrics, flow, stage, plotpath, outf, plot, correctDatum, si)
    return metrics
Code Example #10
def cal(args, exeCount):
    start = time.time()
    param_count = 5  # number of parameters

    problem = Problem(param_count, 2)  # number of optimization parameters, number of objectives
    problem.types[:] = Real(-2.0, 2.0)  # parameter range
    problem.function = schaffer
    algorithm = NSGAII(problem)
    algorithm.run(5000)  # number of iterations

    print('{:-^63}'.format('-'))

    # Organize the data
    # params: coefficients a
    # f1s   : residual vibration [deg]
    # f2s   : energy
    params = np.empty([100, param_count])
    f1s = np.empty([100])
    f2s = np.empty([100])
    for i, solution in enumerate(algorithm.result):
        result = tuple(solution.variables[:] + solution.objectives[:])

        params[i, :] = result[:param_count][:]
        f1s[i] = 180 * result[param_count] / np.pi
        f2s[i] = result[param_count + 1]

    # Display the values of a that minimize the residual vibration
    index = np.argmin(f1s)
    print('\n*** 残留振動が最小の時の各値 ***')
    print('残留振動[deg]\t{}'.format(f1s[index]))
    print('エネルギー[J]\t{}'.format(f2s[index]))
    print('係数a\t\t{}'.format(params[index, :]))

    np.savetxt('./results/nsga2/{}/nsga2_params_{}.csv'.format(
        args[1], exeCount),
               params[index, :],
               delimiter=',')

    # Elapsed time
    print(f'\nelapsed time: {time.time()-start}')

    # Write the coefficients a, residual vibration, and energy to a CSV file
    data = np.empty([100, param_count + 2])
    data[:, 0:param_count] = params
    data[:, param_count] = f1s
    data[:, param_count + 1] = f2s
    np.savetxt('./results/nsga2/{}/nsga2_data_{}.csv'.format(
        args[1], exeCount),
               data,
               delimiter=',')
Code Example #11
    def update(self, **kwargs):
        """
        Rewrite update to support pareto front sampling.
        """
        assert 'X' in kwargs and 'Y' in kwargs
        assert 'constraint_perfs' in kwargs
        super(MESMOC, self).update(**kwargs)

        self.X_dim = self.X.shape[1]
        self.Y_dim = self.Y.shape[1]

        self.Multiplemes = [None] * self.Y_dim
        self.Multiplemes_constraints = [None] * self.num_constraints
        for i in range(self.Y_dim):
            self.Multiplemes[i] = MaxvalueEntropySearch(self.model[i], self.X, self.Y[:, i],
                                                        random_state=self.random_state)
            self.Multiplemes[i].Sampling_RFM()
        for i in range(self.num_constraints):
            # Caution dim of self.constraint_perfs!
            self.Multiplemes_constraints[i] = MaxvalueEntropySearch(self.constraint_models[i],
                                                                    self.X, self.constraint_perfs[i])
            self.Multiplemes_constraints[i].Sampling_RFM()

        self.min_samples = []
        self.min_samples_constraints = []
        for j in range(self.sample_num):
            for i in range(self.Y_dim):
                self.Multiplemes[i].weigh_sampling()
            for i in range(self.num_constraints):
                self.Multiplemes_constraints[i].weigh_sampling()

            def CMO(xi):
                xi = np.asarray(xi)
                y = [self.Multiplemes[i].f_regression(xi)[0][0] for i in range(self.Y_dim)]
                y_c = [self.Multiplemes_constraints[i].f_regression(xi)[0][0] for i in range(self.num_constraints)]
                return y, y_c

            problem = Problem(self.X_dim, self.Y_dim, self.num_constraints)
            for k in range(self.X_dim):
                problem.types[k] = Real(self.bounds[k][0], self.bounds[k][1])  # todo other types
            problem.constraints[:] = "<=0"  # todo confirm
            problem.function = CMO
            algorithm = NSGAII(problem)
            algorithm.run(1500)
            cheap_pareto_front = [list(solution.objectives) for solution in algorithm.result]
            cheap_constraints_values = [list(solution.constraints) for solution in algorithm.result]
            # picking the min over the pareto: best case
            min_of_functions = [min(f) for f in list(zip(*cheap_pareto_front))]
            min_of_constraints = [min(f) for f in list(zip(*cheap_constraints_values))]  # todo confirm
            self.min_samples.append(min_of_functions)
            self.min_samples_constraints.append(min_of_constraints)
Code Example #12
File: BO.py  Project: LilyEvansHogwarts/BNN
    def nn_opt(self, nn):
        with torch.no_grad():

            def obj_cons(x):
                tx = torch.tensor(x)
                out = nn(tx)
                return out[:self.nobj].numpy().tolist(), out[self.nobj:].numpy(
                ).tolist()

            def obj_ucons(x):
                tx = torch.tensor(x)
                return nn(tx).numpy().tolist()

            arch = Archive()
            if self.ncons == 0:
                prob = Problem(self.dim, self.nobj)
                prob.function = obj_ucons
            else:
                prob = Problem(self.dim, self.nobj, self.ncons)
                prob.function = obj_cons
                prob.constraints[:] = "<=0"
            prob.types[:] = [Real(self.lb, self.ub) for i in range(self.dim)]
            self.algo = NSGAII(prob, population_size=50, archive=arch)
            self.algo.run(5000)

            optimized = self.algo.result
            rand_idx = np.random.randint(len(optimized))
            suggested_x = torch.tensor(optimized[rand_idx].variables)
            suggested_y = nn(suggested_x)
            return suggested_x.view(-1, self.dim), suggested_y.view(
                -1, self.nobj + self.ncons)
Code Example #13
    def run_nsgaii_bc():
        def CMO(xi):
            xi = np.asarray(xi)
            y = [branin(xi), Currin(xi)]
            return y

        problem = Problem(2, 2)
        problem.types[:] = Real(0, 1)
        problem.function = CMO
        algorithm = NSGAII(problem)
        algorithm.run(2500)
        cheap_pareto_front = np.array(
            [list(solution.objectives) for solution in algorithm.result])
        return cheap_pareto_front
Code Example #14
    def update(self, **kwargs):
        """
        Rewrite update to support pareto front sampling.
        """
        assert 'X' in kwargs and 'Y' in kwargs
        super(MESMO, self).update(**kwargs)

        self.X_dim = self.X.shape[1]
        self.Y_dim = self.Y.shape[1]

        self.Multiplemes = [None] * self.Y_dim
        for i in range(self.Y_dim):
            self.Multiplemes[i] = MaxvalueEntropySearch(
                self.model[i],
                self.X,
                self.Y[:, i],
                random_state=self.rng.randint(10000))
            self.Multiplemes[i].Sampling_RFM()

        self.min_samples = []
        for j in range(self.sample_num):
            for i in range(self.Y_dim):
                self.Multiplemes[i].weigh_sampling()

            def CMO(xi):
                xi = np.asarray(xi)
                y = [
                    self.Multiplemes[i].f_regression(xi)[0][0]
                    for i in range(self.Y_dim)
                ]
                return y

            problem = Problem(self.X_dim, self.Y_dim)
            set_problem_types(self.config_space, problem)
            problem.function = CMO

            variator = get_variator(self.config_space)
            algorithm = NSGAII(problem, population_size=100, variator=variator)
            algorithm.run(1500)
            cheap_pareto_front = [
                list(solution.objectives) for solution in algorithm.result
            ]
            # picking the min over the pareto: best case
            min_of_functions = [min(f) for f in list(zip(*cheap_pareto_front))]
            self.min_samples.append(min_of_functions)
Code Example #15
def platypus_cube(objective, scale, n_trials, strategy):

    def _objective(vars):
        u1 = vars[0]
        u2 = vars[1]
        u3 = vars[2]
        return objective([u1, u2, u3])[0]

    problem = Problem(3, 1, 0)
    problem.types[:] = [Real(-scale, scale), Real(-scale, scale), Real(-scale, scale)]
    problem.constraints[:] = "<=0"
    problem.function = _objective

    algorithm = strategy(problem)
    algorithm.run(n_trials)
    feasible_solution_obj = [s.objectives[0] for s in algorithm.result if s.feasible]
    best_obj = min(feasible_solution_obj)
    return best_obj
Code Example #16
def make_multiobjective_function_counting(sources,
                                          bounds,
                                          times_more_detectors=1,
                                          interpolation_method="nearest"):
    """
    This balances the number of detectors with the quality of the outcome
    bounds : list[(x_min, x_max), (y_min, y_max), ...]
        The bounds on the feasible region
    """
    objective_function = make_single_objective_function(
        sources, interpolation_method=interpolation_method,
        masked=True)  # the function to be optimized
    counting_function = make_counting_objective()

    def multiobjective_func(x):  # this is the double objective function
        return [objective_function(x), counting_function(x)]

    # there is an x, y, and a mask for each source so there must be three
    # times more input variables
    # the upper bound on the number of detectors n times the number of
    # sources
    parameterized_locations = sources[0].parameterized_locations
    dimensionality = parameterized_locations.shape[1]

    # We add a boolean flag to each location variable
    num_inputs = len(sources) * (dimensionality + 1) * times_more_detectors
    NUM_OUTPUTS = 2  # the default for now
    # define the dimensionality of the input and output spaces
    problem = Problem(num_inputs, NUM_OUTPUTS)

    logging.warning(
        f"Creating a multiobjective counting function with dimensionality {dimensionality}"
    )
    logging.warning(f"bounds are {bounds}")

    for i in range(dimensionality):
        # splat "*" notation is expanding the pair which is low, high
        problem.types[i::(dimensionality + 1)] = Real(
            *bounds[i])  # This is the feasible region

    # indicator on whether the source is on
    problem.types[dimensionality::(dimensionality + 1)] = Binary(1)
    problem.function = multiobjective_func
    return problem
Code Example #17
    def update(self, **kwargs):
        """
        Rewrite update
        """
        assert 'X' in kwargs and 'Y' in kwargs
        assert 'eta' in kwargs and 'num_data' in kwargs
        super(USeMO, self).update(**kwargs)

        self.X_dim = self.X.shape[1]
        self.Y_dim = self.Y.shape[1]
        assert self.Y_dim > 1

        # update single acquisition function
        for i in range(self.Y_dim):
            self.single_acq[i].update(model=self.model[i],
                                      eta=self.eta[i],
                                      num_data=self.num_data)
            self.uncertainty_acq[i].update(model=self.model[i],
                                           eta=self.eta[i],
                                           num_data=self.num_data)

        def CMO(x):
            x = np.asarray(x)
            # minimize negative acq
            return [-self.single_acq[i](x, convert=False)[0][0] for i in range(self.Y_dim)]

        problem = Problem(self.X_dim, self.Y_dim)
        for k in range(self.X_dim):
            problem.types[k] = Real(self.bounds[k][0], self.bounds[k][1])  # todo other types
        problem.function = CMO
        algorithm = NSGAII(problem)  # todo population_size
        algorithm.run(2500)
        cheap_pareto_set = [solution.variables for solution in algorithm.result]
        # cheap_pareto_set_unique = []
        # for i in range(len(cheap_pareto_set)):
        #     if not any((cheap_pareto_set[i] == x).all() for x in self.X):   # todo convert problem? no this step?
        #         cheap_pareto_set_unique.append(cheap_pareto_set[i])
        cheap_pareto_set_unique = cheap_pareto_set

        single_uncertainty = np.array([self.uncertainty_acq[i](np.asarray(cheap_pareto_set_unique), convert=False)
                                       for i in range(self.Y_dim)])  # shape=(Y_dim, N, 1)
        single_uncertainty = single_uncertainty.reshape(self.Y_dim, -1)  # shape=(Y_dim, N)
        self.uncertainties = np.prod(single_uncertainty, axis=0)  # shape=(Y_dim,) todo normalize?
        self.candidates = np.array(cheap_pareto_set_unique)
Code Example #18
def NSGA_test(data_name, input_dim, output_dim, x_bounds, epoch, population):
    """
    JSON for the benchmark function:
    "function_name":{
        "hypervolume":,
        "v_ref":[],
        "w_ref":[],
        "x_bounds":[],
        "input_dim":,
        "output_dim":
    }
    """

    problem = Problem(input_dim, output_dim)
    problem.types[:] = [
        Real(x_bounds[i][0], x_bounds[i][1]) for i in range(input_dim)
    ]
    problem.function = eval(data_name)
    algorithm = NSGAII(problem, population_size=population)
    start = time.perf_counter()
    algorithm.run(epoch)
    end = time.perf_counter() - start
    v_ref = []
    w_ref = []
    w_ref_norm = []
    print(len(algorithm.result))
    pareto_frontier = np.zeros((len(algorithm.result), output_dim))
    pareto_frontier_norm = np.zeros((len(algorithm.result), output_dim))
    for i in range(output_dim):
        frontier_i = np.array([s.objectives[i] for s in algorithm.result])
        pareto_frontier[:, i] = frontier_i
        min_i = np.min(frontier_i)
        max_i = np.max(frontier_i)
        frontier_i_norm = (frontier_i - min_i) / (max_i - min_i)
        pareto_frontier_norm[:, i] = frontier_i_norm
        v_ref.append(min_i)
        w_ref.append(max_i)
        w_ref_norm.append(1)

    hyp = Hypervolume(minimum=v_ref, maximum=w_ref)
    HV = hyp(algorithm.result)
    return HV, end
Code Example #19
    def generate_problem(self):
        # 1 decision variable, 1 objective, 2 constraints
        problem = Problem(1, 1, 2)
        problem.types[:] = Binary(len(self.requirements))
        problem.directions[:] = Problem.MAXIMIZE
        problem.constraints[0] = "!=0"
        problem.constraints[1] = "<=0"
        problem.function = self.get_problem_function
        return problem
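
Since the problem above is declared with one objective and two constraints, Platypus expects its evaluation function to return two sequences: the objective values and the constraint values (checked here against "!=0" and "<=0"). The following is only a hypothetical sketch of that shape, since get_problem_function itself is not shown in the excerpt; score, cost, and budget are invented names.

    def get_problem_function(self, x):
        selection = x[0]  # the Binary variable decodes to a list of booleans
        score = self.score(selection)  # hypothetical: value of the selected requirements
        cost = self.cost(selection)    # hypothetical: total cost of the selected requirements
        # one maximized objective, then the two constraint values
        return [score], [score, cost - self.budget]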
Code Example #20
File: nsga_optimizer.py  Project: PKU-DAIR/open-box
    def __init__(self,
                 objective_function: callable,
                 config_space,
                 num_constraints=0,
                 num_objs=1,
                 max_runs=2500,
                 algorithm='nsgaii',
                 logging_dir='logs',
                 task_id='default_task_id',
                 random_state=None,
                 **kwargs):

        if task_id is None:
            raise ValueError(
                'Task id is not SPECIFIED. Please input task id first.')

        self.num_inputs = len(config_space.get_hyperparameters())
        self.num_constraints = num_constraints
        self.num_objs = num_objs
        self.algo = algorithm
        self.FAILED_PERF = [MAXINT] * num_objs
        super().__init__(objective_function,
                         config_space,
                         task_id=task_id,
                         output_dir=logging_dir,
                         random_state=random_state,
                         max_runs=max_runs)
        random.seed(self.rng.randint(MAXINT))

        # prepare objective function for platypus algorithm
        self.nsga_objective = objective_wrapper(objective_function,
                                                config_space, num_constraints)

        # set problem
        self.problem = Problem(self.num_inputs, num_objs, num_constraints)
        set_problem_types(config_space, self.problem)
        if num_constraints > 0:
            self.problem.constraints[:] = "<=0"
        self.problem.function = self.nsga_objective

        # set algorithm
        if self.algo == 'nsgaii':
            population_size = kwargs.get('population_size', 100)
            if self.max_iterations <= population_size:
                self.logger.warning(
                    'max_runs <= population_size! Please check.')
                population_size = min(max_runs, population_size)
            variator = get_variator(config_space)
            self.algorithm = NSGAII(self.problem,
                                    population_size=population_size,
                                    variator=variator)
        else:
            raise ValueError('Unsupported algorithm: %s' % self.algo)
Code Example #21
    def make_platypus_objective_function_competing_function(
            self, sources, bad_sources=[]):
        total_ret_func = make_total_lookup_function(
            sources, interpolation_method=self.interpolation_method
        )  # the function to be optimized
        bad_sources_func = make_total_lookup_function(
            bad_sources,
            type="fastest",
            interpolation_method=self.interpolation_method
        )  # the function to be optimized

        def multiobjective_func(x):  # this is the double objective function
            return [total_ret_func(x), bad_sources_func(x)]

        num_inputs = len(sources) * 2  # there is an x, y for each source
        NUM_OUTPUTS = 2  # the default for now
        # define the dimensionality of the input and output spaces
        problem = Problem(num_inputs, NUM_OUTPUTS)
        x, y, time = sources[0]  # expand the first source
        min_x = min(x)
        min_y = min(y)
        max_x = max(x)
        max_y = max(y)
        print("min x : {}, max x : {}, min y : {}, max y : {}".format(
            min_x, max_x, min_y, max_y))
        problem.types[::2] = Real(min_x, max_x)  # This is the feasible region
        problem.types[1::2] = Real(min_y, max_y)
        problem.function = multiobjective_func
        # the second function should be maximized rather than minimized
        problem.directions[1] = Problem.MAXIMIZE
        return problem
Code Example #22
def make_multiobjective_function_competing(sources,
                                           bounds=None,
                                           bad_sources=(),
                                           interpolation_method="nearest"):
    """Create an objective function with a false alarm"""

    # Create the two functions
    objective_function = make_single_objective_function(
        sources, interpolation_method=interpolation_method
    )  # the function to be optimized
    bad_objective_function = make_single_objective_function(
        bad_sources,
        function_type="worst_case_TTA",
        interpolation_method=interpolation_method
    )  # the function to be optimized

    def multiobjective_func(x):  # this is the double objective function
        return [objective_function(x), bad_objective_function(x)]

    parameterized_locations = sources[0].parameterized_locations
    dimensionality = parameterized_locations.shape[1]

    num_inputs = len(sources) * dimensionality
    NUM_OUTPUTS = 2  # the default for now
    # define the dimensionality of the input and output spaces
    problem = Problem(num_inputs, NUM_OUTPUTS)

    logging.warning(
        f"Creating a multiobjective competing function with dimensionality {dimensionality}"
    )
    logging.warning(f"bounds are {bounds}")
    for i in range(dimensionality):
        # splat "*" notation is expanding the pair which is low, high
        problem.types[i::dimensionality] = Real(
            *bounds[i])  # This is the feasible region

    problem.function = multiobjective_func
    # the second function should be maximized rather than minimized
    problem.directions[1] = Problem.MAXIMIZE
    return problem
Code Example #23
def generate_initial_population(pop_size=10, number_of_gd_steps=50):
    p1 = GradientDescentAdam(R, lr=alpha)
    initial_pop = []
    meta_g = None
    vec_len = None

    for x in range(pop_size):
        _, _, _, _, G, S = p1.optimize(number_of_gd_steps, ks=ks)
        v, meta = roll(G, S)
        meta_g = meta
        vec_len = len(v)
        initial_pop.append(v.tolist())

    problem = Problem(vec_len, 1)
    problem.types[:] = Real(0, 1000)
    problem.function = partial(fit, p1, meta_g)

    generator = RandomGenerator()
    population = []

    for i in range(pop_size):
        p = generator.generate(problem)
        p.variables = initial_pop[i]
        problem.evaluate(p)
        population.append(p)

    return problem, population, meta_g, generator
Code Example #24
    def make_platypus_objective_function_counting(self,
                                                  sources,
                                                  times_more_detectors=1):
        """
        This balances the number of detectors with the quality of the outcome
        """
        total_ret_func = make_total_lookup_function(
            sources, masked=True)  # the function to be optimized
        counting_func = make_counting_objective()

        def multiobjective_func(x):  # this is the double objective function
            return [total_ret_func(x), counting_func(x)]

        # there is an x, y, and a mask for each source so there must be three
        # times more input variables
        # the upper bound on the number of detectors n times the number of
        # sources
        num_inputs = len(sources) * 3 * times_more_detectors
        NUM_OUTPUTS = 2  # the default for now
        # define the dimensionality of the input and output spaces
        problem = Problem(num_inputs, NUM_OUTPUTS)
        x, y, time = sources[0]  # expand the first source
        min_x = min(x)
        min_y = min(y)
        max_x = max(x)
        max_y = max(y)
        print("min x : {}, max x : {}, min y : {}, max y : {}".format(
            min_x, max_x, min_y, max_y))
        problem.types[0::3] = Real(min_x, max_x)  # This is the feasible region
        problem.types[1::3] = Real(min_y, max_y)
        # This appears to be inclusive, so this is really just (0, 1)
        problem.types[2::3] = Binary(1)
        problem.function = multiobjective_func
        return problem
Code Example #25
def mo_run(population_size):
    mo_problem = Problem(2, 2, 2)
    mo_problem.types[:] = Binary(len(requirements))
    mo_problem.directions[0] = Problem.MAXIMIZE
    mo_problem.directions[1] = Problem.MINIMIZE
    mo_problem.constraints[:] = "<={}".format(budget)
    mo_problem.function = multi_objective_nrp
    mo_nsga = NSGAII(mo_problem, population_size)
    mo_nsga.run(runs)
    x_mo = [solution.objectives[0] for solution in mo_nsga.result]
    y_mo = [solution.objectives[1] * (-1) for solution in mo_nsga.result]
    return x_mo, y_mo, mo_nsga
Code Example #26
def get_currentratio(data_name, pareto_front_list, w_ref, input_dim,
                     output_dim, x_bounds):
    problem = Problem(input_dim, output_dim)
    problem.types[:] = [
        Real(x_bounds[i][0], x_bounds[i][1]) for i in range(input_dim)
    ]
    problem.function = eval(data_name)
    algorithm = NSGAII(problem, population_size=50)
    algorithm.run(10000)
    true_pareto_front = np.zeros((len(algorithm.result), output_dim))
    for i in range(len(algorithm.result)):
        true_pareto_front[i, :] = algorithm.result[i].objectives[:]
    # print(true_pareto_front)
    true_w_ref = np.max(true_pareto_front, axis=0) + 1.0e-2
    for i in range(w_ref.shape[0]):
        if true_w_ref[i] > w_ref[i]:
            w_ref[i] = true_w_ref[i]
    true_HV = utils.calc_hypervolume(true_pareto_front, w_ref)

    ratio_list = []
    for pareto_front in pareto_front_list:
        hv_i = utils.calc_hypervolume(pareto_front, w_ref)
        ratio_list.append(hv_i / true_HV)
    return ratio_list
Code Example #27
def so_run(population_size):
    so_problem = Problem(1, 1, 1)
    so_problem.types[:] = Binary(len(requirements))
    so_problem.directions[:] = Problem.MAXIMIZE
    so_problem.constraints[:] = "<={}".format(budget)
    so_problem.function = single_objective_nrp
    so_algorithm = GeneticAlgorithm(so_problem, population_size)
    so_algorithm.run(runs)
    x_so = [solution.objectives[0] for solution in so_algorithm.result]
    y_so = [solution.constraints[0] * (-1) for solution in so_algorithm.result]
    return x_so, y_so, so_algorithm
Code Example #28
    def _prune(self):
        problem = Problem(len(self.ensemble_), 2)
        problem.types[:] = Integer(0, 1)
        problem.directions[0] = Problem.MAXIMIZE
        problem.directions[1] = Problem.MAXIMIZE
        problem.function = functools.partial(MCE._evaluate_imbalance,
                                             y_predicts=self._y_predict,
                                             y_true=self._y_valid)

        algorithm = NSGAII(problem)
        algorithm.run(10000)

        solutions = unique(nondominated(algorithm.result))
        objectives = [sol.objectives for sol in solutions]

        def extract_variables(variables):
            extracted = [v[0] for v in variables]
            return extracted

        self._ensemble_quality = self.get_group(
            extract_variables(solutions[objectives.index(
                max(objectives, key=itemgetter(0)))].variables),
            self.ensemble_)
        self._ensemble_diversity = self.get_group(
            extract_variables(solutions[objectives.index(
                max(objectives, key=itemgetter(1)))].variables),
            self.ensemble_)
        self._ensemble_balanced = self.get_group(
            extract_variables(solutions[objectives.index(
                min(objectives, key=lambda i: abs(i[0] - i[1])))].variables),
            self.ensemble_)

        pareto_set, fitnesses = self._genetic_optimalisation(
            optimalisation_type='quality_single')
        self._ensemble_quality_single = self.get_group(
            pareto_set[fitnesses.index(max(fitnesses, key=itemgetter(0)))],
            self.ensemble_)
        # pareto_set, fitnesses = self._genetic_optimalisation(optimalisation_type='diversity_single')
        # self._ensemble_diversity_single = self.get_group(pareto_set[fitnesses.index(min(fitnesses, key=itemgetter(0)))],
        #                                                  self.ensemble_)

        pareto_set, fitnesses = self._genetic_optimalisation(
            optimalisation_type='precision_single')
        self._ensemble_precision_single = self.get_group(
            pareto_set[fitnesses.index(max(fitnesses, key=itemgetter(0)))],
            self.ensemble_)
        pareto_set, fitnesses = self._genetic_optimalisation(
            optimalisation_type='recall_single')
        self._ensemble_recall_single = self.get_group(
            pareto_set[fitnesses.index(max(fitnesses, key=itemgetter(0)))],
            self.ensemble_)
Code Example #29
    def build_problems(self):
        self.us = [self.u1, self.u2, self.u3]
        self.ws = [self.w1, self.w2, self.w3]
        temp = sum(self.used_components)
        if temp == 1:
            return self.defuzz_not_none()
        u_problem, w_problem = Problem(1, temp), Problem(1, temp)
        u_universe, w_universe = self.universe_not_none()
        u_problem.types[:] = Subset(u_universe, 1)
        w_problem.types[:] = Subset(w_universe, 1)
        u_problem.directions[:] = Problem.MAXIMIZE
        w_problem.directions[:] = Problem.MAXIMIZE
        u_problem.function, w_problem.function = self.u_function, self.w_function
        return u_problem, w_problem
Code Example #30
def generate_initial_population(number_of_gd_steps=50):
    p1 = GradientDescentAdam(R, lr=alpha)
    initial_pop = []
    scale = 0.01

    noise_scale = 0.0000001

    Gv, Sv = p1.construct_starting_point(ks, scale=scale)

    _, _, _, _, G, S = p1.optimize(G=Gv.copy(), S=Sv.copy(), steps=number_of_gd_steps, ks=ks)
    v, meta = roll(G, S)
    initial_pop.append(v.tolist())

    GvNoise = []
    for x in Gv:
        GvNoise.append(x + np.random.randn(*x.shape) * noise_scale)

    SvNoise = []
    for x in Sv:
        n = []
        for y in x:
            n.append(y + np.random.randn(*y.shape) * noise_scale)
        SvNoise.append(n)

    _, _, _, _, G, S = p1.optimize(G=GvNoise.copy(), S=SvNoise.copy(), steps=number_of_gd_steps, ks=ks)
    v, meta = roll(G, S)
    initial_pop.append(v.tolist())

    np.random.rand()
    _, _, _, _, G, S = p1.optimize(number_of_gd_steps, ks=ks)
    v, meta = roll(G, S)
    meta_g = meta
    vec_len = len(v)
    initial_pop.append(v.tolist())


    problem = Problem(vec_len, 1)
    problem.types[:] = Real(0, 1000)
    problem.function = partial(fit, p1, meta_g)

    generator = RandomGenerator()
    population = []

    for i in range(3):
        p = generator.generate(problem)
        p.variables = initial_pop[i]
        problem.evaluate(p)
        population.append(p)

    return problem, population, meta_g, p1
Code Example #31
def platypus_grid(w_, garden_index, grids):
    # first compute convex hull of grids
    # parametrize problem by just the center of the grid
    points_list = []
    for i in range(grids.shape[0]):
        p = Point(grids[i, :])
        points_list.append(p)

    mtp = MultiPoint(points_list)
    cvx_hull = mtp.convex_hull
    gminx, gminy, gmaxx, gmaxy = cvx_hull.bounds

    problem = Problem(3, 2)
    problem.types[0] = Real(gminx, gmaxx)
    problem.types[1] = Real(gminy, gmaxy)
    problem.types[2] = Real(0, 180)
    problem.function = platypus_obj
    algorithm = NSGAII(problem)

    algorithm.run(100)
    print(algorithm.result)

    return algorithm.result
Code Example #32
from platypus import NSGAII, Problem, Real

def belegundu(vars):
    x = vars[0]
    y = vars[1]
    return [-2*x + y, 2*x + y], [-x + y - 1, x + y - 7]

problem = Problem(2, 2, 2)
problem.types[:] = [Real(0, 5), Real(0, 3)]
problem.constraints[:] = "<=0"
problem.function = belegundu

algorithm = NSGAII(problem)
algorithm.run(10000)
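
A small follow-up, not part of the original example: Platypus solutions expose feasible, variables, and objectives, so the feasible nondominated points found above can be listed directly.

from platypus import nondominated

for solution in nondominated(algorithm.result):
    if solution.feasible:
        print(solution.variables, solution.objectives)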
Code Example #33
import functools
from platypus import NSGAII, Problem, Real

def problem_with_args(x, arg1, arg2=5):
    print("x:", x)
    print("arg1:", arg1)
    print("arg2:", arg2)

    return [x[0]**2, (x[0]-2)**2]

problem = Problem(1, 2)
problem.types[:] = Real(-10, 10)
problem.function = functools.partial(problem_with_args, arg1=2)

algorithm = NSGAII(problem)
algorithm.run(100)
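
The functools.partial call above binds arg1=2 once, so Platypus only ever passes the decision variables to problem_with_args. An equivalent binding (a sketch, not from the original example) could also be written with a lambda:

problem.function = lambda x: problem_with_args(x, arg1=2)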
Code Example #34
File: custom_problem_1.py  Project: quaquel/Platypus
from platypus import NSGAII, Problem, Real

def schaffer(x):
    return [x[0]**2, (x[0]-2)**2]

problem = Problem(1, 2)
problem.types[:] = Real(-10, 10)
problem.function = schaffer

algorithm = NSGAII(problem)
algorithm.run(10000)
Code Example #35
File: knapsack.py  Project: quaquel/Platypus
from platypus import GeneticAlgorithm, Problem, Constraint, Binary, nondominated, unique

# This simple example has an optimal value of 15 when picking items 1 and 4.
items = 7
capacity = 9
weights = [2, 3, 6, 7, 5, 9, 4]
profits = [6, 5, 8, 9, 6, 7, 3]
    
def knapsack(x):
    selection = x[0]
    total_weight = sum([weights[i] if selection[i] else 0 for i in range(items)])
    total_profit = sum([profits[i] if selection[i] else 0 for i in range(items)])
    
    return total_profit, total_weight

problem = Problem(1, 1, 1)
problem.types[0] = Binary(items)
problem.directions[0] = Problem.MAXIMIZE
problem.constraints[0] = Constraint("<=", capacity)
problem.function = knapsack

algorithm = GeneticAlgorithm(problem)
algorithm.run(10000)

for solution in unique(nondominated(algorithm.result)):
    print(solution.variables, solution.objectives)
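
The Constraint("<=", capacity) helper above plays the same role as the constraint strings used in several other examples on this page; an equivalent one-line sketch:

problem.constraints[0] = "<=" + str(capacity)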
Code Example #36
File: tsp.py  Project: quaquel/Platypus
        (4493, 7102), (3600, 6950), (3100, 7250), (4700, 8450), (5400, 8450),
        (5610, 10053), (4492, 10052), (3600, 10800), (3100, 10950), (4700, 11650),
        (5400, 11650), (6650, 10800), (7300, 10950), (7300, 7250), (6650, 6950),
        (7300, 3300), (6650, 2300), (5400, 1600), (8350, 2300), (7850, 3300),
        (9450, 5750), (10150, 5750), (10358, 7103), (9243, 7102), (8350, 6950),
        (7850, 7250), (9450, 8450), (10150, 8450), (10360, 10053), (9242, 10052),
        (8350, 10800), (7850, 10950), (9450, 11650), (10150, 11650), (11400, 10800),
        (12050, 10950), (12050, 7250), (11400, 6950), (12050, 3300), (11400, 2300),
        (10150, 1600), (13100, 2300), (12600, 3300), (14200, 5750), (14900, 5750),
        (15108, 7103), (13993, 7102), (13100, 6950), (12600, 7250), (14200, 8450),
        (14900, 8450), (15110, 10053), (13992, 10052), (13100, 10800), (12600, 10950),
        (14200, 11650), (14900, 11650), (16150, 10800), (16800, 10950), (16800, 7250),
        (16150, 6950), (16800, 3300), (16150, 2300), (14900, 1600), (19800, 800),
        (19800, 10000), (19800, 11900), (19800, 12200), (200, 12200), (200, 1100),
        (200, 800)]

def dist(x, y):
    return round(math.sqrt((x[0] - y[0])**2 + (x[1] - y[1])**2))
    
def tsp(x):
    tour = x[0]
    return sum([dist(cities[tour[i]], cities[tour[(i + 1) % len(cities)]]) for i in range(len(tour))])

problem = Problem(1, 1)
problem.types[0] = Permutation(range(len(cities)))
problem.directions[0] = Problem.MINIMIZE
problem.function = tsp
 
algorithm = GeneticAlgorithm(problem)
algorithm.run(100000, callback=lambda a: print(a.nfe, unique(nondominated(algorithm.result))[0].objectives[0]))
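
This excerpt begins partway through the cities list and omits its imports; the truncated coordinates are not reproduced here, but the preamble the script assumes would look roughly like this sketch:

import math
from platypus import GeneticAlgorithm, Problem, Permutation, nondominated, unique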