Example 1
def ZDT_Test():
    ref_dirs = {
        ZDT1: [[(0.2, 0.4), (0.8, 0.4)],
               [(0.2, 0.6), (0.4, 0.6), (0.5, 0.2), (0.7, 0.2), (0.9, 0)]],
        ZDT2: [[(0.2, 0.8), (0.7, 1), (0.8, 0.2)]],
        ZDT3: [[(0.1, 0.6), (0.3, 0.2), (0.7, -0.25)]]
    }
    crossover = SimulatedBinaryCrossover(10)

    p = []
    name_list = ["ZDT1_1", "ZDT1_2", "ZDT2_1", "ZDT3_1"]
    for problem, ref_points in ref_dirs.items():
        for i, points in enumerate(ref_points):

            sublist = []
            for algorithm in [
                    RNSGAIII("real",
                             pop_size=100,
                             ep=0.001,
                             crossover=crossover,
                             ref_dirs=points,
                             verbose=0)
            ]:
                for run in range(1, n_runs + 1):
                    name = problem.__name__ + '_' + str(i) + '_' + str(run)
                    sublist.append((algorithm, problem, run, points, name))
                    # yield (algorithm, problem, run)
            p.append(sublist)
    return p, name_list
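The returned name_list carries one label per reference-point set (two for ZDT1, one each for ZDT2 and ZDT3), so it pairs with the outer list returned as p. A minimal consumer sketch, assuming the snippet's module-level imports and n_runs constant; the print-only loop below is illustrative and not part of the source:

configs, labels = ZDT_Test()
for label, runs in zip(labels, configs):
    for algorithm, problem, run, points, name in runs:
        # one configured RNSGAIII run on one ZDT problem and point set
        print(label, name, "with", len(points), "reference points")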
Example 2
def ZDT_Test():
    ref_dirs = {ZDT1(): [[(0.2, 0.4), (0.8, 0.4)],
                         [(0.2, 0.6), (0.4, 0.6), (0.5, 0.2), (0.7, 0.2), (0.9, 0)]],
                ZDT2(): [[(0.2, 0.8), (0.7, 1), (0.8, 0.2)]],
                ZDT3(): [[(0.1, 0.6), (0.3, 0.2), (0.7, -0.25)]]}
    crossover = SimulatedBinaryCrossover(10)

    for problem, ref_points in ref_dirs.items():
        for points in ref_points:
            for algorithm in [RNSGAIII("real", pop_size=100, ep=0.001, crossover=crossover, ref_dirs=points, verbose=0)]:
                for run in range(1, n_runs + 1):
                    yield (algorithm, problem, run)
Example 3
def set_default_if_none(var_type, kwargs):
    set_if_none(kwargs, 'pop_size', 100)
    set_if_none(kwargs, 'verbose', False)
    set_if_none(kwargs, 'selection', RandomSelection())

    # values for mating
    if var_type == "real":
        set_if_none(kwargs, 'sampling', RealRandomSampling())
        set_if_none(kwargs, 'crossover', SimulatedBinaryCrossover())
        set_if_none(kwargs, 'mutation', PolynomialMutation())
    elif var_type == "binary":
        set_if_none(kwargs, 'sampling', BinaryRandomSampling())
        set_if_none(kwargs, 'crossover', BinaryUniformCrossover())
        set_if_none(kwargs, 'mutation', BinaryBitflipMutation())
        set_if_none(kwargs, 'eliminate_duplicates', True)
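Both set_default_if_none variants in these snippets delegate to a set_if_none helper that is not shown here. A minimal sketch of what such a helper presumably does, under the assumption that it only fills in missing keywords (the actual implementation in the source library may differ):

def set_if_none(kwargs, key, value):
    # install the default only when the caller did not supply the keyword
    if key not in kwargs:
        kwargs[key] = value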
Example 4
def DTLZ_test():
    ref_dirs = {DTLZ2: [{"n_var": 11, "n_obj": 3, "ref_points": [[[0.2, 0.2, 0.6], [0.8, 0.8, 0.8]]]},
                          {"n_var": 14, "n_obj": 5, "ref_points": [[[0.5, 0.5, 0.5, 0.5, 0.5], [0.2, 0.2, 0.2, 0.2, 0.8]]]},
                          {"n_var": 19, "n_obj": 10, "ref_points": [[[0.25 for i in range(10)]]]}
                          ]
                }
    crossover = SimulatedBinaryCrossover(10)

    for problem, setup in ref_dirs.items():
        for params in setup:
            parameters = {"n_var": params["n_var"], "n_obj": params["n_obj"]}
            print(problem)
            prob = problem(**parameters)
            for points in params["ref_points"]:
                for algorithm in [RNSGAIII("real", pop_size=100, ep=0.01, crossover=crossover, ref_dirs=points, verbose=0)]:
                    for run in range(1, n_runs + 1):
                        yield (algorithm, prob, run)
Example 5
def set_default_if_none(var_type, kwargs):
    set_if_none(kwargs, 'pop_size', 100)
    set_if_none(kwargs, 'disp', False)
    set_if_none(kwargs, 'selection', RandomSelection())
    set_if_none(kwargs, 'survival', None)

    # values for mating
    if var_type == "real":
        set_if_none(kwargs, 'sampling', RealRandomSampling())
        set_if_none(kwargs, 'crossover',
                    SimulatedBinaryCrossover(prob_cross=0.9, eta_cross=20))
        set_if_none(kwargs, 'mutation',
                    PolynomialMutation(prob_mut=None, eta_mut=15))
    elif var_type == "binary":
        set_if_none(kwargs, 'sampling', BinaryRandomSampling())
        set_if_none(kwargs, 'crossover', BinaryUniformCrossover())
        set_if_none(kwargs, 'mutation', BinaryBitflipMutation())
        set_if_none(kwargs, 'eliminate_duplicates', True)
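A helper like this is typically called from an algorithm constructor so that user-supplied operators override the defaults. A short sketch under that assumption; the class name and attributes below are hypothetical and not taken from the snippets:

class MyAlgorithm:
    def __init__(self, var_type, **kwargs):
        # fill in any operator the caller did not pass, then read the values back
        set_default_if_none(var_type, kwargs)
        self.pop_size = kwargs['pop_size']
        self.sampling = kwargs['sampling']
        self.crossover = kwargs['crossover']
        self.mutation = kwargs['mutation']

# the explicit crossover is kept; sampling and mutation fall back to the defaults
algo = MyAlgorithm("real", crossover=SimulatedBinaryCrossover(prob_cross=1.0, eta_cross=30))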
Example 6
def DTLZ_test():
    ref_dirs = {
        DTLZ2: [{
            "n_var": 11,
            "n_obj": 3,
            "ref_points": [[[0.2, 0.2, 0.6], [0.8, 0.8, 0.8]]]
        }, {
            "n_var":
            14,
            "n_obj":
            5,
            "ref_points": [[[0.5, 0.5, 0.5, 0.5, 0.5],
                            [0.2, 0.2, 0.2, 0.2, 0.8]]]
        }, {
            "n_var": 19,
            "n_obj": 10,
            "ref_points": [[[0.25 for i in range(10)]]]
        }]
    }
    crossover = SimulatedBinaryCrossover(10)
    p = []
    name_list = ["DTLZ2_1", "DTLZ2_2", "DTLZ2_3"]
    for problem, setup in ref_dirs.items():
        for params in setup:
            parameters = {"n_var": params["n_var"], "n_obj": params["n_obj"]}
            prob = problem(**parameters)
            for i, points in enumerate(params["ref_points"]):
                sublist = []
                for algorithm in [
                        RNSGAIII("real",
                                 pop_size=100,
                                 ep=0.01,
                                 crossover=crossover,
                                 ref_dirs=points,
                                 verbose=0)
                ]:
                    for run in range(1, n_runs + 1):
                        name = problem.__name__ + '_' + str(i) + '_' + str(run)
                        sublist.append((algorithm, prob, run, points, name))
                        # yield (algorithm, prob, run)
                p.append(sublist)
    return p, name_list
Example 7
    def _fit(self, X, F, data):

        self.F = F
        n_var = X.shape[1]

        if self.kernel == "linear":
            kernel = george.kernels.LinearKernel(order=2,
                                                 log_gamma2=0.2,
                                                 ndim=n_var)
        elif self.kernel == "expsqrt":
            kernel = george.kernels.ExpSquaredKernel(metric=np.ones(n_var),
                                                     ndim=n_var)
        elif self.kernel == "rational_quad":
            kernel = george.kernels.RationalQuadraticKernel(
                log_alpha=0.2, metric=np.ones(n_var), ndim=n_var)
        elif self.kernel == "exp":
            kernel = george.kernels.ExpKernel(metric=np.ones(n_var),
                                              ndim=n_var)
        elif self.kernel == "polynomial":
            kernel = george.kernels.PolynomialKernel(metric=np.ones(n_var))
        else:
            raise ValueError("Parameter %s for kernel unknown." % self.kernel)

        gp = george.GP(kernel, fit_mean=True)
        gp.compute(X)

        def nll(p):
            # negative log-likelihood of the training targets for parameter vector p
            gp.set_parameter_vector(p)
            ll = gp.log_likelihood(F, quiet=True)
            return -ll if np.isfinite(ll) else 1e25

        def grad_nll(p):
            # gradient of the negative log-likelihood, used by L-BFGS-B below
            gp.set_parameter_vector(p)
            return -gp.grad_log_likelihood(F, quiet=True)

        if 'expensive' in data and data['expensive']:
            n_restarts = 20
        else:
            n_restarts = 5

        n_hyper_var = len(gp.get_parameter_vector())

        # print(gp.get_parameter_bounds(include_frozen=False))

        class HyperparameterProblem(Problem):
            def __init__(self, **kwargs):
                Problem.__init__(self, **kwargs)
                self.n_var = n_hyper_var
                self.n_constr = 0
                self.n_obj = 1
                self.func = self.evaluate_
                self.xl = 0 * np.ones(self.n_var)
                self.xu = 10 * np.ones(self.n_var)

            def evaluate_(self, x, f):
                for i in range(x.shape[0]):
                    gp.set_parameter_vector(x[i, :])
                    ll = gp.log_likelihood(F, quiet=True)
                    f[i, :] = -ll if np.isfinite(ll) else 1e25

        if self.opt == "ga":
            X, _, _ = GeneticAlgorithm(
                pop_size=20,
                sampling=LHS().sample_by_bounds(0, 1, n_hyper_var, 100, {}),
                selection=TournamentSelection(),
                crossover=SimulatedBinaryCrossover(),
                mutation=PolynomialMutation(),
                survival=FitnessSurvival(),
                verbose=0).solve(HyperparameterProblem(), 8000)

            gp.set_parameter_vector(X[0, :])

        elif self.opt == "best_lhs":

            n_initial_points = 1000

            p = LHS().sample_by_bounds(0, 1, n_hyper_var, 1000, {})
            likelihoods = np.zeros(n_initial_points)
            for i, row in enumerate(p):
                likelihoods[i] = nll(row)

            print()

        elif self.opt == "lhs":

            initial_points = LHS().sample(HyperparameterProblem(), n_restarts,
                                          {})

            likelihoods = np.zeros(n_restarts)
            X = np.zeros((n_restarts, n_hyper_var))
            for i, p in enumerate(initial_points):
                result = op.minimize(nll, p, jac=grad_nll, method="L-BFGS-B")
                likelihoods[i] = result.fun
                X[i, :] = result.x

            idx = np.argmin(likelihoods)
            gp.set_parameter_vector(X[idx, :])

        # p0 = gp.get_parameter_vector()
        # results = op.minimize(nll, p0, jac=grad_nll, method="L-BFGS-B")
        # gp.set_parameter_vector(results.x)

        self.model = gp
        return self
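Once fitted, a george GP is usually queried through gp.predict, conditioning on the stored training targets. A hedged sketch of the matching prediction step; the _predict name and the assumption that F is a 1-D target vector are not taken from the snippet:

    def _predict(self, X_new):
        # predictive mean and variance at the query points X_new
        mean, var = self.model.predict(self.F, X_new, return_var=True)
        return mean, var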