Example #1
def test_tlbo_variance():

    lb = np.zeros(5)
    lb[::2] = 1.1

    ub = np.ones(5)
    ub[::2] = 2.2

    o = TLBO(f=lambda x: np.abs(np.mean(x) - 0.5),
             lower_bound=lb,
             upper_bound=ub)

    o = helper_n_generations(o, 20)

    print("costs history", o.bestcosts_)
    solution, fitness = o.best()
    print("best fitness", fitness[0])
    print("best solution", solution[0])

    assert len(o.fitness_) == 50
    assert len(o.population_) == 50
    assert len(o.bestcosts_) == 21

    assert len(solution) == 1
    assert len(fitness) == 1
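The snippets on this page are lifted from the sorend/fylearn sources and tests and are shown without their imports. A minimal preamble that should cover the examples below is sketched here; the exact module paths are an assumption based on the project layout and may differ between versions.

import numpy as np
from sklearn.metrics import accuracy_score, mean_squared_error

# assumed fylearn module paths -- verify against your installed version
from fylearn.ga import (GeneticAlgorithm, UnitIntervalGeneticAlgorithm, UniformCrossover,
                        helper_fitness, helper_n_generations)
from fylearn.tlbo import TLBO, TeachingLearningBasedOptimizer
from fylearn.jaya import JayaOptimizer
import fylearn.fuzzylogic as fl  # only needed for the fpcga-style examples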
Example #2
def test_tlbo_constrained_himmelblau():
    # function to minimize
    def f(x):
        x1, x2 = x
        return ((x1**2) + x2 - 11)**2 + (x1 + (x2**2) - 7)**2

    # constraint 1: g1(x) >= 0
    def g1(x):
        x1, x2 = x
        return 26 - (x1 - 5)**2 - x2**2

    # constraint 2: g2(x) >= 0
    def g2(x):
        x1, x2 = x
        return 20 - (4 * x1) - x2

    # penalty:
    #   10 * v^2, if v < 0
    #   0, otherwise
    def p(v):
        return 10 * min(v, 0)**2

    # f'(x) - add penalties for constraints
    def fp(x):
        return f(x) + p(g1(x)) + p(g2(x))

    lower_bounds = np.ones(2) * -5
    upper_bounds = np.ones(2) * 5

    tlbo = TLBO(fp, lower_bounds, upper_bounds, n_population=5)
    tlbo = helper_n_generations(tlbo, 100)
    best_solution, best_fitness = tlbo.best()
    print("TLBO solution", best_solution, "fitness", best_fitness)
Example #4
File: garules.py | Project: sorend/fylearn
    def build_for_class(self, X):

        distance_fitness = lambda P: self.distance_sum(P, X)

        # setup GA
        ga = GeneticAlgorithm(fitness_function=distance_fitness,
                              elitism=3,
                              n_chromosomes=100,
                              n_genes=X.shape[1],
                              p_mutation=0.3)

        ga = helper_n_generations(ga, self.n_iterations)  # advance the GA

        # return the best parameters found for this class.
        chromosomes, fitness = ga.best(1)
        return chromosomes[0]
Example #5
    def build_for_class(self, X, y, class_idx):

        # take column-wise min/mean/max for class
        mins = np.nanmin(X[class_idx], 0)
        means = np.nanmean(X[class_idx], 0)
        maxs = np.nanmax(X[class_idx], 0)
        ds = (maxs - mins) / 2.0

        n_genes = 2 * self.m  # adjustment for r and shrinking/expanding value for p/q

        B = np.ones(n_genes)

        def decode_with_shrinking_expanding(C):
            def dcenter(j):
                return min(1.0, max(0.0, C[j])) - 0.5 if self.adjust_center else 1.0

            return [ fl.PiSet(r=means[j] * dcenter(j),
                              p=means[j] - (ds[j] * C[j + 1]),
                              q=means[j] + (ds[j] * C[j + 1]))
                     for j in range(self.m) ]

        y_target = np.zeros(y.shape)  # create the target of 1 and 0.
        y_target[class_idx] = 1.0

        def rmse_fitness_function(chromosome):
            proto = decode_with_shrinking_expanding(chromosome)
            y_pred = _predict_one(proto, self.aggregation, X)
            return mean_squared_error(y_target, y_pred)

        logger.info("initializing GA %d iterations" % (self.iterations, ))
        # initialize
        ga = UnitIntervalGeneticAlgorithm(
            fitness_function=helper_fitness(rmse_fitness_function),
            crossover_function=UniformCrossover(0.5),
            elitism=3,
            n_chromosomes=100,
            n_genes=n_genes,
            p_mutation=0.3)

        ga = helper_n_generations(ga, self.iterations)
        chromosomes, fitnesses = ga.best(1)

        return decode_with_shrinking_expanding(
            chromosomes[0]), decode_with_shrinking_expanding(B)
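In Example #5 above, each feature j gets a Pi-shaped fuzzy set whose center r is derived from the class mean (rescaled by gene C[j] when adjust_center is set) and whose spread p, q is scaled by gene C[j + 1]. A standalone sketch of the same decoding idea, assuming fl is fylearn.fuzzylogic and that PiSet instances are callable on feature values (as _predict_one suggests); the numbers are made up for illustration:

mean, half_range, gene = 0.5, 0.2, 0.8  # hypothetical feature mean, half-range and gene value
mu = fl.PiSet(r=mean,                   # center of the set
              p=mean - half_range * gene,  # left spread, scaled by the gene
              q=mean + half_range * gene)  # right spread
print(mu(0.5), mu(0.3), mu(0.7))        # membership is highest near r and falls off away from it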
Example #6
File: garules.py | Project: sorend/fylearn
    def build_for_class(self, rs, X):

        def distance_fitness(c):
            return np.sum(np.abs(X - c))

        # setup GA
        ga = GeneticAlgorithm(fitness_function=helper_fitness(distance_fitness),
                              elitism=3,
                              n_chromosomes=100,
                              n_genes=X.shape[1],
                              p_mutation=0.3,
                              random_state=rs)

        ga = helper_n_generations(ga, self.n_iterations)  # advance the GA

        # return the best parameters found for this class.
        chromosomes, fitness = ga.best(1)
        return chromosomes[0]
Example #9
File: test_jaya.py | Project: sorend/fylearn
def test_jaya_sphere_bounds():
    """ Another example with strange domain """

    o = JayaOptimizer(f=lambda x: np.sum(x**2),
                      lower_bound=np.array([1, 0.001, 100]),
                      upper_bound=np.array([10, 0.2, 1000]),
                      n_population=34)

    o = helper_n_generations(o, 100)

    solution, fitness = o.best()

    print("costs history", o.bestcosts_)
    print("best fitness", fitness)
    print("best solution", solution)

    assert len(o.fitness_) == 34
    assert len(o.population_) == 34
    assert len(o.bestcosts_) == 101
Example #10
File: test_jaya.py | Project: sorend/fylearn
def test_jaya_sphere():
    """Example given in paper"""

    o = JayaOptimizer(f=lambda x: np.sum(x**2),
                      lower_bound=np.ones(10) * -10.0,
                      upper_bound=np.ones(10) * 10.0,
                      n_population=34)

    o = helper_n_generations(o, 100)

    solution, fitness = o.best()

    print("costs history", o.bestcosts_)
    print("best fitness", fitness)
    print("best solution", solution)

    assert len(o.fitness_) == 34
    assert len(o.population_) == 34
    assert len(o.bestcosts_) == 101
Example #11
def test_jaya_sphere_bounds():
    """ Another example with strange domain """

    o = JayaOptimizer(f=lambda x: np.sum(x**2),
                      lower_bound=np.array([1, 0.001, 100]),
                      upper_bound=np.array([10, 0.2, 1000]),
                      n_population=34)

    o = helper_n_generations(o, 100)

    solution, fitness = o.best()

    print("costs history", o.bestcosts_)
    print("best fitness", fitness)
    print("best solution", solution)

    assert len(o.fitness_) == 34
    assert len(o.population_) == 34
    assert len(o.bestcosts_) == 101
Example #12
File: fpcga.py | Project: sorend/fylearn
    def build_for_class(self, X, y, class_idx):

        # take column-wise min/mean/max for class
        mins = np.nanmin(X[class_idx], 0)
        means = np.nanmean(X[class_idx], 0)
        maxs = np.nanmax(X[class_idx], 0)
        ds = (maxs - mins) / 2.0

        n_genes = 2 * self.m  # adjustment for r and shrinking/expanding value for p/q

        B = np.ones(n_genes)

        def decode_with_shrinking_expanding(C):
            def dcenter(j):
                return min(1.0, max(0.0, C[j])) - 0.5 if self.adjust_center else 1.0

            return [ fl.PiSet(r=means[j] * dcenter(j),
                              p=means[j] - (ds[j] * C[j + 1]),
                              q=means[j] + (ds[j] * C[j + 1]))
                     for j in range(self.m) ]

        y_target = np.zeros(y.shape)  # create the target of 1 and 0.
        y_target[class_idx] = 1.0

        def rmse_fitness_function(chromosome):
            proto = decode_with_shrinking_expanding(chromosome)
            y_pred = _predict_one(proto, self.aggregation, X)
            return mean_squared_error(y_target, y_pred)

        logger.info("initializing GA %d iterations" % (self.iterations,))
        # initialize
        ga = UnitIntervalGeneticAlgorithm(fitness_function=helper_fitness(rmse_fitness_function),
                                          crossover_function=UniformCrossover(0.5),
                                          elitism=3,
                                          n_chromosomes=100,
                                          n_genes=n_genes,
                                          p_mutation=0.3)

        ga = helper_n_generations(ga, self.iterations)
        chromosomes, fitnesses = ga.best(1)

        return decode_with_shrinking_expanding(chromosomes[0]), decode_with_shrinking_expanding(B)
Example #13
File: test_jaya.py | Project: sorend/fylearn
def test_jaya_variance():

    lb = np.zeros(5)
    lb[::2] = 1.1

    ub = np.ones(5)
    ub[::2] = 2.2

    o = JayaOptimizer(f=lambda x: np.abs(np.mean(x) - 0.5), lower_bound=lb, upper_bound=ub)

    o = helper_n_generations(o, 20)

    print("costs history", o.bestcosts_)
    solution, fitness = o.best()
    print("best fitness", fitness)
    print("best solution", solution)

    assert len(o.fitness_) == 50
    assert len(o.population_) == 50
    assert len(o.bestcosts_) == 21
Example #14
def test_tlbo_alan_2():
    def fitness(X):
        print("X", X, "len", len(X))
        x1, x2, x3, x4 = X.tolist()
        return (0.0358 + 0.7349 * x1 + 0.0578 * x2 - 0.3151 * x3 + 0.6888 * x4
                + 0.1803 * x1**2 - 0.0481 * x2**2 + 0.1699 * x3**2 - 0.0494 * x4**2
                - 0.3555 * x1 * x2 - 0.6316 * x1 * x3 + 0.5973 * x1 * x4
                + 0.0826 * x2 * x3 - 0.4736 * x2 * x4 - 0.6547 * x3 * x4)

    lower_bounds = np.array([-10, -10, -10, -10])
    upper_bounds = np.array([10, 10, 10, 10])

    tlbo = TeachingLearningBasedOptimizer(fitness, lower_bounds, upper_bounds)
    tlbo = helper_n_generations(tlbo, 100)
    best_solution, best_fitness = tlbo.best()
    print("TLBO solution", best_solution, "fitness", best_fitness)
Example #15
File: test_tlbo.py | Project: sorend/fylearn
def test_tlbo_sphere():
    """Example given in matlab code"""

    o = TeachingLearningBasedOptimizer(f=lambda x: np.sum(x**2),
                                       lower_bound=np.ones(10) * -10.0,
                                       upper_bound=np.ones(10) * 10.0,
                                       n_population=34)

    o = helper_n_generations(o, 50)

    solution, fitness = o.best(3)

    print("costs history", o.bestcosts_)
    print("best fitness", fitness[0])
    print("best solution", solution[0])

    assert len(o.fitness_) == 34
    assert len(o.population_) == 34

    assert len(solution) == 3
    assert len(fitness) == 3
Example #16
File: garules.py | Project: sorend/fylearn
    def fit_weights(self, rs, models, X, y):

        n_genes = self.n_models * len(self.classes_)

        def fitness_function(c):
            M = self.predict_(X, models, c)
            y_pred = np.argmin(M, 1)
            return 1.0 - accuracy_score(y, y_pred)

        ga = GeneticAlgorithm(fitness_function=helper_fitness(fitness_function),
                              elitism=3,
                              n_chromosomes=100,
                              n_genes=n_genes,
                              p_mutation=0.3,
                              random_state=rs)

        ga = helper_n_generations(ga, self.n_iterations_weights)  # advance the GA

        chromosomes, fitness = ga.best(1)

        return chromosomes[0]
Example #17
def test_tlbo_sphere():
    """Example given in matlab code"""

    o = TeachingLearningBasedOptimizer(f=lambda x: np.sum(x**2),
                                       lower_bound=np.ones(10) * -10.0,
                                       upper_bound=np.ones(10) * 10.0,
                                       n_population=34)

    o = helper_n_generations(o, 50)

    solution, fitness = o.best(3)

    print("costs history", o.bestcosts_)
    print("best fitness", fitness[0])
    print("best solution", solution[0])

    assert len(o.fitness_) == 34
    assert len(o.population_) == 34

    assert len(solution) == 3
    assert len(fitness) == 3