Example 1
def get_best_nest(nest, newnest, fitness, n, dim, objf):
    # Evaluating all new solutions
    tempnest = numpy.copy(nest)
    fbench = Function(objf, dim)
    info = fbench.info()

    ub = info['upper']
    lb = info['lower']
    optimum = info['best']

    fun_fitness = fbench.get_eval_function()
    for j in range(0, n):
        fnew = fun_fitness(newnest[j, :])
        if fnew <= fitness[j]:
            fitness[j] = fnew
            tempnest[j, :] = newnest[j, :]

    # Find the current best

    fmin = min(fitness)
    K = numpy.argmin(fitness)
    bestlocal = tempnest[K, :]

    return fmin, bestlocal, tempnest, fitness
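A minimal smoke-test sketch for this helper, assuming numpy and the cec2005real Function class are importable next to it; the sizes and the function id below are illustrative:

# Hypothetical check: 25 random nests on CEC 2005 F1 in 10 dimensions
n, dim, objf = 25, 10, 1
info = Function(objf, dim).info()
nest = numpy.random.uniform(info['lower'], info['upper'], (n, dim))
fitness = numpy.full(n, float("inf"))
fmin, best, nest, fitness = get_best_nest(nest, numpy.copy(nest), fitness, n, dim, objf)
print(fmin)  # best fitness among the evaluated nests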
Example 2
def GA(function, mutation_op, population_size=100, num_gen=50):
    """
	function - the benchmark function to be run on
	mutation_op - the type of mutation the children will be subjected to
	population_size - number of possible solutions within a generation
	num_gen - number of iterations
	"""
    num_func = bench_funcs[function]
    bench = Function(num_func, 50)
    info = bench.info()
    fitness = bench.get_eval_function()

    population = first_population(population_size, info)
    best = population[0]
    best_fit = fitness(best)

    best_of_gen = []

    for _ in range(num_gen):
        # Track the best individual seen so far
        for pop in population:
            new_pop_fit = fitness(pop)
            if new_pop_fit < best_fit:
                best = pop
                best_fit = new_pop_fit
        breeders = tournament(population, fitness)
        children = create_children(breeders, mutation_op, info)
        population = np.array(children)
        best_of_gen.append(best_fit)

    # each generation's best, overall best
    return best_of_gen, best_fit
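A hedged call sketch; the benchmark key and mutation operator below are placeholders, since the snippet does not show what bench_funcs or the mutation operators actually contain:

# Hypothetical invocation: 'f1' and 'uniform' stand in for real keys/operators
history, best_fitness = GA('f1', mutation_op='uniform',
                           population_size=100, num_gen=50)
print(best_fitness)  # overall best fitness after num_gen generations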
Example 3
def PSO(function, inf_count, swarm_size=100, num_movements=50):
    """
	function - the benchmark function to be run on
	inf_count - number of informants that each particle has
	swarm_size - number of possible solutions within a generation
	num_movements - the number of times the particles adjust their position
	"""
    num_func = bench_funcs[function]
    bench = Function(num_func, 50)
    info = bench.info()
    fitness = bench.get_eval_function()

    swarm = generate_swarm(swarm_size, info)
    velocities = generate_velocities(swarm_size, info)
    informants = get_informants(swarm_size, inf_count, info)
    alpha, beta, gamma, delta = generate_weights()

    # index of the best known position of an individual particle i, init = self
    p_best = np.arange(swarm_size, dtype=int)
    # index of the best known position among an individual i's informants
    best_inf_position = np.zeros(swarm_size, dtype=int)
    g_best = 0  # index of global best location

    # the best fitness calculated after position adjustment
    best_of_movement = []
    for _ in range(num_movements):
        for i in range(len(swarm)):
            particle = swarm[i]
            curr_fit = fitness(particle)
            p_fit = fitness(swarm[p_best[i]])
            if (curr_fit < p_fit):
                p_best[i] = i
            if (fitness(swarm[p_best[i]]) < fitness(swarm[g_best])):
                g_best = p_best[i]
        for i in range(len(swarm)):
            curr_best = swarm[p_best[i]]
            inf_best_index = get_best_of_inf(swarm, informants[i], fitness)
            inf_best = swarm[inf_best_index]
            best_inf_position[i] = inf_best_index
            overall_best = swarm[g_best]
            particle = swarm[i]

            # Per-dimension random coefficients for the three attraction terms
            b = np.random.uniform(0, beta, len(particle))
            c = np.random.uniform(0, gamma, len(particle))
            d = np.random.uniform(0, delta, len(particle))
            velocities[i] = ((alpha * velocities[i])
                             + b * (curr_best - particle)
                             + c * (inf_best - particle)
                             + d * (overall_best - particle))
        for particle, vel in zip(swarm, velocities):
            particle += vel
        best_of_movement.append(fitness(swarm[g_best]))

    # each adjustment's best fitness, overall best fitness
    return best_of_movement, fitness(swarm[g_best])
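A similarly hedged call sketch (the benchmark key is again a placeholder):

# Hypothetical invocation: each particle consults 6 informants
history, best_fitness = PSO('f1', inf_count=6, swarm_size=100, num_movements=50)
print(best_fitness)  # fitness of the global best after the last movement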
Example 4
def main(args):
    "Main program."
    parser = argparse.ArgumentParser(description="Running SHADE with 2005 Benchmark")
    parser.add_argument('-f', dest='fun', type=int, choices=range(1, 26),
                        required=True,
                        help="the function value [1-25]")

    parser.add_argument('-d', dest='dim', type=int, choices=[2, 10, 30, 50],
                        required=True,
                        help="the dimensionality [2, 10, 30, 50]")

    parser.add_argument('-r', dest='run', default=25, type=int,
                        help="run times")

    parser.add_argument('-s', dest='seedid', required=True, type=int,
                        help="seed", choices=range(1, 6))

    params = parser.parse_args(args)
    seeds = [12345679, 32379553, 235325, 5746435, 253563]

    if (params.run <= 0):
        parser.print_help()
        return

    # Set the seeds
    numpy.random.seed(seeds[params.seedid-1])

    dim = params.dim
    fid = params.fun
    fun = Function(fid, dim)
    info = fun.info()
    fitness_fun = fun.get_eval_function()
    output = "results/shade_cec2005_f{0}d{1}_s{2}r{3}".format(fid, dim, params.seedid, params.run)
    info['best'] = 0
    ignoreLimits = (fid != 7 and fid != 25)
    noisy = (fid == 4 or fid == 25)

    
    if os.path.exists(output):
        return

    for r in range(params.run):
        result, bestIndex = shade.improve(fitness_fun, info, dim, 10000 * dim,
                                          name_output=output, replace=False,
                                          times=params.run, popsize=min(dim, 10),
                                          H=2 * dim, ignoreLimits=ignoreLimits)
        best_sol = result.solution
        best_fitness = result.fitness

        # Skip the consistency check for the functions flagged as noisy above
        if not noisy:
            assert fitness_fun(best_sol) == best_fitness
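For reference, a hedged driver call; the function, dimension, run count, and seed slot below are illustrative, and the shade module plus a writable results/ directory are assumed to exist:

# Hypothetical call: F7 in 30 dimensions, 25 runs, seed slot 3 (uses seeds[2])
main(['-f', '7', '-d', '30', '-r', '25', '-s', '3'])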
Example 5
# Import all the DE algorithm variants from the Python Advanced DE library
import numpy as np
from helper import functions, algos, updateRuns, plotMedians, storeMeanResult, RUNS
import os
import commons
from cec2005real.cec2005 import Function

############################################
#              Main Function               #
############################################
dims = [2, 10, 30]
for dim in dims:
    for funcNum in functions.keys():
        fbench = Function(funcNum, dim)
        info = fbench.info()
        function = fbench.get_eval_function()
        bounds = [(info['lower'], info['upper'])]
        startingPopulations = [
            commons.init_population(10 * dim, dim, np.array(bounds))
            for x in range(RUNS)
        ]
        for j, algo in enumerate(algos.keys()):
            for x in range(0, RUNS):
                params = algo.get_default_params(dim=dim)
                # Tile the (lower, upper) pair per dimension without rebinding
                # `bounds`, which would corrupt the bounds on subsequent runs
                params['bounds'] = np.array(bounds * dim)
                params['func'] = function
                #params['max_evals'] = 10000
                params['opts'] = None
                params['answer'] = None
                params['population'] = startingPopulations[x].copy()
Example 6
def CS(objf, dim, n, N_IterTotal):
    # objf: CEC 2005 function id, dim: dimensionality, n: number of nests, N_IterTotal: iterations
    fbench = Function(objf, dim)
    info = fbench.info()

    ub = info['upper']
    lb = info['lower']
    optimum = info['best']

    # Discovery rate of alien eggs/solutions
    pa = 0.25

    nd = dim

    convergence = []

    # Initialize nests randomly within the benchmark bounds
    nest = numpy.random.rand(n, dim) * (ub - lb) + lb

    new_nest = numpy.copy(nest)

    bestnest = [0] * dim

    fitness = numpy.zeros(n)
    fitness.fill(float("inf"))

    s = solution()

    print("CS is optimizing " + str(objf))

    timerStart = time.time()
    s.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")

    fmin, bestnest, nest, fitness = get_best_nest(nest, new_nest, fitness, n,
                                                  dim, objf)
    # Main loop counter
    for iter in range(0, N_IterTotal):
        # Generate new solutions (but keep the current best)

        new_nest = get_cuckoos(nest, bestnest, lb, ub, n, dim)

        # Evaluate new solutions and find best
        fnew, best, nest, fitness = get_best_nest(nest, new_nest, fitness, n,
                                                  dim, objf)

        new_nest = empty_nests(new_nest, pa, n, dim)

        # Evaluate new solutions and find best
        fnew, best, nest, fitness = get_best_nest(nest, new_nest, fitness, n,
                                                  dim, objf)

        if fnew < fmin:
            fmin = fnew
            bestnest = best

        if (iter % 100 == 0):
            print([
                'At iteration ' + str(iter) + ' the best fitness is ' +
                str(fmin) + ": CS" + " :" + str(objf)
            ])
            convergence.append(fmin)
    convergence.append(fitness[0])
    convergence.append(fitness[6])
    convergence.append(fitness[12])
    convergence.append(fitness[18])
    convergence.append(fitness[24])
    convergence.append(numpy.sum(fitness) / n)
    convergence.append(numpy.std(fitness))
    timerEnd = time.time()
    s.endTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    s.executionTime = timerEnd - timerStart
    s.convergence = convergence
    s.optimizer = "CS"
    s.objfname = "F" + str(objf)

    return s
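An illustrative call, assuming the solution, get_cuckoos, and empty_nests helpers from the same module are available; n is kept at 25 or more because the convergence bookkeeping indexes fitness[24]:

# Illustrative run: CEC 2005 F5, 30 dimensions, 25 nests, 500 iterations
s = CS(objf=5, dim=30, n=25, N_IterTotal=500)
print(s.optimizer, s.executionTime, s.convergence[-2])  # [-2] holds the mean fitness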
Example 7
def PFA(objf, n, dim, MaxGeneration):

    fbench = Function(objf, dim)
    info = fbench.info()

    ub = info['upper']
    lb = info['lower']
    optimum = info['best']
    print(optimum)

    #FFA parameters
    alpha = 0.50  # Randomness 0--1 (highly random)
    betamin = 0.50  # minimum value of beta
    gamma = 1  # Absorption coefficient

    zn = numpy.ones(n)
    zn.fill(float("inf"))

    ns = numpy.random.uniform(0, 1, (n, dim)) * (ub - lb) + lb
    Lightn = numpy.ones(n)
    Lightn.fill(float("inf"))
    Lightnprev = numpy.ones(n)
    Lightnprev.fill(float("inf"))

    convergence = []
    s = solution()

    print("PFA is optimizing F" + str(objf))

    timerStart = time.time()
    s.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")

    # Main loop
    for k in range(0, MaxGeneration):  # start iterations

        # This line of reducing alpha is optional:
        # alpha = alpha_new(alpha, MaxGeneration)
        # Note: no copy is taken here, so Lightnprev refers to the same array as Lightn
        Lightnprev = Lightn
        # Evaluate new solutions (for all n fireflies)
        fun_fitness = fbench.get_eval_function()
        for i in range(0, n):
            zn[i] = fun_fitness(ns[i, :])
            Lightn[i] = zn[i]

        # Ranking fireflies by their light intensity/objectives

        Lightn = numpy.sort(zn)
        Index = numpy.argsort(zn)
        ns = ns[Index, :]

        #Find the current best
        nso = ns
        Lighto = Lightn
        nbest = ns[0, :]
        Lightbest = Lightn[0]

        # For output only
        fbest = Lightbest

        # Move all fireflies to the better locations
        scale = numpy.ones(dim) * abs(ub - lb)
        for i in range(0, n):
            # The attractiveness parameter beta=exp(-gamma*r)
            for j in range(0, n):
                # r=numpy.sqrt(numpy.sum((ns[i,:]-ns[j,:])**2));
                # r2=numpy.sqrt(numpy.sum((ns[i,:]-ns[0,:])**2));
                r = numpy.sum((ns[i, :] - ns[j, :]))
                r2 = numpy.sum((ns[0, :] - ns[j, :]))
                #r=1
                # Update moves
                if Lightn[i] > Lighto[j]:  # Brighter and more attractive
                    # PropFA parameters
                    per = ((k / MaxGeneration) * 100) / 85
                    per2 = numpy.heaviside(per - 1, 0.5)
                    ratA = (numpy.absolute(Lightn[i]) - numpy.absolute(
                        Lightnprev[i])) / max(numpy.absolute(Lightn[i]),
                                              numpy.absolute(Lightnprev[i]))
                    ratB = (numpy.absolute(Lightn[j]) - numpy.absolute(
                        Lightn[i])) / max(numpy.absolute(Lightn[j]),
                                          numpy.absolute(Lightn[i]))
                    ratC = (numpy.absolute(fbest) - numpy.absolute(
                        Lightn[i])) / max(numpy.absolute(fbest),
                                          numpy.absolute(Lightn[i]))
                    ratAvg = (ratA + ratB + ratC) / 3
                    scale2 = numpy.absolute(ub - lb)

                    if (Lightnprev[i] == Lightn[i]):
                        alpha = 10
                    else:
                        r3 = numpy.sum((ns[0, :] - ns[n - 1, :]))
                        alpha = (r2 / 1000) * ratAvg * numpy.exp(-k * per2)

                    if (Lightnprev[i] == Lightn[i]):
                        gamma = 1
                    else:
                        gamma = (ratB / ratC)

                    beta0 = 1
                    beta = (beta0 - betamin) * numpy.exp(
                        -gamma * r**2) + betamin
                    beta2 = (beta0 - betamin) * numpy.exp(
                        -gamma * r2**2) + betamin
                    tmpf = alpha * (numpy.random.rand(dim) - 0.5)

                    ns[i, :] = ns[i, :] + (beta * (nso[j, :] - ns[i, :])) + (
                        beta2 * (nso[0, :] - ns[i, :])) + tmpf
        ns = numpy.clip(ns, lb, ub)

        IterationNumber = k
        BestQuality = fbest

        print([
            'At iteration ' + str(k) + ' the best fitness is ' +
            str(BestQuality) + ": PFA" + " :" + str(objf)
        ])
        if (k % 100 == 0):
            convergence.append(fbest)
    #
    ####################### End main loop
    convergence.append(Lightn[0])
    convergence.append(Lightn[6])
    convergence.append(Lightn[12])
    convergence.append(Lightn[18])
    convergence.append(Lightn[24])
    convergence.append(numpy.sum(Lightn) / n)
    convergence.append(numpy.std(Lightn))
    timerEnd = time.time()
    s.endTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    s.executionTime = timerEnd - timerStart
    s.convergence = convergence
    s.optimizer = "PFA"
    s.objfname = "F" + str(objf)

    return s
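And an illustrative call for PFA under the same assumptions (the solution helper is importable and n >= 25 because of the Lightn[24] bookkeeping):

# Illustrative run: CEC 2005 F3, 10 dimensions, 25 fireflies, 200 generations
s = PFA(objf=3, n=25, dim=10, MaxGeneration=200)
print(s.objfname, s.convergence[-2], s.convergence[-1])  # mean and std of final intensities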