Exemplo n.º 1
0
def test_es():
    """Smoke test: run ES on the 5-D sphere benchmark (global minimum 0)."""

    def sphere(candidate):
        """Sphere objective F(x) = sum_i x_i^2; range [-100, 100], minimum 0."""
        return sum(component ** 2 for component in candidate)

    # Search space: five continuous variables x1..x5, each in [-100, 100].
    dim = 5
    space = {'x' + str(k): ['float', -100, 100] for k in range(1, dim + 1)}

    # seed pinned for reproducibility of the test run
    optimizer = ES(mode='min',
                   bounds=space,
                   fit=sphere,
                   lambda_=80,
                   mu=40,
                   mutpb=0.25,
                   cxmode='blend',
                   cxpb=0.7,
                   ncores=1,
                   seed=1)
    x_best, y_best, es_hist = optimizer.evolute(ngen=100, verbose=0)
Exemplo n.º 2
0
def tune_fit(cxpb, mu, alpha, cxmode):
    """Hyperparameter-tuning objective: evolve ES on the 5-D sphere
    problem with the supplied hyperparameters and return the best score.

    The parameters mirror the ES constructor arguments being tuned
    (crossover probability, parent count, blend alpha, crossover mode).
    Relies on a module-level ``sphere`` fitness function.
    """
    # Search space: five continuous variables x1..x5, each in [-100, 100].
    dim = 5
    BOUNDS = {'x' + str(k): ['float', -100, 100] for k in range(1, dim + 1)}

    # Fixed seed so every hyperparameter set is scored under the same randomness.
    es = ES(mode='min',
            bounds=BOUNDS,
            fit=sphere,
            lambda_=80,
            mu=mu,
            mutpb=0.1,
            alpha=alpha,
            cxmode=cxmode,
            cxpb=cxpb,
            ncores=1,
            seed=1)

    # Verbose off to keep tuning quiet; only the best score is needed.
    _, y_best, _ = es.evolute(ngen=100, verbose=0)

    return y_best  # best (lowest) fitness found
Exemplo n.º 3
0
def tune_fit(cxpb, mu, alpha, cxmode, mutpb):
    """Hyperparameter-tuning objective for the BEAM problem.

    Builds an ES optimizer from the candidate hyperparameters, evolves it
    for 100 generations, and returns the best fitness found. Relies on
    module-level ``BOUNDS`` and ``BEAM``.
    """
    # Fixed seed: each hyperparameter set is evaluated under the same randomness.
    optimizer = ES(mode='min',
                   bounds=BOUNDS,
                   fit=BEAM,
                   lambda_=80,
                   mu=mu,
                   mutpb=mutpb,
                   alpha=alpha,
                   cxmode=cxmode,
                   cxpb=cxpb,
                   ncores=1,
                   seed=1)

    # Quiet run; only the score matters during tuning.
    _, y_best, _ = optimizer.evolute(ngen=100, verbose=0)

    return y_best  # best (lowest) fitness reached
Exemplo n.º 4
0

#Define the fitness function
def FIT(individual):
    """Sphere benchmark objective.

    F(x) = sum_{i=1}^d x_i^2 over the components of *individual*
    (d = 1, 2, 3, ...); search range [-100, 100] per dimension,
    global minimum 0 at the origin.
    """
    total = 0
    for coordinate in individual:
        total += coordinate ** 2
    return total


#Setup the parameter space (d=5): five continuous variables x1..x5 in [-100, 100]
nx = 5
BOUNDS = {'x' + str(i): ['float', -100, 100] for i in range(1, nx + 1)}

#Configure ES on the sphere fitness FIT defined above (seed pinned for reproducibility)
es = ES(mode='min',
        bounds=BOUNDS,
        fit=FIT,
        lambda_=80,
        mu=40,
        mutpb=0.25,
        cxmode='blend',
        cxpb=0.7,
        ncores=1,
        seed=1)
#Evolve for 100 generations with progress printout enabled
x_best, y_best, es_hist = es.evolute(ngen=100, verbose=1)
Exemplo n.º 5
0
# NOTE(review): `bounds` (and presumably 'x1') is defined before this fragment — confirm upstream.
# x2 is restricted to a discrete grid of thicknesses; x3/x4 are continuous in [10, 200].
bounds['x2'] = ['grid', (0.0625, 0.125, 0.1875, 0.25, 0.3125, 0.375, 0.4375, 0.5, 0.5625, 0.625)]
bounds['x3'] = ['float', 10, 200]
bounds['x4'] = ['float', 10, 200]

########################
# Setup and evolute HHO
########################
# Minimize the Vessel objective with HHO; int_transform='minmax' — presumably maps
# continuous positions onto the grid/discrete variables; verify against HHO docs.
hho = HHO(mode='min', bounds=bounds, fit=Vessel, nhawks=50, 
                  int_transform='minmax', ncores=1, seed=1)
x_hho, y_hho, hho_hist=hho.evolute(ngen=200, verbose=False)
# Sanity check: re-evaluating the returned best individual reproduces the reported best fitness.
assert Vessel(x_hho) == y_hho

########################
# Setup and evolute ES 
########################
# Same problem with an evolution strategy using two-point crossover.
es = ES(mode='min', fit=Vessel, cxmode='cx2point', bounds=bounds, 
                 lambda_=60, mu=30, cxpb=0.7, mutpb=0.2, seed=1)
x_es, y_es, es_hist=es.evolute(ngen=200, verbose=False)
assert Vessel(x_es) == y_es

########################
# Setup and evolute PESA
########################
# PESA with an alpha schedule from 0.01 to 1.0 and a 5% backdoor rate.
pesa=PESA(mode='min', bounds=bounds, fit=Vessel, npop=60, mu=30, alpha_init=0.01,
          alpha_end=1.0, cxpb=0.7, mutpb=0.2, alpha_backdoor=0.05)
x_pesa, y_pesa, pesa_hist=pesa.evolute(ngen=200, verbose=False)
assert Vessel(x_pesa) == y_pesa

########################
# Setup and evolute BAT
########################
bat=BAT(mode='min', bounds=bounds, fit=Vessel, nbats=50, fmin = 0 , fmax = 1, 
Exemplo n.º 6
0
    1]  #drop the cases with scores < 1 (violates the constraints)
# Rank the tuning results: sort by 'score' ascending so row 0 is the best set.
# NOTE(review): `bayesres` is a DataFrame built before this fragment — confirm upstream.
bayesres = bayesres.sort_values(
    ['score'], axis='index',
    ascending=True)  #rank the scores from best (lowest) to worst (high)
print(bayesres.iloc[0:10, :]
      )  #the results are saved in dataframe and ranked from best to worst

#*************************************************************
# Part V: Rerun ES with the best hyperparameter set
#*************************************************************
# Rebuild ES using the top-ranked hyperparameters (first row after sorting);
# lambda_, ncores, and seed stay fixed as in the tuning runs.
es = ES(mode='min',
        bounds=BOUNDS,
        fit=BEAM,
        lambda_=80,
        mu=bayesres['mu'].iloc[0],
        mutpb=bayesres['mutpb'].iloc[0],
        alpha=bayesres['alpha'].iloc[0],
        cxmode=bayesres['cxmode'].iloc[0],
        cxpb=bayesres['cxpb'].iloc[0],
        ncores=1,
        seed=1)

# Final quiet run with the tuned configuration.
x_best, y_best, es_hist = es.evolute(ngen=100, verbose=0)

print('Best fitness (y) found:', y_best)
print('Best individual (x) found:', x_best)

#---------------------------------
# Plot
#---------------------------------
#Plot fitness convergence
Exemplo n.º 7
0
# Parameter Space
#---------------------------------
# 20-D continuous search space, each variable in [-32, 32]
# (typical Ackley-function domain — fitness ACKLEY is defined elsewhere).
d = 20
space = {}
for i in range(1, d + 1):
    space['x' + str(i)] = ['float', -32, 32]

#---------------------------------
# GA
#---------------------------------
# NOTE(review): labeled "GA" but implemented with the ES class (blend crossover).
ga = ES(mode='min',
        bounds=space,
        fit=ACKLEY,
        lambda_=50,
        mu=25,
        mutpb=0.15,
        alpha=0.5,
        cxmode='blend',
        cxpb=0.85,
        ncores=1,
        seed=1)
x_ga, y_ga, ga_hist = ga.evolute(ngen=150, verbose=0)

#---------------------------------
# GWO
#---------------------------------
# Grey-wolf-style optimizer on the same problem with a matching budget (150 gens).
gwo = GWO(mode='min', fit=ACKLEY, bounds=space, nwolves=50, ncores=1, seed=1)
x_gwo, y_gwo, gwo_hist = gwo.evolute(ngen=150, verbose=0)

#---------------------------------
# WOA
Exemplo n.º 8
0
          npop=sorted_res['npop'].iloc[0], 
          mu=int(sorted_res['frac'].iloc[0]*sorted_res['npop'].iloc[0]), 
          alpha_init=sorted_res['alpha_init'].iloc[0],
          alpha_end=1.0, 
          cxpb=sorted_res['cxpb'].iloc[0], 
          mutpb=sorted_res['mutpb'].iloc[0],
          c1=2.05, c2=2.05,
          alpha_backdoor=0.1, 
          seed=1)
# Run PESA from the warm-start population x0 (pesa/x0 built earlier in the file).
x_pesa, y_pesa, pesa_hist=pesa.evolute(ngen=300, x0=x0, verbose=False)

#Run GA
#use same optimized hyperparameters from PESA (or repeat tuning again for GA)
# NOTE(review): "GA" here is the ES class with two-point crossover;
# mu is derived as frac * npop from the top row of the tuning results.
ga=ES(mode='min', bounds=bounds, fit=Vessel, cxmode='cx2point',
      lambda_=sorted_res['npop'].iloc[0], 
      mu=int(sorted_res['frac'].iloc[0]*sorted_res['npop'].iloc[0]), 
      mutpb=sorted_res['mutpb'].iloc[0],
      cxpb=sorted_res['cxpb'].iloc[0], 
      ncores=1, seed=1)

#filter initial guess for GA
#lambda_=int(sorted_res['npop'].iloc[0])
#x0_ga=x0[:lambda_]
# x0=None: GA starts from a random population instead of the PESA warm start.
x_ga, y_ga, ga_hist=ga.evolute(ngen=300, x0=None, verbose=0)  #or use x0_ga if you like 
                                                              #(random guess seems to be better)

#*************************************************************
# Part VI: Post-processing
#*************************************************************
#plot results
plt.figure()
plt.plot(pesa_hist, label='PESA')