def __init__(self, of, maxeval, E, alfa0, N_general, N_local, correction):
    """
    Initialization
    :param of: any objective function to be optimized
    :param maxeval: maximum allowed number of evaluations
    :param E: maximum number of epochs, i.e. the maximum number of random vector generations
    :param alfa0: the first dx is generated from the range [-alfa0, alfa0]
    :param N_general: alfa does not change for N_general steps during the general (global) search
    :param N_local: alfa does not change for N_local steps during the local search
    :param correction: correction strategy applied to x values
    """
    Heuristic.__init__(self, of, maxeval)
    self.k = 0  # alfa counter - present in the article, but not used here
    self.epoch = 0  # epoch counter
    self.E = E
    self.N_general = N_general
    self.GSE = self.E - 2 * self.N_general  # general search stop criterion,
                                            # i.e. number of epochs spent in the general search
    self.N_local = N_local
    self.alfa = [alfa0]  # all alfa values are stored here
    self.x_curr = None  # current value of x
    self.alfa_best = None  # alfa connected with the best solution
    self.y_gen = np.zeros(self.E + 1, dtype=float)  # all y values, needed for alfa calculation
                                                    # (+1 because of the initial value)
    self.correction = correction
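# Usage sketch (illustrative, not taken from this file): the enclosing class
# name `AlfaRandomSearch` and the `Correction` helper (heur_aux.py style) are
# placeholders; `of` is any objective function accepted by Heuristic.
#
#   heur = AlfaRandomSearch(of, maxeval=10000, E=100, alfa0=1.0,
#                           N_general=10, N_local=10,
#                           correction=Correction(of))
#   result = heur.search()
#
# Note that GSE = E - 2 * N_general, i.e. the number of epochs reserved for the
# general search, is derived from E and N_general rather than passed in directly.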
def __init__(self, of, maxeval, N, CR, F):
    Heuristic.__init__(self, of, maxeval)
    assert N >= 4, 'N should be at least equal to 4'
    self.N = N  # population size
    self.CR = CR  # crossover probability
    assert 0 <= F <= 2, 'F should be from [0; 2]'
    self.F = F  # differential weight
def __init__(self, of, maxeval, N, CR, F):
    Heuristic.__init__(self, of, maxeval)
    assert N >= 4, 'N should be at least equal to 4'
    self.N = N  # Population size
    self.n = np.size(of.a)  # Dimension of the search space (size of of.a)
    self.CR = CR  # Crossover probability
    assert 0 <= F <= 2, 'F should be from [0; 2]'
    self.F = F  # Differential weight
    self.name = 'DE'
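# Usage sketch (illustrative): `DifferentialEvolution` is an assumed class name
# for the constructor above; `of` is any objective function of this framework.
#
#   de = DifferentialEvolution(of, maxeval=10000, N=10, CR=0.5, F=1.0)
#   result = de.search()
#
# The N >= 4 assert matches classic DE/rand/1, which mutates each agent using
# three other, mutually distinct agents; F in [0, 2] scales the difference
# vector and CR is the per-component crossover probability.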
def __init__(self, of, maxeval, N, M, Tsel1, Tsel2, mutation, crossover):
    Heuristic.__init__(self, of, maxeval)
    assert M > N, 'M should be larger than N'
    self.N = N  # population size
    self.M = M  # working population size
    self.Tsel1 = Tsel1  # first selection temperature
    self.Tsel2 = Tsel2  # second selection temperature
    self.mutation = mutation
    self.crossover = crossover
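# Usage sketch (illustrative): the class name `GeneticOptimization` and the
# mutation/crossover objects are assumptions; any mutation and crossover
# following the heur_aux.py interface mentioned below should fit here.
#
#   go = GeneticOptimization(of, maxeval=10000, N=20, M=40,
#                            Tsel1=1.0, Tsel2=0.2,
#                            mutation=some_mutation, crossover=some_crossover)
#   result = go.search()
#
# M > N is enforced because M is the enlarged working population built from the
# base population of size N; Tsel1 and Tsel2 presumably temper the first
# (parent) and second (survivor) selection steps, respectively.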
def __init__(self, of, maxeval, hmax=np.inf, random_descent=False):
    """
    Initialization
    :param of: any objective function to be optimized
    :param maxeval: maximum allowed number of evaluations
    :param hmax: maximum number of local improvements (0 = Random Shooting)
    :param random_descent: turns on random descent, instead of the steepest one (default)
    """
    Heuristic.__init__(self, of, maxeval)
    self.hmax = hmax
    self.random_descent = random_descent
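# Usage sketch (illustrative): `ShootAndGo` is an assumed name for the class
# owning this constructor, based on the hmax semantics in the docstring.
#
#   rs = ShootAndGo(of, maxeval=1000, hmax=0)                # hmax=0 -> plain Random Shooting
#   sg = ShootAndGo(of, maxeval=1000)                        # unlimited steepest descent (default)
#   rd = ShootAndGo(of, maxeval=1000, random_descent=True)   # random descent instead
#   result = sg.search()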
def __init__(self, of, maxevalFsa, maxevalGo, N, M, Tsel1, Tsel2, mutation, crossover, T0, n0, alpha):
    Heuristic.__init__(self, of, maxevalGo)
    self.N = N  # population size
    self.M = M  # working population size
    self.Tsel1 = Tsel1  # first selection temperature
    self.Tsel2 = Tsel2  # second selection temperature
    self.mutation = mutation
    self.crossover = crossover
    self.T0 = T0  # initial temperature
    self.n0 = n0  # cooling strategy parameter - number of steps
    self.alpha = alpha  # cooling strategy parameter - exponent
    self.geneLenth = np.size(self.of.a)  # length of the solution vector (size of of.a)
    self.maxevalFsa = maxevalFsa  # evaluation budget for the FSA stage
    self.maxevalGo = maxevalGo  # evaluation budget for the GO stage
def __init__(self, of, maxeval, T0, n0, alpha, mutation):
    """
    Initialization
    :param of: any objective function to be optimized
    :param maxeval: maximum allowed number of evaluations
    :param T0: initial temperature
    :param n0: cooling strategy parameter - number of steps
    :param alpha: cooling strategy parameter - exponent
    :param mutation: mutation to be used for the specific objective function (see heur_aux.py)
    """
    Heuristic.__init__(self, of, maxeval)
    self.T0 = T0
    self.n0 = n0
    self.alpha = alpha
    self.mutation = mutation
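# Usage sketch (illustrative): `FastSimulatedAnnealing` and `CauchyMutation`
# are assumed names; the docstring above points to heur_aux.py for the actual
# mutation classes, and Correction(of) is one plausible correction strategy.
#
#   mut = CauchyMutation(r=0.75, correction=Correction(of))
#   fsa = FastSimulatedAnnealing(of, maxeval=10000, T0=1.0, n0=5, alpha=2,
#                                mutation=mut)
#   result = fsa.search()
#
# T0, n0 and alpha only parameterize the cooling schedule here; how the
# temperature actually decays with the step counter is applied in the search
# loop, not in the constructor.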
def __init__(self, of, maxeval, N):
    Heuristic.__init__(self, of, maxeval)
    self.N = N  # population size
    self.P = int(N / 2)  # parents' size
def __init__(self, of, maxeval, N):
    Heuristic.__init__(self, of, maxeval)
    self.N = N  # population size
    self.P = int(N / 2)  # parents' size