def get_best(self, non_dom_leaves):
    """
    Return the best row from each of the non-dominated leaves
    :param non_dom_leaves: list of non-dominated leaf nodes
    :return: (bests, evals) - best row per leaf and the number of evaluations used
    """
    bests = []
    evals = 0
    for leaf in non_dom_leaves:
        east = leaf._pop[0]
        west = leaf._pop[-1]
        if not east.evaluated:
            east.evaluate(self.problem)
            evals += 1
        if not west.evaluated:
            west.evaluate(self.problem)
            evals += 1
        weights = self.problem.directional_weights()
        weighted_west = [c * w for c, w in zip(west.objectives, weights)]
        weighted_east = [c * w for c, w in zip(east.objectives, weights)]
        objs = self.problem.objectives
        west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        # Keep the pole with the smaller continuous-domination loss
        if east_loss < west_loss:
            bests.append(east)
        else:
            bests.append(west)
    return bests, evals
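# The pole comparison above (directional weights, then a two-way
# continuous-domination loss) recurs in divide() and _evolve() below. A minimal
# standalone sketch of one common formulation (Zitzler-style continuous
# domination) is given here; `exp_loss` is a hypothetical stand-in and may
# differ from what Algorithm.dominates_continuous actually computes. It assumes
# already-weighted objectives that are to be minimized, normalized by the
# supplied mins/maxs.
import math

def exp_loss(xs, ys, mins, maxs):
    # Loss of xs measured against ys on normalized values; a more negative
    # result means xs looks better, so the candidate with the smaller loss
    # (as in `east_loss < west_loss` above) is preferred.
    n = len(xs)
    total = 0.0
    for x, y, lo, hi in zip(xs, ys, mins, maxs):
        span = (hi - lo) or 1.0
        xn, yn = (x - lo) / span, (y - lo) / span
        total += -math.exp((yn - xn) / n)
    return total / n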
def divide(self, threshold, abort=False):
    """
    Recursively partition the tree
    :param threshold: minimum leaf size; nodes smaller than this are not split
    :param abort: when True, mark this node as pruned and stop splitting
    :return: self
    """
    def afew(pop):
        clones = [point.clone() for point in pop]
        return clones

    self._pop = self.fastmap(self.problem, self._pop)
    self.n = len(self._pop)
    n = len(self._pop)
    cut, _ = self.binary_chop(self._pop, n // 2, None, 2 * n ** 0.5, n)
    self.abort = abort
    if not abort and n >= threshold:
        # Split the population about the cut point on the fastmap axis
        wests, easts = self.split(self._pop, cut)
        if self.west != self.east:
            if self.N > cut:
                little_n = cut
            else:
                little_n = self.N
            west_abort = False
            east_abort = False
            if not self.east.evaluated:
                self.east.evaluate(self.problem)
            if not self.west.evaluated:
                self.west.evaluate(self.problem)
            weights = self.problem.directional_weights()
            weighted_west = [c * w for c, w in zip(self.west.objectives, weights)]
            weighted_east = [c * w for c, w in zip(self.east.objectives, weights)]
            objs = self.problem.objectives
            west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                       mins=[o.low for o in objs],
                                                       maxs=[o.high for o in objs])
            east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                       mins=[o.low for o in objs],
                                                       maxs=[o.high for o in objs])
            # Prune the half whose pole is dominated by the other pole
            EPSILON = 1.0
            if west_loss < EPSILON * east_loss:
                east_abort = True
            if east_loss < EPSILON * west_loss:
                west_abort = True
            self.left = Node(self.problem, afew(wests), self.total_size,
                             parent=self, level=self.level + 1, n=little_n)\
                .divide(threshold, abort=west_abort)
            self.right = Node(self.problem, afew(easts), self.total_size,
                              parent=self, level=self.level + 1, n=little_n)\
                .divide(threshold, abort=east_abort)
    return self
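# A rough sketch of how divide() might be driven when building the tree. The
# Node constructor arguments mirror the recursive calls above, but the name
# `build_tree`, the root's parent/level keywords, and the sqrt-of-population
# threshold are assumptions for illustration, not part of this codebase.
def build_tree(problem, population):
    root = Node(problem, [point.clone() for point in population], len(population),
                parent=None, level=0, n=len(population))
    # Stop splitting once a node holds fewer than sqrt(N) rows (assumed policy)
    return root.divide(threshold=int(len(population) ** 0.5))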
def __init__(self, problem, **settings):
    """
    Initialize the GALE algorithm
    :param problem: Instance of the problem
    :param settings: Keyword settings that override the defaults
    """
    Algorithm.__init__(self, GALE.__name__, problem)
    self.select = self._select
    self.evolve = self._evolve
    self.recombine = self._recombine
    self.settings = default_settings().update(**settings)
def get_best(self, non_dom_leaves):
    """
    Return the best row from each of the non-dominated leaves
    :param non_dom_leaves: list of non-dominated leaf nodes
    :return: (bests, evals) - best row per leaf and the number of evaluations used
    """
    bests = []
    evals = 0
    for leaf in non_dom_leaves:
        east = leaf._pop[0]
        west = leaf._pop[-1]
        if not east.evaluated:
            east.evaluate(self.problem)
            evals += 1
        if not west.evaluated:
            west.evaluate(self.problem)
            evals += 1
        weights = self.problem.directional_weights()
        weighted_west = [c * w for c, w in zip(west.objectives, weights)]
        weighted_east = [c * w for c, w in zip(east.objectives, weights)]
        objs = self.problem.objectives
        west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        # Keep the pole with the smaller continuous-domination loss
        if east_loss < west_loss:
            bests.append(east)
        else:
            bests.append(west)
    return bests, evals
def _evolve(self, selected):
    evals = 0
    GAMMA = self.settings.gamma
    for leaf in selected:
        # Poles
        east = leaf._pop[0]
        west = leaf._pop[-1]
        # Evaluate poles if required
        if not east.evaluated:
            east.evaluate(self.problem)
            evals += 1
        if not west.evaluated:
            west.evaluate(self.problem)
            evals += 1
        weights = self.problem.directional_weights()
        weighted_west = [c * w for c, w in zip(west.objectives, weights)]
        weighted_east = [c * w for c, w in zip(east.objectives, weights)]
        objs = self.problem.objectives
        west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        # Determine the better pole
        if east_loss < west_loss:
            south_pole, north_pole = east, west
        else:
            south_pole, north_pole = west, east
        # Magnitude of the mutations
        g = abs(south_pole.x - north_pole.x)
        for row in leaf._pop:
            clone = row.clone()
            clone_x = row.x
            for dec_index in range(len(self.problem.decisions)):
                # A few naming shorthands
                me = row.decisions[dec_index]
                good = south_pole.decisions[dec_index]
                bad = north_pole.decisions[dec_index]
                dec = self.problem.decisions[dec_index]
                if me > good:
                    d = -1
                elif me < good:
                    d = +1
                else:
                    d = 0
                # Mutate towards the better pole, clipped to the decision's range
                row.decisions[dec_index] = min(dec.high, max(dec.low, me + me * g * d))
            # Project the mutant back onto the pole-to-pole axis
            a = row.dist(self.problem, north_pole, is_obj=False)
            b = row.dist(self.problem, south_pole, is_obj=False)
            x = (a ** 2 + row.c ** 2 - b ** 2) / (2 * row.c + 0.00001)
            row.x = x
            GALE.count += 1
            # Revert the mutation if it moved too far or breaks a constraint
            if abs(x - clone_x) > (g * GAMMA) or not self.problem.check_constraints(row.decisions):
                GALE.unsatisfied += 1
                row.decisions = clone.decisions
                row.x = clone_x
    pop = []
    for leaf in selected:
        for row in leaf._pop:
            if row.evaluated:
                row.evaluate(self.problem)  # Re-evaluating after mutation
            pop.append(row)
    return pop, evals
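# The projection above is the law-of-cosines step used by fastmap: given a
# row's distance a to one pole, b to the other pole, and pole-to-pole distance
# c, its coordinate along the pole axis (measured from the first pole) is
# (a^2 + c^2 - b^2) / (2c). A tiny standalone illustration on plain numbers;
# `fastmap_x` is a hypothetical helper, not part of this codebase.
def fastmap_x(a, b, c, eps=0.00001):
    # eps guards against c == 0, matching the 0.00001 term used in _evolve
    return (a ** 2 + c ** 2 - b ** 2) / (2 * c + eps)

# e.g. fastmap_x(2.0, 3.0, 5.0) is approximately 2.0: a point lying on the
# axis, 2 units from the first pole and 3 from the second, with poles 5 apart.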
def __init__(self, problem, **settings):
    """
    Initialize the DE algorithm
    """
    Algorithm.__init__(self, DE.__name__, problem)
    self.settings = default_settings().update(**settings)
def _evolve(self, selected):
    evals = 0
    GAMMA = self.settings.gamma
    for leaf in selected:
        # Poles
        east = leaf._pop[0]
        west = leaf._pop[-1]
        # Evaluate poles if required
        if not east.evaluated:
            east.evaluate(self.problem)
            evals += 1
        if not west.evaluated:
            west.evaluate(self.problem)
            evals += 1
        weights = self.problem.directional_weights()
        weighted_west = [c * w for c, w in zip(west.objectives, weights)]
        weighted_east = [c * w for c, w in zip(east.objectives, weights)]
        objs = self.problem.objectives
        west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                   mins=[o.low for o in objs],
                                                   maxs=[o.high for o in objs])
        # Determine better pole
        if east_loss < west_loss:
            south_pole, north_pole = east, west
        else:
            south_pole, north_pole = west, east
        # Magnitude of the mutations
        g = abs(south_pole.x - north_pole.x)
        for row in leaf._pop:
            clone = row.clone()
            clone_x = row.x
            for dec_index in range(len(self.problem.decisions)):
                # Few naming shorthands
                me = row.decisions[dec_index]
                good = south_pole.decisions[dec_index]
                bad = north_pole.decisions[dec_index]
                dec = self.problem.decisions[dec_index]
                if me > good:
                    d = -1
                elif me < good:
                    d = +1
                else:
                    d = 0
                # Mutating towards the better solution
                row.decisions[dec_index] = min(dec.high, max(dec.low, me + me * g * d))
            # Project the mutant
            a = row.dist(self.problem, north_pole, is_obj=False)
            b = row.dist(self.problem, south_pole, is_obj=False)
            x = (a ** 2 + row.c ** 2 - b ** 2) / (2 * row.c + 0.00001)
            row.x = x
            if abs(x - clone_x) > (g * GAMMA) or not self.problem.check_constraints(row):
                row.decisions = clone.decisions
                row.x = clone_x
    pop = []
    for leaf in selected:
        for row in leaf._pop:
            if row.evaluated:
                row.evaluate(self.problem)  # Re-evaluating
            pop.append(row)
    return pop, evals
def __init__(self, problem, **settings):
    """
    Initialize the GALE algorithm
    """
    Algorithm.__init__(self, 'GALE', problem)
    self.select = self._select
    self.evolve = self._evolve
    self.recombine = self._recombine
    self.settings = default_settings().update(**settings)
def divide(self, threshold, abort=False):
    """
    Recursively partition tree
    :param threshold:
    :return:
    """
    def afew(pop):
        clones = [point.clone() for point in pop]
        return clones

    self._pop = self.fastmap(self.problem, self._pop)
    self.n = len(self._pop)
    n = len(self._pop)
    cut, _ = self.binary_chop(self._pop, n // 2, None, 2 * n ** 0.5, n)
    self.abort = abort
    if not abort and n >= threshold:
        # Splitting
        wests, easts = self.split(self._pop, cut)
        if self.west != self.east:
            if self.N > cut:
                little_n = cut
            else:
                little_n = self.N
            west_abort = False
            east_abort = False
            if not self.east.evaluated:
                self.east.evaluate(self.problem)
            if not self.west.evaluated:
                self.west.evaluate(self.problem)
            weights = self.problem.directional_weights()
            weighted_west = [c * w for c, w in zip(self.west.objectives, weights)]
            weighted_east = [c * w for c, w in zip(self.east.objectives, weights)]
            objs = self.problem.objectives
            west_loss = Algorithm.dominates_continuous(weighted_west, weighted_east,
                                                       mins=[o.low for o in objs],
                                                       maxs=[o.high for o in objs])
            east_loss = Algorithm.dominates_continuous(weighted_east, weighted_west,
                                                       mins=[o.low for o in objs],
                                                       maxs=[o.high for o in objs])
            EPSILON = 1.0
            if west_loss < EPSILON * east_loss:
                east_abort = True
            if east_loss < EPSILON * west_loss:
                west_abort = True
            self.left = Node(self.problem, afew(wests), self.total_size,
                             parent=self, level=self.level + 1, n=little_n)\
                .divide(threshold, abort=west_abort)
            self.right = Node(self.problem, afew(easts), self.total_size,
                              parent=self, level=self.level + 1, n=little_n)\
                .divide(threshold, abort=east_abort)
    return self