Example #1
 def __init__(self, code, *args):
    self.code = code
    from pyec.util.registry import BENCHMARKS
    # The code string has the form "alpha_benchmark1_benchmark2".
    alpha, bm1, bm2 = code.split("_")
    self.alpha = float(alpha)
    self.bm1 = BENCHMARKS.load(bm1)
    self.bm2 = BENCHMARKS.load(bm2)
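A minimal usage sketch for this constructor. The class name ``TwoBenchmarkMixture`` is a placeholder (the listing above does not show the enclosing class); only the ``"alpha_benchmark1_benchmark2"`` layout of the code string comes from the ``split("_")`` call:

# Hypothetical class name; the constructor above parses a weight and two
# registered benchmark names out of a single underscore-separated string.
mixed = TwoBenchmarkMixture("0.25_sphere_rastrigin")
print(mixed.alpha)           # 0.25
print(mixed.bm1, mixed.bm2)  # benchmark objects loaded via BENCHMARKS.load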
Example #2
 def run(self, fitness=None, history=None, extraArgs=None, **kwargs):
    """
      Run this :class:`PopulationDistribution` instance to maximize a fitness function.
      
      After running this method, the property ``PopulationDistribution.trainer`` will contain the :class:`Trainer` object used to optimize the function. 
      
      :param fitness: The fitness function (objective) to be maximized. 
                      If ``None``, then the function will be looked up from 
                      ``pyec.util.registry.BENCHMARKS`` based on the 
                      ``function`` property of the :class:`Config`.
      :type fitness: any callable object with a single argument, or ``None``
      :param history: A history object to extend, or ``None`` to create
                      a new history from the class in the ``history``
                      property of the :class:`Config` object.
      :type history: :class:`History` or ``None``
      :param extraArgs: Any extra args to be passed to 
                        ``pyec.util.registry.BENCHMARKS``.
      :type extraArgs: list 
      :returns: The :class:`History` for this optimizer.
    """
   
    # Set up the fitness if necessary
     if fitness is None:
        from pyec.util.registry import BENCHMARKS
        if extraArgs is None:
           extraArgs = []
        fitness = BENCHMARKS.load(self.config.function, *extraArgs)
       try:
          fitness.algorithm = self
          fitness.config = self.config
       except Exception:
          pass
       self.config.fitness = fitness
   
    # Set up history
    if history is None:
       history = self.config.history(self.config)
   
    if not self.compatible(history):
       name = history.__class__.__name__
       raise ValueError("Incompatible history class {0}".format(name))
  
    self.update(history, fitness)
      
    # get the sample
    pop = self.population(self.config.populationSize)
   
    # update the history
    self.history.update(pop, fitness, self.config.space)
   
    return self.history
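A minimal sketch of calling ``run`` directly. Here ``opt`` stands for an already-configured instance of a :class:`PopulationDistribution` subclass (constructing one is shown in the ``optimize`` example below), and the NumPy fitness is a placeholder:

import numpy as np

# `opt` is assumed to be a configured PopulationDistribution instance.
history = opt.run(fitness=lambda x: -np.sum(np.asarray(x) ** 2))  # run() maximizes the callable
best_solution, best_value = history.best()  # History.best(), as used in optimize() below

# Passing fitness=None instead makes run() look the function up in
# pyec.util.registry.BENCHMARKS using the `function` property of the Config.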
Example #3
 def __getitem__(self, pair):
    """Call ``update`` by splitting out the pair into a :class:`History` and
    a fitness function. Returns the optimizer for use as a continuation.
    
    :param pair: A tuple with the history and fitness.
    :type pair: A ``tuple`` of a :class:`History` and a callable object
    :returns: This optimizer (``self``)
    
    """
    history, fitness = pair
   
    # Set up history
    if history is None:
       history = self.config.history(self.config)
    
    if not self.compatible(history):
        err = ("Got an incompatible history in __getitem__; "
               "expected [history,fitness]")
        raise ValueError(err)
        
    # Set up the fitness if necessary
    if fitness is None:
        fitness = self.config.function
    
     if isinstance(fitness, basestring):
         from pyec.util.registry import BENCHMARKS
         fitness = BENCHMARKS.load(fitness)
        try:
            fitness.algorithm = self
            fitness.config = self.config
        except Exception:
            pass
        self.config.fitness = fitness    
  
    if not inspect.isfunction(fitness) and not hasattr(fitness, '__call__'):
        err = ("Second object in __getitem__ is not a function; "
               "expected [history,fitness]")
        raise ValueError(err)
  
    self.update(history, fitness)
    return self
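A sketch of the continuation style this method enables, following the ``alg[None, optfunc]()`` pattern from the ``optimize`` example below (``alg`` and ``my_fitness`` are placeholders):

# __getitem__ wires a history and a fitness into the optimizer and returns the
# optimizer itself, so it can be called immediately to sample a population.
pop = alg[None, "sphere"]()            # None -> fresh History; the string is loaded via BENCHMARKS
pop = alg[alg.history, my_fitness]()   # reuse an existing History with a callable fitness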
Example #4
def optimize(optimizer, func, dimension=5, population=25, generations=100, **kwargs):
   """
      Configure and run an optimizer on a function.
      
      By default the function will be minimized, but maximization can be performed by setting the keyword argument *minimize* to ``False``.
      
      Benchmark functions can be optimized by name. The following names are supported:
      
      - ackley -- A checker-board like oscillator, minimum is -13.37 in 5 dimensions.
      - ackley2 -- Exponentiated and centered version of ackley, minimum is 0 at 0.
      - griewank -- Oscillator with large scale, minimum at 0.
      - langerman -- Sparse, rough, multi-modal. Minimum is 0.98 in five dimensions. 
      - rosenbrock -- Standard benchmark.
      - rastrigin -- Oscillator. Minimum is 0 at 0.
      - salomon -- Ring oscillation. Minimum 0 at 0.
      - schwefel -- Deceptive multimodal function. Minimum is -418 on (-512,512).
      - shekel2 -- Shekel's foxholes, modified. Minimum is -10.4 in five dimensions. 
      - sphere -- A spherical paraboloid, minimum is 0 at 0.
      - whitley -- Complex, fractal like shape with small relevant area. Minimum is 0.0.
      - weierstrass -- Everywhere continuous, nowhere differentiable; minimum is 0 at 0.
      
      
      :param optimizer: A :class:`PopulationDistribution` subclass
      :type optimizer: ``class``
      :param func: The function to be optimized, or a lookup key for a benchmark.
      :type func: any callable object or str
      :param dimension: The vector dimension in the search domain
      :type dimension: int
      :param population: The population size (sample size) for the optimizer.
      :type population: int
      :param generations: The number of populations to build (number of samples) during optimization.
      :type generations: int
      :returns: A tuple ``(best solution, best value)``, where *best solution* is the best point found during optimization and *best value* is the value of the function at that point.
      
      
      Keyword arguments:
      
      * minimize -- Whether to minimize the function, otherwise maximize; default is True.
      * initial -- A callable (no arguments) that returns random starting points for the initial distribution of the optimizer.
      * display -- Show progress information once every second.
      * constraint -- A :class:`Boundary` object implementing a constraint region (default is unconstrained). 
      
   """
   space = ("constraint" in kwargs and kwargs["constraint"]
            or Euclidean(dim=dimension))
   config = {
      "minimize":True,
      "space":space,
      "populationSize":population
   }
   config.update(kwargs)
   
   if isinstance(func, basestring):
      from pyec.util.registry import BENCHMARKS
      func = BENCHMARKS.load(func)
      #if config["minimize"]:
      #   h = func
      #   func = lambda x: -h(x)
         
   if config["minimize"]:
     optfunc = lambda x: -func(x)
   else:
     optfunc = func
   
   config = Config(**config)
   alg = (optimizer[config] << generations)()
   pop = alg[None, optfunc]()
   alg.history.update(pop, optfunc, space, alg)
   return alg.history.best()
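A usage sketch for ``optimize`` itself. The import paths and the ``DifferentialEvolution`` optimizer class are assumptions (they do not appear in the listing above); the call signature and keyword names come from the docstring:

from pyec.optimize import optimize                      # assumed module path for this helper
from pyec.distribution.de import DifferentialEvolution  # assumed optimizer class

# Minimize the "sphere" benchmark in 10 dimensions (minimize=True is the default).
solution, value = optimize(DifferentialEvolution, "sphere",
                           dimension=10, population=50, generations=200)

# Maximize a custom callable instead of a named benchmark; points in the
# Euclidean search space are assumed to arrive as NumPy arrays.
solution, value = optimize(DifferentialEvolution,
                           lambda x: -((x - 1.0) ** 2).sum(),
                           dimension=5, minimize=False)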