Example #1
    def __init__(self, fi, K, methods):
        self.fi = fi
        self.K = K
        self.methods = methods

        # A population of solution x points
        self.points = 10. * np.random.rand(self.K, self.fi.dim) - 5.
        # A population of solution y points
        self.values = np.zeros(self.K) + 1e10
        # A population of minimizers
        self.minimizers = [self._minimizer_make(i) for i in range(self.K)]
        # A population of iteration counters
        self.iters = np.zeros(self.K, dtype=int)

        self.total_steps = 0
        self.total_iters = 0
        self.data = SteppingData(self.fi)
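The constructor samples the initial population uniformly from the [-5, 5) box that all of these examples search over, and uses 1e10 as a "not evaluated yet" sentinel for the y values. A minimal stand-alone sketch of that initialization, using only NumPy and hypothetical K/dim values:

import numpy as np

K, dim = 8, 5                               # hypothetical population size and problem dimension
points = 10. * np.random.rand(K, dim) - 5.  # K points drawn uniformly from the [-5, 5) box
values = np.zeros(K) + 1e10                 # sentinel "not evaluated yet" objective values
iters = np.zeros(K, dtype=int)              # per-member iteration counters
assert points.shape == (K, dim) and points.min() >= -5. and points.max() < 5.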
Example #2
def minimize_f(fi, method=None, wantrestarts=0):
    """
    Minimize the ``fi`` function instance.  Returns the number of independent
    restarts performed.
    """
    f = fi.f
    n_restarts = -1
    n_iters = 0

    mm = MinimizeMethod(method, fi)
    mmdata = SteppingData(fi)

    # independent restarts until maxfunevals or ftarget is reached
    while not ((f.evaluations > 1 and f.fbest < f.ftarget)
               or f.evaluations > fi.maxfunevals):
        n_restarts += 1
        if n_restarts > 0:
            f.restart('independent restart')  # additional info
        maxfevals = fi.maxfunevals / (wantrestarts + 1)

        x0 = 10. * np.random.rand(fi.dim) - 5.

        class MMCallback:
            def __init__(self, fi, f, maxfevals, mm, data, n_iters):
                self.restarts = 0
                self.fi = fi
                self.f = f
                self.maxfevals = maxfevals
                self.basefevals = self.f.evaluations
                self.mm = mm
                self.data = data
                self.n_iters = n_iters
            def __call__(self, x):
                self.n_iters += 1
                y = self.fi.evalfun(x)
                self.data.record(0, self.mm.name, self.n_iters, y - self.fi.f.fopt, x)

                if y < self.f.ftarget:
                    raise MMCancel()
                elif self.f.evaluations - self.basefevals > self.maxfevals:
                    raise MMCancel()
                elif self.f.evaluations > self.fi.maxfunevals:
                    raise MMCancel()
        cb = MMCallback(fi, f, maxfevals, mm, mmdata, n_iters)

        try:
            warnings.simplefilter("ignore") # ignore warnings about unused/ignored options
            mm(f.evalfun, x0, inner_cb=cb)
        except MMCancel:
            pass # Ok.

        n_iters = cb.n_iters

    return n_restarts
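minimize_f splits the overall evaluation budget evenly across the desired restarts (maxfevals = fi.maxfunevals / (wantrestarts + 1), so e.g. maxfunevals = 10000 with wantrestarts = 4 gives 2000 evaluations per restart), and the callback aborts the inner minimizer by raising MMCancel once the target or a budget is hit. A self-contained sketch of the same abort-via-exception pattern, here with scipy.optimize.minimize and the Rosenbrock function standing in for the MinimizeMethod wrapper; the Cancel class and the budget dict are illustrative names, not part of the code above:

import numpy as np
from scipy.optimize import minimize, rosen

class Cancel(Exception):
    """Raised from the callback to abort the inner minimizer early."""

budget = {"evals": 0, "max": 200}  # hypothetical per-restart evaluation budget

def counted_rosen(x):
    budget["evals"] += 1
    return rosen(x)

def callback(xk):
    # Stop the local search once this restart's share of the budget is spent.
    if budget["evals"] > budget["max"]:
        raise Cancel()

x0 = 10. * np.random.rand(4) - 5.
try:
    minimize(counted_rosen, x0, method="Nelder-Mead", callback=callback)
except Cancel:
    pass  # budget exhausted; the outer loop would move on to the next restart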
Example #3
    def __init__(self, fi, K, methods):
        self.fi = fi
        self.K = K
        self.methods = methods

        # A population of solution x points
        self.points = 10. * np.random.rand(self.K, self.fi.dim) - 5.
        # A population of solution y points
        self.values = np.zeros(self.K) + 1e10
        # A population of minimizers
        self.minimizers = [self._minimizer_make(i) for i in range(self.K)]
        # A population of iteration counters
        self.iters = np.zeros(self.K, dtype=int)

        self.total_steps = 0
        self.total_iters = 0
        self.data = SteppingData(self.fi)
Example #4
class Population:
    """
    ``points`` contains the solution points of the population.
    ``minimizers`` contains the optimizer instances associated with these points.
    """
    def __init__(self, fi, K, methods):
        self.fi = fi
        self.K = K
        self.methods = methods

        # A population of solution x points
        self.points = 10. * np.random.rand(self.K, self.fi.dim) - 5.
        # A population of solution y points
        self.values = np.zeros(self.K) + 1e10
        # A population of minimizers
        self.minimizers = [self._minimizer_make(i) for i in range(self.K)]
        # A population of iteration counters
        self.iters = np.zeros(self.K, dtype=int)

        self.total_steps = 0
        self.total_iters = 0
        self.data = SteppingData(self.fi)

    def _minimizer_make(self, i):
        warnings.simplefilter("ignore") # ignore warnings about unused/ignored options
        return MinimizeStepping(self.fi.f.evalfun, self.points[i],
                self.methods[i % len(self.methods)])

    def step_one(self, i):
        """
        Perform a single minimization step with member i.
        Returns an (x,y) tuple.
        """
        for retry in [0,1]: # retry once if StopIteration
            try:
                # Step by a single iteration of the minimizer
                self.points[i] = self.minimizers[i].next()
                x = self.points[i]
                break
            except StopIteration:
                # Local optimum, pick a new random point
                x = self.points[i]
                self.restart_one(i)
                # We did no computation for [i] yet in this iteration
                # so make a step right away
                continue

        # Get the value at this point
        y = self.fi.evalfun(x)
        self.values[i] = y
        self.iters[i] += 1
        self.total_steps += 1
        self.data.record(i, self.minimizers[i].minmethod.name, self.iters[i], self.values[i] - self.fi.f.fopt, self.points[i])
        return (x, y)

    def restart_one(self, i):
        """
        Reinitialize a given population member.
        """
        self.points[i] = 10. * np.random.rand(self.fi.dim) - 5.
        self.values[i] = 1e10
        self.minimizers[i] = self._minimizer_make(i)
        self.iters[i] = 0

        #y = self.fi.f.evalfun(self.points[i]) # This is just for the debug print
        #print("#%d reached local optimum %s=%s" % (i, self.points[i], y))
        #time.sleep(1)

    def add(self):
        """
        Add another population member.
        """
        self.points = np.append(self.points, [10. * np.random.rand(self.fi.dim) - 5.], axis = 0)
        self.values = np.append(self.values, [1e10], axis = 0)
        i = len(self.points) - 1
        self.minimizers.append(self._minimizer_make(i))
        self.iters = np.append(self.iters, [0], axis = 0)

        #y = self.fi.f.evalfun(self.points[i]) # This is just for the debug print
        #print("#%d new member %s=%s" % (i, self.points[i], y))
        #time.sleep(1)
        return i

    def end_iter(self):
        """
        Notify the population that a single portfolio iteration has passed.
        This is useful in case we step multiple method instances within
        a single iteration (e.g. in MetaMax).
        """
        self.total_iters += 1
        self.data.end_iter()

    def stop(self):
        for m in self.minimizers:
            m.stop()
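step_one advances a single member by one iteration of its minimizer and falls back to restart_one when the local search raises StopIteration. A self-contained sketch of this round-robin stepping idea, with a plain Python generator standing in for MinimizeStepping and a sphere objective standing in for fi; all names in it are illustrative only:

import numpy as np

def stepping_descent(f, x, step=0.2, max_steps=50):
    # A toy "stepping" local search: yield the current point once per iteration,
    # then let StopIteration signal that the search is exhausted.
    for _ in range(max_steps):
        cand = x + step * (np.random.rand(x.size) - 0.5)
        if f(cand) < f(x):
            x = cand
        yield x

def sphere(x):
    return float(np.sum(x ** 2))

K, dim = 3, 2
points = 10. * np.random.rand(K, dim) - 5.
steppers = [stepping_descent(sphere, points[i]) for i in range(K)]

for iteration in range(200):
    for i in range(K):                # one step per member per portfolio iteration
        try:
            points[i] = next(steppers[i])
        except StopIteration:         # member i's local search ended: restart it
            points[i] = 10. * np.random.rand(dim) - 5.
            steppers[i] = stepping_descent(sphere, points[i])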
Example #5
class Population:
    """
    ``points`` contains the solution points of the population.
    ``minimizers`` contains the optimizer instances associated with these points.
    """
    def __init__(self, fi, K, methods):
        self.fi = fi
        self.K = K
        self.methods = methods

        # A population of solution x points
        self.points = 10. * np.random.rand(self.K, self.fi.dim) - 5.
        # A population of solution y points
        self.values = np.zeros(self.K) + 1e10
        # A population of minimizers
        self.minimizers = [self._minimizer_make(i) for i in range(self.K)]
        # A population of iteration counters
        self.iters = np.zeros(self.K, dtype=int)

        self.total_steps = 0
        self.total_iters = 0
        self.data = SteppingData(self.fi)

    def _minimizer_make(self, i):
        warnings.simplefilter(
            "ignore")  # ignore warnings about unused/ignored options
        return MinimizeStepping(self.fi.f.evalfun, self.points[i],
                                self.methods[i % len(self.methods)])

    def step_one(self, i):
        """
        Perform a single minimization step with member i.
        Returns an (x,y) tuple.
        """
        for retry in [0, 1]:  # retry once if StopIteration
            try:
                # Step by a single iteration of the minimizer
                self.points[i] = self.minimizers[i].next()
                x = self.points[i]
                break
            except StopIteration:
                # Local optimum, pick a new random point
                x = self.points[i]
                self.restart_one(i)
                # We did no computation for [i] yet in this iteration
                # so make a step right away
                continue

        # Get the value at this point
        y = self.fi.evalfun(x)
        self.values[i] = y
        self.iters[i] += 1
        self.total_steps += 1
        self.data.record(i, self.minimizers[i].minmethod.name, self.iters[i],
                         self.values[i] - self.fi.f.fopt, self.points[i])
        return (x, y)

    def restart_one(self, i):
        """
        Reinitialize a given population member.
        """
        self.points[i] = 10. * np.random.rand(self.fi.dim) - 5.
        self.values[i] = 1e10
        self.minimizers[i] = self._minimizer_make(i)
        self.iters[i] = 0

        #y = self.fi.f.evalfun(self.points[i]) # This is just for the debug print
        #print("#%d reached local optimum %s=%s" % (i, self.points[i], y))
        #time.sleep(1)

    def add(self):
        """
        Add another population member.
        """
        self.points = np.append(self.points,
                                [10. * np.random.rand(self.fi.dim) - 5.],
                                axis=0)
        self.values = np.append(self.values, [1e10], axis=0)
        i = len(self.points) - 1
        self.minimizers.append(self._minimizer_make(i))
        self.iters = np.append(self.iters, [0], axis=0)

        #y = self.fi.f.evalfun(self.points[i]) # This is just for the debug print
        #print("#%d new member %s=%s" % (i, self.points[i], y))
        #time.sleep(1)
        return i

    def end_iter(self):
        """
        Notify the population that a single portfolio iteration has passed.
        This is useful in case we step multiple method instances within
        a single iteration (e.g. in MetaMax).
        """
        self.total_iters += 1
        self.data.end_iter()

    def stop(self):
        for m in self.minimizers:
            m.stop()