Example #1
import operator
import time

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.optimize import brute

from functions import functionObj
# The optimizer classes are assumed to live in models.optimizers, next to the
# BacktrackingLineSearch import shown in the later examples.
from models.optimizers import (BacktrackingLineSearch, CubicInterpolation,
                               DaviesSwannCampey, DichotomousSearch,
                               FibonacciSearch, GoldenSectionSearch,
                               QuadraticInterpolationSearch)


def run_exercise(func,
                 f_string,
                 interval,
                 plot_func=True,
                 seed=32,
                 epsilon=1e-5,
                 textpos=(3, 5)):
    all_fx_names = [
        'Brute Force', 'Dichotomous Search', 'Fibonacci Search',
        'Golden-Section Search', 'Quadratic Interpolation Method',
        'Cubic Interpolation Method', 'Davies, Swann and Campey Algorithm',
        'Backtracking Line Search'
    ]

    np.random.seed(seed)  # forces repeatability

    # objects that log all info during minimization
    f_x = functionObj(func)
    f_x_DS = functionObj(func)
    f_x_FBS = functionObj(func)
    f_x_GSS = functionObj(func)
    f_x_QIM = functionObj(func)
    f_x_CIM = functionObj(func)
    f_x_DSC = functionObj(func)
    f_x_BLS = functionObj(func)
    all_fx = [
        f_x, f_x_DS, f_x_FBS, f_x_GSS, f_x_QIM, f_x_CIM, f_x_DSC, f_x_BLS
    ]

    # Brute Force
    start_time = time.process_time()
    min_brute = brute(f_x, (tuple(interval), ), full_output=True)
    brute_time = time.process_time() - start_time
    # Plot function if wanted
    if plot_func:
        x = np.linspace(interval[0], interval[1], 100)
        plt.plot(x, f_x(x, save_eval=False), label=f_string)
        plt.annotate('min_x: %.6f' % (min_brute[0][0]) + '\nmin_fx: %.6f' %
                     (min_brute[1]),
                     xy=textpos,
                     xycoords='axes pixels')
        plt.xlabel('$x$')
        plt.ylabel('$f(x)$')
        plt.legend()
        plt.show()

    timings = []
    # Minimizations
    timings.append(time.process_time())
    DichotomousSearch(f_x_DS,
                      epsilon=epsilon / 10,
                      interval=interval,
                      xtol=epsilon).find_min()
    timings.append(time.process_time())
    FibonacciSearch(f_x_FBS, interval=interval, xtol=epsilon).find_min()
    timings.append(time.process_time())

    GoldenSectionSearch(f_x_GSS, interval=interval, xtol=epsilon).find_min()
    timings.append(time.process_time())
    QuadraticInterpolationSearch(f_x_QIM, interval=interval,
                                 xtol=epsilon).find_min()
    timings.append(time.process_time())
    CubicInterpolation(f_x_CIM, interval=interval, xtol=epsilon).find_min()
    timings.append(time.process_time())
    DaviesSwannCampey(f_x_DSC,
                      x_0=np.random.uniform(interval[0], interval[1], size=1),
                      interval=interval,
                      xtol=epsilon).find_min()
    timings.append(time.process_time())

    BacktrackingLineSearch(f_x_BLS,
                           initial_x=np.random.uniform(interval[0],
                                                       interval[1],
                                                       size=1),
                           interval=interval,
                           xtol=epsilon).find_min()
    timings.append(time.process_time())

    timings = list(map(operator.sub, timings[1:], timings[:-1]))
    timings = [brute_time] + timings
    # Create dataframe
    methods = ['best_x', 'best_f', 'fevals', 'all_evals', 'all_x']
    dict_fx = {fx_name: {method: getattr(fx, method) for method in methods}\
               for fx_name, fx in zip(all_fx_names, all_fx)}
    df = pd.DataFrame(dict_fx).T
    df['best_f'] = df['best_f'].map(lambda x: x
                                    if not hasattr(x, '__iter__') else x[0])
    df['best_x'] = df['best_x'].map(lambda x: x
                                    if not hasattr(x, '__iter__') else x[0])
    df['all_evals'] = df['all_evals'].map(lambda x: np.array(x) if not hasattr(x[0], '__iter__') \
                                          else np.array(x).flatten())
    df['all_x'] = df['all_x'].map(lambda x: np.array(x) if not hasattr(x[0], '__iter__') \
                                  else np.array(x).flatten())
    df['run_time (s)'] = timings
    return df
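
A minimal usage sketch, assuming the project's functions and models.optimizers modules are importable; the test function, its label and the interval below are illustrative choices, not values from the source:

f = lambda x: x**2 - 4 * x + 4
results = run_exercise(f, '$x^2 - 4x + 4$', interval=[-10, 10])
print(results[['best_x', 'best_f', 'fevals', 'run_time (s)']])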
Example #2
from functions import functionObj
from models.optimizers import BacktrackingLineSearch
import autograd.numpy as np
import matplotlib.pyplot as plt

f_x = lambda x: x**2 - 4 * x + 4
f_x_obj = functionObj(f_x)

np.random.seed(42)

x_0 = np.random.randn(1) * 100

opt = BacktrackingLineSearch(f_x_obj, initial_x=x_0, alpha=0.01, beta=0.5)

x_min = opt.find_min()
print('X: %.9f \nF_x: %.9f' % (x_min, f_x_obj(x_min)))
print('Function evals: %d' % (f_x_obj.fevals - 1))

plt.plot(f_x_obj.all_evals, label='F(x)')
plt.legend()
plt.show()
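
Because the functionObj wrapper records every evaluation in all_evals, the plot traces f(x) per function evaluation, giving a quick visual check of how fast the backtracking search approaches the minimizer of x^2 - 4x + 4 at x = 2.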
Example #3
import numpy as np
from functions import order4_polynomial, functionObj
from models.optimizers import InexactLineSearch, BacktrackingLineSearch

x_0 = np.array([-np.pi, np.pi])
d_0 = np.array([1.0, -1.1])
func = functionObj(order4_polynomial)
item_d_optimizer = InexactLineSearch(func, x_0, d_0)
backtracking_opt = BacktrackingLineSearch(func, x_0, d_0)
alpha_f, f0_f = item_d_optimizer._line_search()
alpha_b, f0_b = backtracking_opt._backtracking_line_search(func.grad(x_0))
print('Inexact Line Search Methods, line search step:')
print(' - Fletcher solution\n   alpha: %.7f\n   f: %.7f' % (alpha_f, f0_f))
print(' - Backtracking solution\n   alpha: %.7f\n   f: %.7f' % (alpha_b, f0_b))

func_f = functionObj(order4_polynomial)
func_b = functionObj(order4_polynomial)
item_d_optimizer = InexactLineSearch(func_f, x_0, d_0)
#backtracking_opt = BacktrackingLineSearch(func_b, x_0, d_0)
item_d_optimizer.find_min()
#backtracking_opt.find_min()
print('Inexact Line Search Methods for minimization:')
print(' - Fletcher solution\n   x_min: %s\n   f_min: %.7f' %
      (func_f.best_x, func_f.best_f))
#print(' - Backtracking solution\n   x_min: %s\n   f_min: %.7f' % (func_b.best_x, func_b.best_f))
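
Backtracking line search typically enforces the Armijo sufficient-decrease condition. A minimal generic sketch, independent of the project's BacktrackingLineSearch implementation (the alpha, beta and c values are illustrative defaults):

import autograd.numpy as np
from autograd import grad

def armijo_backtracking(f, x, d, alpha=1.0, beta=0.5, c=1e-4):
    # shrink the step until f(x + alpha*d) <= f(x) + c * alpha * grad_f(x)^T d
    g = grad(f)(x)
    while f(x + alpha * d) > f(x) + c * alpha * np.dot(g, d):
        alpha *= beta
    return alpha

f = lambda x: np.sum(x**2)
print(armijo_backtracking(f, np.array([2.0, -1.0]), np.array([-4.0, 2.0])))  # 0.5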
Example #4
import operator
import time

import numpy as np
from scipy.optimize import minimize

from functions import functionObj
# UnconstrainProblem, create_df and _plot_charts are assumed to be defined
# elsewhere in the project; they are not shown in this snippet.


def run_exercise(func,
                 eqc,
                 iqc,
                 optimizers,
                 initial_x,
                 mu=20,
                 line_search=None,
                 seed=42,
                 epsilon=1e-6,
                 maxIter=1e3,
                 plot_charts=True):
    opt_names = []

    np.random.seed(seed)  # forces repeatability
    optimizers = optimizers + [minimize]  # copy rather than mutate the caller's list
    all_fx = [functionObj(func, eqc=eqc, iqc=iqc) for _ in optimizers]
    timings = []

    timings.append(time.process_time())
    for fx, opt in zip(all_fx, optimizers):
        if type(opt) is tuple:
            opt, line_search = opt
        opt_names += [
            opt.__name__ if line_search is None else opt.__name__ + ' + ' +
            line_search.__name__
        ]
        try:
            if line_search is not None:
                UnconstrainProblem(func=fx,
                                   x_0=initial_x,
                                   opt=opt,
                                   line_search_optimizer=line_search,
                                   xtol=epsilon,
                                   maxIter=maxIter).find_min()
            elif opt is minimize:
                # scipy.optimize.minimize fallback: re-solve the smoothed problem,
                # multiplying smooth_log_constant by mu each round until the
                # niq / smooth_log_constant ratio drops below epsilon
                x0 = initial_x
                while fx.niq / fx.smooth_log_constant > epsilon:
                    res = minimize(fun=fx, x0=x0)
                    if fx._has_eqc:
                        x0 = fx.best_z
                    else:
                        x0 = fx.best_x
                    fx.smooth_log_constant *= mu
                fx.grad_evals = res.njev + res.nhev
            else:
                UnconstrainProblem(func=fx,
                                   x_0=initial_x,
                                   opt=opt,
                                   xtol=epsilon,
                                   maxIter=maxIter).find_min()
        except Exception as e:
            print(opt.__name__ + " didn't converge. " + repr(e))
        line_search = None
        timings.append(time.process_time())

    timings = list(map(operator.sub, timings[1:], timings[:-1]))

    df = create_df(opt_names, all_fx, timings)

    if plot_charts:
        opt_name = opt.__name__
        _plot_charts(df, opt_name)

    return df
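
The minimize branch above repeatedly re-solves the smoothed problem while multiplying smooth_log_constant by mu, which mirrors the usual barrier-method outer loop that stops once m / t falls below the tolerance. A minimal generic sketch of that loop on a toy constrained problem, using only scipy.optimize.minimize (all names and values here are illustrative, not the project's API):

import numpy as np
from scipy.optimize import minimize

f = lambda x: (x[0] - 3.0)**2  # objective
g = lambda x: x[0] - 1.0       # inequality constraint g(x) <= 0

def barrier(x, t):
    s = -g(x)                  # slack; must stay strictly positive
    if s <= 0:
        return 1e10            # large penalty keeps iterates feasible
    return f(x) - np.log(s) / t

x0, t, mu, m, eps = np.array([0.0]), 1.0, 20.0, 1, 1e-6
while m / t > eps:             # m = number of inequality constraints
    res = minimize(barrier, x0, args=(t,), method='Nelder-Mead')
    x0, t = res.x, t * mu      # warm-start the next round, tighten the barrier
print(x0)                      # approaches the constrained minimizer x = 1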
Example #5
from autograd import grad
from functions import functionObj

f = lambda x: x**2 - 4 * x

func = functionObj(f)

print(func(-2), func.fevals)  # plain evaluation through the wrapper

print(func.grad(-2), func.fevals)  # gradient via the wrapper's own grad method

grad_func = grad(func)  # autograd can also differentiate the wrapper directly

print(grad_func(-2.0), func.fevals)  # note the float argument, which autograd requires
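
Each print also reports the running fevals counter, so the snippet makes the wrapper's bookkeeping visible: the direct call, the wrapper's grad method, and autograd's grad applied to the wrapper all appear to route through functionObj and bump the counter. Autograd requires a float argument for differentiation, which is why the last call uses -2.0 rather than -2.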