Exemplo n.º 1
0
def test_fmin_lbfgs():
    """Minimize x**2 under both the Armijo and strong-Wolfe line searches."""

    def objective(x, grad, *args):
        # Write the analytic derivative into the gradient buffer in place.
        grad[0] = 2 * x
        return x ** 2

    for search in ('armijo', 'strongwolfe'):
        result = fmin_lbfgs(objective, 100., line_search=search)
        assert_array_equal(result, [0])
Exemplo n.º 2
0
def test_fmin_lbfgs():
    """Minimize x**2; the gradient must be written into ``g`` in place."""

    def f(x, g, *args):
        # Assign into the optimizer-supplied gradient buffer.  The original
        # code rebound the local name (``g = [2 * x]``), which leaves the
        # caller's buffer untouched, and then iterated over an undefined
        # name ``res`` (a guaranteed NameError at the first evaluation).
        g[0] = 2 * x
        return x ** 2

    xmin = fmin_lbfgs(f, 100., line_search='armijo')
    assert_array_equal(xmin, [0])

    xmin = fmin_lbfgs(f, 100., line_search='strongwolfe')
    assert_array_equal(xmin, [0])
Exemplo n.º 3
0
    def test_owl_line_search_warning_explicit(self):
        """Every non-Wolfe line search warns when OWL-QN is enabled."""

        def objective(x, grad, *args):
            grad[0] = 2 * x
            return x ** 2

        # Each of these line searches must emit the OWL-QN UserWarning.
        for search in ('default', 'morethuente', 'armijo', 'strongwolfe'):
            with pytest.warns(UserWarning, match="OWL-QN"):
                fmin_lbfgs(objective, 100., orthantwise_c=1,
                           line_search=search)
Exemplo n.º 4
0
def test_fmin_lbfgs():
    """The default line search drives x**2 to its minimum at 0."""

    def objective(x, grad, *args):
        grad[0] = 2 * x  # analytic gradient, written in place
        return x ** 2

    assert_array_equal(fmin_lbfgs(objective, 100.), [0])
Exemplo n.º 5
0
def test_2d():
    """Minimize sum(x**2) over a (2, 2) variable.

    Checks that shapes are preserved through the callbacks and that both
    the objective and the progress callback are actually invoked.
    """

    def objective(x, g, n_calls):
        assert_equal(x.shape, (2, 2))
        assert_equal(g.shape, x.shape)
        g[:] = 2 * x       # gradient of sum(x**2), written in place
        n_calls[0] += 1    # count objective evaluations
        return (x ** 2).sum()

    def progress(x, g, fx, xnorm, gnorm, step, k, ls, *args):
        assert_equal(x.shape, (2, 2))
        assert_equal(g.shape, x.shape)

        # The reported norms must match the Frobenius norms of x and g.
        assert_equal(np.sqrt((x ** 2).sum()), xnorm)
        assert_equal(np.sqrt((g ** 2).sum()), gnorm)

        p_calls[0] += 1    # count progress callbacks
        return 0

    f_calls = [0]
    p_calls = [0]

    xmin = fmin_lbfgs(objective, [[10., 100.], [44., 55.]], progress,
                      args=[f_calls])
    assert_greater(f_calls[0], 0)
    assert_greater(p_calls[0], 0)
    assert_array_almost_equal(xmin, [[0, 0], [0, 0]])
Exemplo n.º 6
0
def test_2d():
    """Same (2, 2) minimization, verified with plain assert statements."""

    def objective(x, g, counter):
        assert x.shape == (2, 2)
        assert g.shape == x.shape
        g[:] = 2 * x       # in-place gradient of sum(x**2)
        counter[0] += 1
        return (x ** 2).sum()

    def progress(x, g, fx, xnorm, gnorm, step, k, ls, *args):
        assert x.shape == (2, 2)
        assert g.shape == x.shape

        # Reported norms must agree with the Frobenius norms of x and g.
        assert np.sqrt((x ** 2).sum()) == xnorm
        assert np.sqrt((g ** 2).sum()) == gnorm

        p_calls[0] += 1
        return 0

    f_calls = [0]
    p_calls = [0]

    xmin = fmin_lbfgs(objective, [[10., 100.], [44., 55.]], progress,
                      args=[f_calls])
    assert f_calls[0] > 0
    assert p_calls[0] > 0
    assert_array_almost_equal(xmin, [[0, 0], [0, 0]])
Exemplo n.º 7
0
    def test_owl_line_search_default(self):
        """OWL-QN with the default line search emits a UserWarning."""

        def objective(x, grad, *args):
            grad[0] = 2 * x
            return x ** 2

        with pytest.warns(UserWarning, match="OWL-QN"):
            fmin_lbfgs(objective, 100., orthantwise_c=1)
Exemplo n.º 8
0
    def test_owl_qn(self):
        """OWL-QN with the 'wolfe' line search converges to the minimum."""

        def objective(x, grad, *args):
            grad[0] = 2 * x
            return x ** 2

        result = fmin_lbfgs(objective, 100., orthantwise_c=1,
                            line_search='wolfe')
        assert_array_equal(result, [0])
Exemplo n.º 9
0
def lbfgs_modified_logistic_regression(X, y, b=None):
    """Same as modified LR, but solved using lbfgs.

    Parameters
    ----------
    X : design matrix; a leading column is prepended by prepend_and_vars.
    y : target vector of length X.shape[0].
    b : float or None
        If given, the bias term is held fixed at this value; otherwise it
        starts at DEFAULT_B and is optimized along with theta.

    Returns
    -------
    (theta, b) : the fitted weight vector and bias term.
    """
    X, theta, N, M = prepend_and_vars(X)

    if b is None:
        fix_b, b = False, DEFAULT_B
    else:
        fix_b, b = True, b

    def f(w, g, X, y):
        """Return the objective at w and write its gradient into g.

        ``w[0]`` is the bias b, ``w[1:]`` is theta.  The gradient MUST be
        written into the buffer ``g`` supplied by the optimizer; the
        original code wrote it into ``w``, which both corrupted the
        current iterate and left ``g`` uninitialized.
        """
        b = w[0]
        theta = w[1:]
        value = np.sum(np.abs(y - (1.0 / (1.0 + (b ** 2) + X.dot(theta)))))

        ewx = np.exp(-X.dot(theta))
        b2ewx = (b * b) + ewx
        p = ((y - 1.0) / b2ewx) + (1.0 / (1.0 + b2ewx))

        dLdw = (p * ewx).reshape((X.shape[0], 1)) * X

        # Fill in the gradient buffer (NOT w).
        if fix_b:
            g[0] = 0.0  # bias held fixed: zero gradient component
        else:
            g[0] = np.sum(-2 * b * p)
        g[1:] = np.sum(dLdw, axis=0)
        return value

    import lbfgs
    w = np.hstack([np.array([b, ]), theta])
    answer = lbfgs.fmin_lbfgs(f, w, args=(X, y,))
    theta, b = answer[1:], answer[0]
    return theta, b
Exemplo n.º 10
0
    def test_owl_wolfe_no_warning(self):
        """OWL-QN combined with the 'wolfe' line search.

        NOTE(review): despite the original name/docstring claiming no
        warning, the assertion below expects a UserWarning matching
        "OWL-QN" to be raised — confirm which behavior is intended.
        """

        def objective(x, grad, *args):
            grad[0] = 2 * x
            return x ** 2

        with pytest.warns(UserWarning, match="OWL-QN"):
            fmin_lbfgs(objective, 100., orthantwise_c=1,
                       line_search='wolfe')
Exemplo n.º 11
0
def test_input_validation():
    """Invalid argument types are rejected with TypeError."""
    bad_calls = (
        lambda: fmin_lbfgs([], 1e4),                  # objective not callable
        lambda: fmin_lbfgs(lambda x: x, 1e4, "ham"),  # progress not callable
        lambda: fmin_lbfgs(lambda x: x, "spam"),      # x0 not numeric
    )
    for call in bad_calls:
        with pytest.raises(TypeError):
            call()
Exemplo n.º 12
0
"""Trivial example: minimize x**2 from any start value"""

import lbfgs
import sys


def f(x, g):
    """Return x[0]**2 and store its derivative 2*x[0] in g[0]."""
    value = x[0]
    g[0] = 2 * value
    return value ** 2


def progress(x, g, f_x, xnorm, gnorm, step, k, ls):
    """Report optimization progress.

    ``x`` and ``g`` arrive as length-1 ndarrays here; index them so the
    %-formatting receives true scalars — applying ``%g`` to an ndarray
    is an error on recent NumPy versions.
    """
    print("x = %8.2g     f(x) = %8.2g     f'(x) = %8.2g" % (x[0], f_x, g[0]))


# Parse the starting value from the command line; without one, print
# usage and exit with a failure status.
if len(sys.argv) < 2:
    print("usage: python %s start-value" % sys.argv[0])
    sys.exit(1)
x0 = float(sys.argv[1])

print("Minimum found: %f" % lbfgs.fmin_lbfgs(f, x0, progress)[0])
Exemplo n.º 13
0
"""Trivial example: minimize the Rosenbrock function from a fixed start value"""

import lbfgs
import sys


from scipy.optimize import minimize, rosen, rosen_der
import numpy as np

x0 = np.array([1.3, 0.7])

def f(x, g):
    """Rosenbrock objective: write the gradient into g and return f(x)."""
    g[:] = rosen_der(x)
    # Python-3 print call; the original used the Python-2 print statement
    # (`print "one call"`), a SyntaxError under Python 3 and inconsistent
    # with the print() calls elsewhere in this file.
    print("one call")
    return rosen(x)


def progress(x, g, f_x, xnorm, gnorm, step, k, ls):
    """Progress callback: intentionally silent, reports nothing."""
    return None


print("Minimum found", lbfgs.fmin_lbfgs(f, x0, progress))
Exemplo n.º 14
0
"""Trivial example: minimize x**2 from any start value"""

import lbfgs
import numpy as np
import sys

def f(x, g):
    """Evaluate x[0]**2, storing the derivative 2*x[0] in g[0]."""
    point = x[0]
    g[0] = point * 2
    return point * point

# Read the start value from the command line and report the minimizer.
x0 = np.asarray([float(sys.argv[1])])
# print() call instead of the original Python-2 print statement, which is
# a SyntaxError under Python 3; output is identical for a single argument.
print(lbfgs.fmin_lbfgs(f, x0)[0])