Example #1
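These snippets are test functions excerpted from the symgp test suite. Names such as Unknown, Constant, dx, dy, dz, Kernel, evaluate, update_kernel, NLML, and the symbols u, k, xi, yi, zi, xj, yj, zj, theta_1, theta_2, theta_3 come from module-level imports and fixtures that the excerpts do not show. A plausible preamble is sketched below; the exact import paths are an assumption, not confirmed by the source:

    # assumed module-level context for the excerpts (import paths are a guess)
    from symgp import Unknown, Constant, NLML, Kernel, evaluate, update_kernel
    from symgp import dx, dy, dz, RBF
    from sympy import Tuple, Derivative, Function, simplify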
def test_est_1d_2():
    u = Unknown('u', ldim=1)
    phi = Constant('phi')

    # ... define a partial differential operator as a lambda function
    from sympy.abc import x

    L = lambda u: x*dx(u) + phi*u
    L_expected = lambda u: x*dx(u) + 2.*u
    # ...

    # ... symbolic functions for unknown and rhs
    from sympy import sin, cos, lambdify

    u_sym = sin(x)
    f_sym = L_expected(u_sym)
    # ...

    # ... lambdification + evaluation
    from numpy import linspace, pi

    u_num = lambdify((x), u_sym, "numpy")
    f_num = lambdify((x), f_sym, "numpy")

    x_u = linspace(0, 2*pi, 10)
    x_f = x_u

    us = u_num(x_u)
    fs = f_num(x_f)
    # ...

    # compute the likelihood
    nlml = NLML(L(u), u, 'RBF')

    # set values
    nlml.set_u(x_u, us)
    nlml.set_f(x_f, fs)

    from numpy.random import rand
    from numpy import exp, ones
    from time import time

    # ... using pure python implementation
    from symgp.nelder_mead import nelder_mead

    x_start = rand(len(nlml.args))

    tb = time()
    m = nelder_mead(nlml, x_start,
                    step=0.1, no_improve_thr=10e-6, no_improv_break=10,
                    max_iter=0, alpha=1., gamma=2., rho=-0.5, sigma=0.5,
                    verbose=False)
    te = time()
    elapsed_python = te-tb

    # hyperparameters are optimized in log-space, so map back with exp
    args = exp(m[0])
    print('> estimated phi = ', nlml.map_args(args)['phi'])

    print('> elapsed time python = ', elapsed_python)
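A note on reading the result: L_expected above fixes the coefficient phi to 2.0, so the printed estimate should land near that value. A minimal follow-up check, with a hypothetical tolerance that is not part of the original test:

    phi_h = nlml.map_args(args)['phi']
    assert abs(phi_h - 2.0) < 0.5  # loose tolerance; Nelder-Mead from a random start is noisy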
Example #2
def test_kernel_2d_9():
    beta = Constant('beta')
    alpha = Constant('alpha')
    mu = Constant('mu')
    L = beta*dx(dx(u)) + alpha*dx(u) + mu*u

    # ...
    expected = alpha*Derivative(k, xi) + beta*Derivative(k, xi, xi) + mu*k(xi, yi)
    assert(evaluate(L, u, Kernel('K'), (Tuple(xi, yi),)) == expected)
    # ...

    # ...
    expected = alpha*Derivative(k, xj) + beta*Derivative(k, xj, xj) + mu*k(xj, yj)
    assert(evaluate(L, u, Kernel('K'), (Tuple(xj, yj),)) == expected)
    # ...

    # ...
    expected = (alpha**2*Derivative(k, xi, xj) +
                alpha*beta*Derivative(k, xi, xi, xj) +
                alpha*beta*Derivative(k, xi, xj, xj) +
                alpha*mu*Derivative(k, xi) +
                alpha*mu*Derivative(k, xj) +
                beta**2*Derivative(k, xi, xi, xj, xj) +
                beta*mu*Derivative(k, xi, xi) +
                beta*mu*Derivative(k, xj, xj) +
                mu**2*k(xi, yi, xj, yj))
    assert(evaluate(L, u, Kernel('K'), (Tuple(xi,yi), Tuple(xj,yj))) == expected)
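Reading the three assertions: evaluating at a single point tuple applies L to the kernel in that argument only, while evaluating at both tuples applies it in xi and then in xj, so the coefficients multiply pairwise (nine terms from {beta, alpha, mu} x {beta, alpha, mu}). A self-contained sympy sketch of the same bilinear pattern, independent of symgp (the stand-in function g is hypothetical):

    from sympy import Function, symbols, diff, expand

    xi, xj, alpha, beta, mu = symbols('xi xj alpha beta mu')
    g = Function('g')(xi, xj)  # stand-in for the kernel k(xi, xj)
    L = lambda f, v: beta*diff(f, v, 2) + alpha*diff(f, v) + mu*f
    # applying L in xi and then in xj yields nine terms whose coefficients
    # are the pairwise products of beta, alpha and mu
    print(expand(L(L(g, xi), xj)))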
Example #3
def test_kernel_2d_5():
    L = dx(dx(u))

    # ...
    expected = Derivative(k, xi, xi)
    assert(evaluate(L, u, Kernel('K'), (Tuple(xi, yi),)) == expected)
    # ...

    # ...
    expected = Derivative(k, xj, xj)
    assert(evaluate(L, u, Kernel('K'), (Tuple(xj, yj),)) == expected)
    # ...

    # ...
    expected = Derivative(k, xi, xi, xj, xj)
    assert(evaluate(L, u, Kernel('K'), (Tuple(xi,yi), Tuple(xj,yj))) == expected)
Example #4
def test_kernel_2d_11():
    phi = Constant('phi')
    L = phi * u + dx(u) + dy(u) + dz(dz(u))

    # ...
    expected = (phi * k(xi, yi, zi) + Derivative(k, xi) +
                Derivative(k, yi) + Derivative(k, zi, zi))
    assert (evaluate(L, u, Kernel('K'), (Tuple(xi, yi, zi), )) == expected)
    # ...

    # ...
    expected = (phi * k(xj, yj, zj) + Derivative(k, xj) +
                Derivative(k, yj) + Derivative(k, zj, zj))
    assert (evaluate(L, u, Kernel('K'), (Tuple(xj, yj, zj), )) == expected)
    # ...

    # ...
    expected = (phi**2 * k(xi, yi, zi, xj, yj, zj) + phi * Derivative(k, xi) +
                phi * Derivative(k, xj) + phi * Derivative(k, yi) +
                phi * Derivative(k, yj) + phi * Derivative(k, zi, zi) +
                phi * Derivative(k, zj, zj) + Derivative(k, xi, xj) +
                Derivative(k, xi, yj) + Derivative(k, yi, xj) +
                Derivative(k, yi, yj) + Derivative(k, xi, zj, zj) +
                Derivative(k, yi, zj, zj) + Derivative(k, zi, zi, xj) +
                Derivative(k, zi, zi, yj) + Derivative(k, zi, zi, zj, zj))
    assert (evaluate(L, u, Kernel('K'),
                     (Tuple(xi, yi, zi), Tuple(xj, yj, zj))) == expected)
Example #5
def test_kernel_1d_2():
    L = dx(u) + alpha*u

    # ...
    K = evaluate(L, u, Kernel('K'), xi)
    K = update_kernel(K, RBF, (xi, xj))

    expected = theta_1*(1.0*alpha - 1.0*xi + 1.0*xj)*exp(-0.5*(xi - xj)**2)
    assert(K == expected)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), xj)
    K = update_kernel(K, RBF, (xi, xj))

    expected = theta_1*(1.0*alpha + 1.0*xi - 1.0*xj)*exp(-0.5*(xi - xj)**2)
    assert(K == expected)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), (xi, xj))
    K = update_kernel(K, RBF, (xi, xj))

    expected = theta_1*(alpha**2 - 1.0*(xi - xj)**2 + 1.0)*exp(-0.5*(xi - xj)**2)
    assert(K == expected)
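The last expected expression can be checked with plain sympy, independently of symgp (unit theta_1 assumed, matching the RBF kernel used above):

    from sympy import symbols, exp, diff, simplify

    xi, xj, alpha = symbols('xi xj alpha')
    k = exp(-(xi - xj)**2 / 2)             # RBF kernel with unit amplitude
    L = lambda f, v: diff(f, v) + alpha*f  # L = d/dv + alpha
    LLk = L(L(k, xi), xj)                  # apply L in xi, then in xj
    expected = (alpha**2 - (xi - xj)**2 + 1) * exp(-(xi - xj)**2 / 2)
    print(simplify(LLk - expected))        # prints 0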
Example #6
def test_kernel_1d_2():
    L = dx(dx(u))

    # ...
    expected = Derivative(k, xi, xi)
    assert (evaluate(L, u, Kernel('K'), xi) == expected)
    # ...

    # ...
    expected = Derivative(k, xj, xj)
    assert (evaluate(L, u, Kernel('K'), xj) == expected)
    # ...

    # ...
    expected = Derivative(k, xi, xi, xj, xj)
    assert (evaluate(L, u, Kernel('K'), (xi, xj)) == expected)
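For an RBF kernel the fourth mixed derivative asserted last has a simple closed form; a standalone sympy check (independent of symgp, unit hyperparameters assumed):

    from sympy import symbols, exp, diff, simplify

    xi, xj = symbols('xi xj')
    k = exp(-(xi - xj)**2 / 2)
    LLk = diff(k, xi, 2, xj, 2)  # d^4 k / dxi^2 dxj^2
    r = xi - xj
    print(simplify(LLk - (r**4 - 6*r**2 + 3)*k))  # prints 0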
Example #7
def test_kernel_1d_4():
    L = dx(dx(u)) + dx(u) + u

    # ...
    expected = k(xi) + Derivative(k, xi) + Derivative(k, xi, xi)
    assert (evaluate(L, u, Kernel('K'), xi) == expected)
    # ...

    # ...
    expected = k(xj) + Derivative(k, xj) + Derivative(k, xj, xj)
    assert (evaluate(L, u, Kernel('K'), xj) == expected)
    # ...

    # ...
    expected = (k(xi, xj) + Derivative(k, xi) + Derivative(k, xj) +
                Derivative(k, xi, xi) + Derivative(k, xi, xj) +
                Derivative(k, xj, xj) + Derivative(k, xi, xi, xj) +
                Derivative(k, xi, xj, xj) + Derivative(k, xi, xi, xj, xj))
    assert (evaluate(L, u, Kernel('K'), (xi, xj)) == expected)
Example #8
def test_kernel_2d_10():
    L = dx(dx(u)) + dy(dy(u)) + dz(dz(u))

    # ...
    expected = (Derivative(k, xi, xi) + Derivative(k, yi, yi) +
                Derivative(k, zi, zi))
    assert (evaluate(L, u, Kernel('K'), (Tuple(xi, yi, zi), )) == expected)
    # ...

    # ...
    expected = (Derivative(k, xj, xj) + Derivative(k, yj, yj) +
                Derivative(k, zj, zj))
    assert (evaluate(L, u, Kernel('K'), (Tuple(xj, yj, zj), )) == expected)
    # ...

    # ...
    expected = (Derivative(k, xi, xi, xj, xj) + Derivative(k, xi, xi, yj, yj) +
                Derivative(k, xi, xi, zj, zj) + Derivative(k, yi, yi, xj, xj) +
                Derivative(k, yi, yi, yj, yj) + Derivative(k, yi, yi, zj, zj) +
                Derivative(k, zi, zi, xj, xj) + Derivative(k, zi, zi, yj, yj) +
                Derivative(k, zi, zi, zj, zj))
    assert (evaluate(L, u, Kernel('K'),
                     (Tuple(xi, yi, zi), Tuple(xj, yj, zj))) == expected)
Example #9
def test_kernel_3d_2():
    L = phi * u + dx(u) + dy(u) + dz(dz(u))

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xi, yi, zi)))
    K = update_kernel(K, RBF, ((xi, yi, zi), (xj, yj, zj)))

    expected = theta_1 * theta_2 * theta_3 * (
        phi**3 + 1.0 * phi**2 * (-xi + xj) + 1.0 * phi**2 * (-yi + yj)
        + 1.0 * phi**2 * (-zi + zj) + 1.0 * phi * (xi - xj) * (yi - yj)
        + 1.0 * phi * (xi - xj) * (zi - zj) + 1.0 * phi * (yi - yj) * (zi - zj)
        - 1.0 * (xi - xj) * (yi - yj) * (zi - zj)
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2) * exp(-0.5 * (zi - zj)**2)
    assert (simplify(K - expected) == 0)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xj, yj, zj)))
    K = update_kernel(K, RBF, ((xi, yi, zi), (xj, yj, zj)))

    expected = theta_1 * theta_2 * theta_3 * (
        phi**3 + 1.0 * phi**2 * (xi - xj) + 1.0 * phi**2 * (yi - yj)
        + 1.0 * phi**2 * (zi - zj) + 1.0 * phi * (xi - xj) * (yi - yj)
        + 1.0 * phi * (xi - xj) * (zi - zj) + 1.0 * phi * (yi - yj) * (zi - zj)
        + 1.0 * (xi - xj) * (yi - yj) * (zi - zj)
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2) * exp(-0.5 * (zi - zj)**2)
    assert (simplify(K - expected) == 0)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xi, yi, zi), Tuple(xj, yj, zj)))
    K = update_kernel(K, RBF, ((xi, yi, zi), (xj, yj, zj)))

    expected = theta_1 * theta_2 * theta_3 * (
        phi**2 + 2.0 * phi * ((zi - zj)**2 - 1) - 1.0 * (xi - xj)**2
        - 2.0 * (xi - xj) * (yi - yj) - 1.0 * (yi - yj)**2
        + 1.0 * (zi - zj)**4 - 6.0 * (zi - zj)**2 + 5.0
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2) * exp(-0.5 * (zi - zj)**2)
    assert (simplify(K - expected) == 0)
Example #10
def test_kernel_1d_5():
    alpha = Constant('alpha')
    L = dx(u) + alpha * u

    # ...
    expected = alpha * k(xi) + Derivative(k, xi)
    assert (evaluate(L, u, Kernel('K'), xi) == expected)
    # ...

    # ...
    expected = alpha * k(xj) + Derivative(k, xj)
    assert (evaluate(L, u, Kernel('K'), xj) == expected)
    # ...

    # ...
    expected = (alpha**2 * k(xi, xj) + alpha * Derivative(k, xi) +
                alpha * Derivative(k, xj) + Derivative(k, xi, xj))
    assert (evaluate(L, u, Kernel('K'), (xi, xj)) == expected)
Example #11
def test_kernel_2d_2():
    L = phi * u + dx(u) + dy(dy(u))

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xi, yi)))
    K = update_kernel(K, RBF, ((xi, yi), (xj, yj)))

    expected = theta_1 * theta_2 * (
        phi**2 - 1.0 * phi * (xi - xj) - 1.0 * phi * (yi - yj)
        + 1.0 * (xi - xj) * (yi - yj)
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2)
    assert (simplify(K - expected) == 0)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xj, yj)))
    K = update_kernel(K, RBF, ((xi, yi), (xj, yj)))

    expected = theta_1 * theta_2 * (
        phi**2 + 1.0 * phi * (xi - xj) + 1.0 * phi * (yi - yj)
        + 1.0 * (xi - xj) * (yi - yj)
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2)
    assert (simplify(K - expected) == 0)
    # ...

    # ...
    K = evaluate(L, u, Kernel('K'), (Tuple(xi, yi), Tuple(xj, yj)))
    K = update_kernel(K, RBF, ((xi, yi), (xj, yj)))

    expected = theta_1 * theta_2 * (
        phi**2 + 2.0 * phi * ((yi - yj)**2 - 1) - 1.0 * (xi - xj)**2
        + 1.0 * (yi - yj)**4 - 6.0 * (yi - yj)**2 + 4.0
    ) * exp(-0.5 * (xi - xj)**2) * exp(-0.5 * (yi - yj)**2)
    assert (simplify(K - expected) == 0)
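The last expected expression can again be reproduced with plain sympy (unit theta_1 and theta_2 assumed):

    from sympy import symbols, exp, diff, simplify

    xi, yi, xj, yj, phi = symbols('xi yi xj yj phi')
    k = exp(-(xi - xj)**2 / 2) * exp(-(yi - yj)**2 / 2)
    L = lambda f, x, y: phi*f + diff(f, x) + diff(f, y, 2)  # L = phi + d/dx + d^2/dy^2
    LLk = L(L(k, xi, yi), xj, yj)
    rx, ry = xi - xj, yi - yj
    expected = (phi**2 + 2*phi*(ry**2 - 1) - rx**2 + ry**4 - 6*ry**2 + 4) * k
    print(simplify(LLk - expected))  # prints 0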
Example #12
def test_est_1d_7():
    """Explicit time step for Burgers"""

    u = Unknown('u', ldim=1)
    nu = Constant('nu')

    # ... define a partial differential operator as a lambda function
    from sympy.abc import x
    from sympy import sin, cos, Function

    Dt = 0.0010995574287564279
    nu_expected = 0.07

    from numpy import genfromtxt
    xn = genfromtxt('x.txt')
    unew = genfromtxt('unew.txt')
    un = genfromtxt('un.txt')

    from scipy.interpolate import interp1d
    unew = interp1d(xn, unew)
    un = interp1d(xn, un)

    fn = Function('fn')

    L = lambda u: u + Dt*fn(x)*dx(u) + nu*Dt*dx(dx(u))
    # ...

    # ... lambdification + evaluation
    from numpy import linspace, pi
    from numpy.random import rand

#    x_u = linspace(0, 2*pi, 50)

    x_u = rand(50) * 2*pi

    x_f = x_u

    us = un(x_u)
    fs = unew(x_f)
    # ...

    from numpy.random import rand
    from numpy import exp, ones, log
    from time import time
    from scipy.optimize import minimize

    # compute the likelihood
    nlml = NLML(L(u), u, 'SE')

    # set values
    nlml.set_u(x_u, us)
    nlml.set_f(x_f, fs)

    # ... using pure python implementation
    from symgp.nelder_mead import nelder_mead

    x_start = rand(len(nlml.args))

    m = nelder_mead(nlml, x_start,
                    step=1., no_improve_thr=1e-5, no_improv_break=6,
                    max_iter=0, alpha=1., gamma=1.5, rho=-0.5, sigma=.5,
                    verbose=False)

    args = exp(m[0])
    print('> estimated nu = ', nlml.map_args(args)['nu'])
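nu_expected is defined near the top of this test but never used; a small addition (not in the original) prints it next to the estimate for comparison:

    print('> expected  nu = ', nu_expected)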
Example #13
def test_est_1d_6():
    """Explicit time step for Burgers"""

    u = Unknown('u', ldim=1)
    nu = Constant('nu')

    # ... define a partial differential operator as a lambda function
    from sympy.abc import x
    from sympy import sin, cos, Function

    Dt = 0.0010995574287564279
    nu_expected = 0.07

    from numpy import genfromtxt
    xn = genfromtxt('x.txt')
    unew = genfromtxt('unew.txt')
    un = genfromtxt('un.txt')

    from scipy.interpolate import interp1d
    unew = interp1d(xn, unew)
    un = interp1d(xn, un)

    fn = Function('fn')

    L = lambda u: u + Dt*fn(x)*dx(u) + nu*Dt*dx(dx(u))
    # ...

    # ... lambdification + evaluation
    from numpy import linspace, pi
    from numpy.random import rand

#    x_u = linspace(0, 2*pi, 30)

    x_u = rand(100) * 2*pi

    x_f = x_u

    us = un(x_u)
    fs = unew(x_f)
    # ...

    from numpy.random import rand
    from numpy import exp, ones, log
    from time import time
    from scipy.optimize import minimize
    from tabulate import tabulate

    # ...
    def solve(kernel):
        print('>>>> using : ', kernel)

        # compute the likelihood
        nlml = NLML(L(u), u, kernel)

        # set values
        nlml.set_u(x_u, us)
        nlml.set_f(x_f, fs)

        x_start = rand(len(nlml.args))

#        methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'L-BFGS-B', 'TNC', 'COBYLA']
#        methods = ['Nelder-Mead', 'Powell', 'CG']
        methods = ['Nelder-Mead']
        phis = []
        for method in methods:
            print('> {} method in progress ... '.format(method))
            m = minimize(nlml, x_start, method=method, jac=False)

            args = exp(m.x)
            nu_h = nlml.map_args(args)['nu']
            print('> estimated nu = ', nu_h)
            phis.append(nu_h)

        print(tabulate([phis], headers=methods))
    # ...

#    for kernel in ['RBF', 'SE', 'GammaSE', 'RQ', 'Linear', 'Periodic']:
#    for kernel in ['RBF', 'SE']:
    for kernel in ['SE']:
        solve(kernel)
Example #14
def test_est_2d_2():
    u = Unknown('u', ldim=2)
    phi = Constant('phi')

    # ... define a partial differential operator as a lambda function
    L = lambda u: phi * u + dx(u) + dy(dy(u))
    L_expected = lambda u: 2. * u + dx(u) + dy(dy(u))
    # ...

    # ... symbolic functions for unknown and rhs
    from sympy.abc import x, y
    from sympy import sin, cos

    u_sym = x**2 + y
    f_sym = L_expected(u_sym)
    # ...

    # ... lambdification + evaluation
    from sympy import lambdify
    from numpy import linspace, meshgrid, zeros, exp, asarray, argmax
    from numpy.random import rand

    u_num = lambdify((x, y), u_sym, "numpy")
    f_num = lambdify((x, y), f_sym, "numpy")

    x_u = rand(50, 2)
    x_f = x_u

    us = u_num(x_u[:, 0], x_u[:, 1])
    fs = f_num(x_f[:, 0], x_f[:, 1])
    # ...

    #    eps = 1.e-6
    eps = 1.e-5
    #    eps = 1.e-4

    niter = 10
    #    niter = 1
    phis = []
    scores = []
    for i in range(0, niter):
        print('> sample ', i)

        # compute the likelihood
        #        nlml = NLML(L(u), u, 'CSE', debug=False)
        nlml = NLML(L(u), u, 'SE', debug=False)

        # set values
        nlml.set_u(x_u, us)
        nlml.set_f(x_f, fs)

        # ... using pure python implementation
        from symgp.nelder_mead import nelder_mead

        x_start = rand(len(nlml.args))

        _nlml = lambda y: nlml(y, s_u=eps, s_f=eps)

        m = nelder_mead(_nlml,
                        x_start,
                        step=0.1,
                        no_improve_thr=10e-4,
                        no_improv_break=4,
                        max_iter=0,
                        alpha=.5,
                        gamma=1.5,
                        rho=-0.5,
                        sigma=0.5,
                        verbose=False)

        args = exp(m[0])
        score = m[1]
        phis.append(nlml.map_args(args)['phi'])
        scores.append(score)
    # ...

    phis = asarray(phis)
    scores = asarray(scores)
    i_max = argmax(scores)
    print(phis[i_max])
    print(scores[i_max])
    print(scores)
    print(phis)
Example #15
def test_est_2d_3():
    u = Unknown('u', ldim=2)
    phi = Constant('phi')

    # ... define a partial differential operator as a lambda function
    L = lambda u: phi * u + dx(u) + dy(dy(u))
    L_expected = lambda u: 2. * u + dx(u) + dy(dy(u))
    # ...

    # ... symbolic functions for unknown and rhs
    from sympy.abc import x, y
    from sympy import sin, cos, lambdify, latex

    u_sym = x**2 + y
    f_sym = L_expected(u_sym)
    # ...

    # ... lambdification + evaluation
    from numpy import linspace, meshgrid, zeros
    from numpy.random import rand

    u_num = lambdify((x, y), u_sym, "numpy")
    f_num = lambdify((x, y), f_sym, "numpy")

    x_u = rand(50, 2)

    x_f = x_u

    us = u_num(x_u[:, 0], x_u[:, 1])
    fs = f_num(x_f[:, 0], x_f[:, 1])
    # ...

    # compute the likelihood
    #    nlml = NLML(L(u), u, 'SE')
    nlml = NLML(L(u), u, 'RBF')

    # set values
    nlml.set_u(x_u, us)
    nlml.set_f(x_f, fs)

    from numpy.random import rand, normal
    from numpy import exp, ones, log

    # ... using pure python implementation
    from symgp.nelder_mead import nelder_mead

    phi_expected = 2.
    phi_var = 0.5

    n_samples = 1000
    x_starts = rand(len(nlml.args), n_samples)
    i_phi = list(nlml.args).index('phi')
    x_starts[i_phi, :] = log(normal(phi_expected, phi_var, n_samples))

    phis = []
    scores = []
    phiso = []
    scoreso = []
    for i in range(0, n_samples):
        print('> sample ', i)

        x_start = x_starts[:, i]

        # ...
        def f(params):
            params[i_phi] = x_starts[i_phi, i]
            return nlml(params)

        m = nelder_mead(f,
                        x_start,
                        step=0.1,
                        no_improve_thr=10e-4,
                        no_improv_break=4,
                        max_iter=0,
                        alpha=.5,
                        gamma=1.5,
                        rho=-0.5,
                        sigma=0.5,
                        verbose=False)

        args = exp(m[0])
        score = m[1]

        phis.append(nlml.map_args(args)['phi'])
        scores.append(score)
        # ...

        # ...
        m = nelder_mead(nlml,
                        x_start,
                        step=0.1,
                        no_improve_thr=10e-4,
                        no_improv_break=4,
                        max_iter=0,
                        alpha=.5,
                        gamma=1.5,
                        rho=-0.5,
                        sigma=0.5,
                        verbose=False)

        args = exp(m[0])
        score = m[1]

        phiso.append(nlml.map_args(args)['phi'])
        scoreso.append(score)
        # ...

    from numpy import savetxt, asarray
    import matplotlib.pyplot as plt

    # note: savetxt does not create the 'est_2d' directory; it must exist beforehand
    savetxt('est_2d/phis.txt', asarray(phis))
    savetxt('est_2d/scores.txt', asarray(scores))
    savetxt('est_2d/phiso.txt', asarray(phiso))
    savetxt('est_2d/scoreso.txt', asarray(scoreso))

    plt.plot(phis, scores, '.b', label=r'fixed $\phi$', alpha=0.4)
    plt.plot(phiso, scoreso, '.r', label=r'free $\phi$', alpha=0.4)
    plt.axvline(x=phi_expected, color='green', alpha=0.5)

    plt.xlabel(r'$\phi$')
    plt.ylabel(r'Likelihood $\mathcal{L}$')
    title = '$L u := {}$'.format(latex(L(u)))
    title += '\n'
    title += r'$\phi_{exact}' + ' := {}$'.format(phi_expected)
    plt.title(title)
    plt.legend()
    plt.show()
Example #16
def test_est_2d_1():
    u = Unknown('u', ldim=2)
    phi = Constant('phi')

    # ... define a partial differential operator as a lambda function
    L = lambda u: phi * u + dx(u) + dy(dy(u))
    L_expected = lambda u: 2. * u + dx(u) + dy(dy(u))
    # ...

    # ... symbolic functions for unknown and rhs
    from sympy.abc import x, y
    from sympy import sin, cos, lambdify

    u_sym = x**2 + y
    f_sym = L_expected(u_sym)
    # ...

    # ... lambdification + evaluation
    from numpy import linspace, meshgrid, zeros
    from numpy.random import rand

    u_num = lambdify((x, y), u_sym, "numpy")
    f_num = lambdify((x, y), f_sym, "numpy")

    #    t = linspace(0, 1, 5)
    #    x,y = meshgrid(t, t)
    #    x_u = zeros((x.size, 2))
    #    x_u[:,0] = x.reshape(x.size)
    #    x_u[:,1] = y.reshape(y.size)

    x_u = rand(100, 2)

    x_f = x_u

    us = u_num(x_u[:, 0], x_u[:, 1])
    fs = f_num(x_f[:, 0], x_f[:, 1])
    # ...

    # compute the likelihood
    debug = True
    nlml = NLML(L(u), u, 'SE', debug=debug)

    # set values
    nlml.set_u(x_u, us)
    nlml.set_f(x_f, fs)

    from numpy.random import rand
    from numpy import exp, ones, log

    # ... using pure python implementation
    from symgp.nelder_mead import nelder_mead

    x_start = rand(len(nlml.args))
    print('> x_start = ', x_start)
    m = nelder_mead(nlml,
                    x_start,
                    step=0.1,
                    no_improve_thr=10e-4,
                    no_improv_break=4,
                    max_iter=0,
                    alpha=.5,
                    gamma=1.5,
                    rho=-0.5,
                    sigma=0.5,
                    verbose=False)

    args = exp(m[0])
    print('> estimated phi = ', nlml.map_args(args)['phi'])

    if debug:
        import matplotlib.pyplot as plt

        plt.xlabel('number of data')
        plt.ylabel(r'eigenvalues in log-scale')
        plt.legend()
        plt.show()