def test_vertex_eq(m = 5):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    dom = dom.add_constraints(A_eq = np.ones(m), b_eq = [0])
    Xhat = dom.sample(10)
    X0 = dom.sample(5)
    check_vertex_sample(dom, Xhat, X0)
def test_vertex_weight(m = 5):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    X0 = dom.sample(100)
    L = np.diag(1./np.arange(1, m+1))
    check_vertex_sample(dom, Xhat, X0, L = L)
def test_vertex_rectangular(m = 5):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    X0 = dom.sample(100)
    L = np.ones((1, m))
    check_vertex_sample(dom, Xhat, X0, L = L)
def test_poly_basis(dimension=2, degree=5):
    """Test fitting a polynomial approximation with different bases."""
    np.random.seed(0)
    basis = LegendreTensorBasis(degree, dim=dimension)
    coef = np.random.randn(len(basis))
    pf = PolynomialFunction(basis, coef)

    dom = BoxDomain(-np.ones(dimension), np.ones(dimension))
    X = dom.sample(100)
    fX = pf(X)
    Xtest = dom.sample(1000)
    fXtest = pf(Xtest)

    for basis in ['arnoldi', 'legendre', 'monomial', 'chebyshev', 'laguerre', 'hermite']:
        print("basis ", basis)
        pa = PolynomialApproximation(degree, basis=basis)
        print("fitting")
        pa.fit(X, fX)
        print(pa.basis.V(Xtest).shape)
        print(pa(Xtest).shape)
        print(fXtest.shape)
        assert np.linalg.norm(pa(Xtest) - fXtest, np.inf) < 1e-7
def test_vertex_low_rank_nonrandomize(m = 5):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    X0 = dom.sample(100)
    L = np.diag(1./np.arange(1, m+1))
    L[0,0] = 0.
    check_vertex_sample(dom, Xhat, X0, L = L, randomize = False)
def test_fill_distance(m = 5):
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    X0 = dom.sample(100)
    x = seq_maximin_sample(dom, Xhat, X0 = X0)
    d = np.min(cdist(x.reshape(1,-1), Xhat))
    d2 = fill_distance_estimate(dom, Xhat, X0 = X0)
    assert np.isclose(d, d2)
def test_poly_fit(dimension=2, degree=5, tol=1e-6):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(dimension), np.ones(dimension))
    X = dom.sample(100)
    fXnoise = np.random.randn(X.shape[0])

    for bound in ['lower', 'upper', None]:
        for norm in [1, 2, np.inf]:
            for basis in ['arnoldi', 'legendre', 'monomial', 'chebyshev', 'laguerre', 'hermite']:
                pa = PolynomialApproximation(degree, basis=basis, norm=norm, bound=bound)
                pa.fit(X, fXnoise)
                if bound == 'lower':
                    I = ~(pa(X) - tol <= fXnoise)
                    if np.sum(I) > 0:
                        print('%s lower:' % basis)
                        print(pa(X[I]), fXnoise[I])
                        assert False
                if bound == 'upper':
                    I = ~(pa(X) + tol >= fXnoise)
                    if np.sum(I) > 0:
                        print('%s upper:' % basis)
                        print(pa(X[I]), fXnoise[I])
                        assert False
def test_func():
    client = Client(processes = False)
    dom = BoxDomain(-1, 1)
    fun = Function(func, dom, dask_client = client)
    X = dom.sample(5)
    res = fun.eval_async(X)
    for r, x in zip(res, X):
        print(r.result())
        assert np.isclose(x, r.result())
def test_isinside(m=5):
    np.random.seed(0)
    dom = BoxDomain(-10 * np.ones(m), -5 * np.ones(m))
    X = dom.sample(10)
    Xg = dom.sample_grid(2)
    hull = ConvexHullDomain(Xg)
    assert np.all(hull.isinside(X))
def test_vertex_full(m = 2):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    #check_vertex(dom, Xhat)

    # Check with degenerate points
    Xhat[1] = Xhat[0]
    #check_vertex(dom, Xhat)

    # Check with a Lipschitz matrix
    np.random.seed(0)
    Xhat = dom.sample(5)
    #L = np.random.randn(m,m)
    L = np.diag(np.arange(1, m+1))
    print(L)
    print("Checking with a Lipschitz matrix")
    check_vertex(dom, Xhat, L = L)
def test_lambda():
    dom = BoxDomain(-1, 1)

    def f(x):
        return x
    #f = lambda x: x

    print('about to start client')
    # We use a threaded version for sanity
    # https://github.com/dask/distributed/issues/2515
    client = Client(processes = False)
    print(client)
    fun = Function(f, dom, dask_client = client)
    x = dom.sample(1)
    res = fun.eval_async(x)
    print(x, res.result())
    assert np.isclose(x, res.result())
def test_mult_output(M = 10, m = 5):
    dom = BoxDomain(-np.ones(m), np.ones(m))
    X = dom.sample(M)
    a = np.random.randn(m)
    b = np.random.randn(m)

    def fun_a(X):
        return a.dot(X.T)

    def fun_b(X):
        return b.dot(X.T)

    def fun(X):
        return np.vstack([X.dot(a), X.dot(b)]).T

    print("Single function with multiple outputs")
    for vectorized in [True, False]:
        myfun = Function(fun, dom, vectorized = vectorized)
        print(fun(X))
        print("vectorized", vectorized)
        print(myfun(X).shape)
        assert myfun(X).shape == (M, 2)
        print(myfun(X[0]).shape)
        assert myfun(X[0]).shape == (2,)
        fX = fun(X)
        for i, x in enumerate(X):
            assert np.all(np.isclose(fX[i], fun(x)))

    print("Two functions with a single output each")
    for vectorized in [True, False]:
        myfun = Function([fun_a, fun_b], dom, vectorized = vectorized)
        print(fun(X))
        print("vectorized", vectorized)
        print(myfun(X).shape)
        assert myfun(X).shape == (M, 2)
        print(myfun(X[0]).shape)
        assert myfun(X[0]).shape == (2,)
        fX = fun(X)
        for i, x in enumerate(X):
            assert np.all(np.isclose(fX[i], fun(x)))
def test_constraints(m=3):
    np.random.seed(0)
    dom = BoxDomain(-1 * np.ones(m), np.ones(m))

    # Lower pyramid portion
    dom_con = dom.add_constraints(A=np.ones((1, m)), b=np.ones(1))

    # Convex hull describes the same space as dom_con
    X = dom.sample_grid(2)
    hull = ConvexHullDomain(X, A=dom_con.A, b=dom_con.b)

    # Check that the same points are inside
    X = dom.sample(100)
    assert np.all(hull.isinside(X) == dom_con.isinside(X))

    # Check sampling
    X = hull.sample(100)
    assert np.all(dom_con.isinside(X))
def test_initial_sample(m = 10):
    dom = BoxDomain(-np.ones(m), np.ones(m))
    L1 = np.random.randn(1, m)
    L2 = np.random.randn(2, m)
    L3 = np.random.randn(3, m)

    Nsamp = 100
    for L in [L1, L2, L3]:
        # Standard uniform sampling
        X1 = dom.sample(Nsamp)
        LX1 = L.dot(X1.T).T
        d1 = pdist(LX1)

        # Initial sample algorithm
        X2 = initial_sample(dom, L, Nsamp = Nsamp)
        assert np.all(dom.isinside(X2))
        LX2 = L.dot(X2.T).T
        d2 = pdist(LX2)

        print("uniform sampling mean distance", np.mean(d1), 'min', np.min(d1))
        print("initial sampling mean distance", np.mean(d2), 'min', np.min(d2))
        assert np.mean(d2) > np.mean(d1), "Initial sampling ineffective"
def test_gp_fit(m=3, M=100):
    """Check that the Gaussian process interpolates the training data."""
    dom = BoxDomain(-np.ones(m), np.ones(m))
    a = np.ones(m)
    b = np.ones(m)
    b[0] = 0
    f = lambda x: np.sin(x.dot(a)) + x.dot(b)**2
    fun = Function(f, dom)
    X = dom.sample(M)
    fX = f(X)

    for structure in ['const', 'diag', 'tril']:
        for degree in [None, 0, 1]:
            gp = GaussianProcess(structure=structure, degree=degree)
            gp.fit(X, fX)
            print(gp.L)
            I = ~np.isclose(gp(X), fX)
            print(fX[I])
            print(gp(X[I]))
            assert np.all(np.isclose(gp(X), fX, atol=1e-5)), "we should interpolate samples"

            _, cov = gp.eval(X, return_cov=True)
            assert np.all(np.isclose(cov, 0, atol=1e-3)), "Covariance should be small at samples"
def test_return_grad(m=3):
    A = np.random.randn(m, m)
    A += A.T

    def func(x, return_grad = False):
        fx = 0.5*x.dot(A.dot(x))
        if return_grad:
            grad = A.dot(x)
            return fx, grad
        else:
            return fx

    dom = BoxDomain(-2*np.ones(m), 2*np.ones(m))
    x = dom.sample(1)
    fun = Function(func, dom, return_grad = True)

    # Check the derivative
    x_norm = dom.normalize(x)
    err = check_derivative(x_norm, fun.eval, fun.grad)
    assert err < 1e-5

    # Check wrapping
    fx, grad = func(x, return_grad = True)
    assert np.isclose(fx, fun(x_norm))
    # multiply the grad by two to correct the change of coordinates
    assert np.all(np.isclose(2*grad, fun.grad(x_norm)))

    # Check multiple outputs
    X = dom.sample(10)
    fX, grads = fun(X, return_grad = True)
    for i, x in enumerate(X):
        assert np.isclose(fun(x), fX[i])
        assert np.all(np.isclose(fun.grad(x), grads[i]))

    # Check vectorized functions
    def func2(X, return_grad = False):
        X = np.atleast_2d(X)
        fX = np.vstack([0.5*x.dot(A.dot(x)) for x in X])
        if return_grad:
            grad = X.dot(A)
            return fX, grad
        else:
            return fX

    fun2 = Function(func2, dom, vectorized = True, return_grad = True)
    x = fun2.domain.sample()
    X = fun2.domain.sample(5)
    assert np.isclose(fun2(x), fun(x))
    assert np.all(np.isclose(fun2(X), fun(X)))
    assert np.all(np.isclose(fun2.grad(X), fun.grad(X)))

    # Check the __call__ interface
    fX, grad = fun2(X, return_grad = True)
    print(fX.shape, fun(X).shape)
    assert fX.shape == fun(X).shape
    assert np.all(np.isclose(fX, fun(X)))
    print(grad.shape, fun.grad(X).shape)
    assert grad.shape == fun.grad(X).shape
    assert np.all(np.isclose(grad, fun.grad(X)))

    fx, grad = fun2(x, return_grad = True)
    print(fx.shape, fun(x).shape)
    assert fx.shape == fun(x).shape
    assert np.all(np.isclose(fx, fun(x)))
    print(grad.shape, fun.grad(x).shape)
    assert grad.shape == fun.grad(x).shape
    assert np.all(np.isclose(grad, fun.grad(x)))

    fX, grad = fun(X, return_grad = True)
    print(fX.shape, fun(X).shape)
    assert fX.shape == fun(X).shape
    assert np.all(np.isclose(fX, fun(X)))
    print(grad.shape, fun.grad(X).shape)
    assert grad.shape == fun.grad(X).shape
    assert np.all(np.isclose(grad, fun.grad(X)))

    fx, grad = fun(x, return_grad = True)
    print(fx.shape, fun(x).shape)
    assert fx.shape == fun(x).shape
    assert np.all(np.isclose(fx, fun(x)))
    print(grad.shape, fun.grad(x).shape)
    assert grad.shape == fun.grad(x).shape
    assert np.all(np.isclose(grad, fun.grad(x)))
def test_vertex_box(m = 5):
    np.random.seed(0)
    dom = BoxDomain(-np.ones(m), np.ones(m))
    Xhat = dom.sample(10)
    X0 = dom.sample(100)
    check_vertex_sample(dom, Xhat, X0)