def test_space_evaluation(settings_ishigami):
    """Evaluate the Ishigami function on a 2-point Halton DOE and compare
    against reference values."""
    ishigami = Ishigami()
    doe = Space(settings_ishigami['space']['corners'])
    doe.sampling(2, 'halton')
    computed = ishigami(doe)
    expected = np.array([5.25, 4.2344145]).reshape(2, 1)
    npt.assert_almost_equal(computed, expected)
def test_moment_independant(self, ishigami_data, tmp):
    """Check moment-independent sensitivity measures on Ishigami, then the
    Cramer measure on a bivariate Gaussian toy problem."""
    data = copy.deepcopy(ishigami_data)
    data.space.max_points_nb = 5000
    sample = data.space.sampling(5000, 'olhs')
    output = data.func(sample).flatten()
    indices = moment_independent(sample, output,
                                 plabels=['x1', 'x2', 'x3'],
                                 fname=os.path.join(tmp, 'moment_independent.pdf'))
    npt.assert_almost_equal(indices[2]['Kolmogorov'], [0.236, 0.377, 0.107], decimal=2)
    npt.assert_almost_equal(indices[2]['Kuiper'], [0.257, 0.407, 0.199], decimal=2)
    npt.assert_almost_equal(indices[2]['Delta'], [0.211, 0.347, 0.162], decimal=2)
    npt.assert_almost_equal(indices[2]['Sobol'], [0.31, 0.421, 0.002], decimal=2)

    # Cramer
    gauss_space = Space(corners=[[-5, -5], [5, 5]], sample=5000)
    gauss_space.sampling(dists=['Normal(0, 1)', 'Normal(0, 1)'])
    output = [np.exp(point[0] + 2 * point[1]) for point in gauss_space]
    sample = np.array(gauss_space)
    indices = moment_independent(sample, output,
                                 fname=os.path.join(tmp, 'moment_independent.pdf'))
    npt.assert_almost_equal(indices[2]['Cramer'], [0.113, 0.572], decimal=2)
# NOTE(review): this chunk references names defined outside the visible
# excerpt (`corners`, `init_size5`, `init_size4`) -- confirm against the
# full script before moving it.

# Training-sample sizes for the sampling-error study (one DOE per size).
init_size3 = 100
init_size2 = 80
init_size1 = 60
# sample size used for truncation error
init_size = 1000
indim = 2 # inputs dim
plabels = ['Ks', 'Q']
# Parameter space spanned by `corners` (bounds defined earlier in the script).
space = Space(corners)

# Build the learning samples
# training sample for truncation error (1 sample)
x_train = np.array(space.sampling(init_size, 'halton'))
# training samples for sampling error (init_size varies)
x_train5 = np.array(space.sampling(init_size5, 'halton'))
x_train4 = np.array(space.sampling(init_size4, 'halton'))
x_train3 = np.array(space.sampling(init_size3, 'halton'))
x_train2 = np.array(space.sampling(init_size2, 'halton'))
x_train1 = np.array(space.sampling(init_size1, 'halton'))
# training sample for estimation of LC metrics (large init_size)
x_trainr = np.array(space.sampling(init_size, 'halton'))

# Creating empty lists for output data
y_train = []
y_train5 = []
def test_space(settings_ishigami, seed):
    """Exercise Space construction, point insertion, sampling and validation."""
    corners = settings_ishigami['space']['corners']

    # Without a sample size the space is unbounded.
    doe = Space(corners)
    assert doe.max_points_nb == np.inf

    doe = Space(corners, sample=10)
    assert doe.max_points_nb == 10

    # The refinement budget raises the cap: sample + nrefine.
    doe = Space(corners, sample=10, nrefine=6, plabels=['x', 'y', 'z'])
    assert doe.max_points_nb == 16

    # Single-point and batch insertion.
    doe += (1, 2, 3)
    npt.assert_array_equal(doe.values, [(1, 2, 3)])
    doe.empty()
    npt.assert_array_equal(doe.values, np.empty((0, 3)))
    doe += [(1, 2, 3), (1, 1, 3)]
    npt.assert_array_equal(doe.values, [(1, 2, 3), (1, 1, 3)])

    # A space can be seeded from another space's values.
    other = Space(corners, doe.values)
    npt.assert_array_equal(other.values, [(1, 2, 3), (1, 1, 3)])

    first_sample = doe.sampling()
    assert len(first_sample) == 10
    other = Space(corners,
                  sample=settings_ishigami['space']['sampling']['init_size'],
                  nrefine=settings_ishigami['space']['resampling']['resamp_size'])
    second_sample = other.sampling(10, kind='lhsc')
    assert len(second_sample) == 10
    assert np.any(first_sample != second_sample)

    # Duplicates are dropped by default...
    doe.empty()
    doe += (1, 2, 3)
    doe += (1, 2, 3)
    assert len(doe) == 1

    # ...but kept when duplicate=True.
    doe = Space(corners, sample=16, duplicate=True)
    doe += (1, 2, 3)
    doe += (1, 2, 3)
    assert len(doe) == 2

    # A point of the wrong dimensionality raises and leaves the space intact.
    with pytest.raises(ValueError):
        doe += (1, 2)
    assert len(doe) == 2

    # This point is rejected (presumably outside the corners -- length stays 2).
    doe += (1, 7, 3)
    assert len(doe) == 2

    # Requests beyond max_points_nb are truncated to the cap.
    doe.sampling(17)
    assert len(doe) == 16

    # Sampling with explicit marginal distributions.
    doe.empty()
    distributions = ['Uniform(0., 1.)', 'Uniform(-1., 2.)', 'Uniform(-2., 3.)']
    doe.sampling(5, kind='halton', dists=distributions)
    expected = [(0.5, 0.0, -1.0), (0.25, 1.0, 0.0), (0.75, -0.67, 1.0),
                (0.125, 0.33, 2.0), (0.625, 1.33, -1.8)]
    npt.assert_almost_equal(doe, expected, decimal=1)

    # A space built from an explicit array is capped at that array's length.
    doe = Space(corners, sample=np.array([(1, 2, 3), (1, 1, 3)]))
    assert doe.doe_init == 2
    assert doe.max_points_nb == 2

    # Invalid corners (upper bound below lower on a dimension) must raise.
    bad_settings = copy.deepcopy(settings_ishigami)
    bad_settings['space']['corners'][1] = [np.pi, -np.pi, np.pi]
    with pytest.raises(ValueError):
        Space(bad_settings['space']['corners'])
import numpy as np
from scipy.spatial import distance
from sklearn import preprocessing
from sklearn.neighbors import NearestNeighbors
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
from batman.space import Space
from batman.visualization import doe, response_surface, reshow
from batman.functions import Branin
import openturns as ot

# Problem definition: f(sample) -> data
# 2D rectangular domain [-5, 10] x [0, 14] (Branin's usual domain).
corners = np.array([[-5, 0], [10, 14]])
sample = Space(corners)
sample.sampling(20)
# Visualize the initial design of experiments.
doe(sample, fname='init_doe.pdf')
fun_branin = Branin()

def fun(x):
    """Objective: negated Branin, so the algorithm below maximizes it."""
    return -fun_branin(x)

data = fun(sample)

# Algo
# NOTE(review): definition truncated in this excerpt -- body continues
# beyond the visible source.
def random_uniform_ring(
# NOTE(review): this chunk references names defined outside the visible
# excerpt (`corners`, `init_size5`) -- confirm against the full script.

# Training-sample sizes for the sampling-error study (one DOE per size).
init_size4 = 150
init_size3 = 100
init_size2 = 50
init_size1 = 40
# sample size used for truncation error
init_size = 1000
indim = 2 # inputs dim
plabels = ['Ks', 'Q']
# Parameter space spanned by `corners` (bounds defined earlier in the script).
space = Space(corners)

# Build the learning samples
# training sample for truncation error (1 sample)
x_train = np.array(space.sampling(
    init_size, 'halton'))
# training samples for sampling error (init_size varies)
x_train5 = np.array(space.sampling(
    init_size5, 'halton'))
x_train4 = np.array(space.sampling(init_size4, 'halton'))
x_train3 = np.array(space.sampling(init_size3, 'halton'))
x_train2 = np.array(space.sampling(init_size2, 'halton'))
x_train1 = np.array(space.sampling(init_size1, 'halton'))
# training sample for estimation of LC metrics (large init_size)
x_trainr = np.array(space.sampling(
    1000, 'halton'))

# Build the solution vector with y = f(x)