Example #1
    def test_moment_independant(self, ishigami_data, tmp):
        ishigami_data_ = copy.deepcopy(ishigami_data)
        ishigami_data_.space.max_points_nb = 5000
        X = ishigami_data_.space.sampling(5000, 'olhs')
        Y = ishigami_data_.func(X).flatten()

        momi = moment_independent(X,
                                  Y,
                                  plabels=['x1', 'x2', 'x3'],
                                  fname=os.path.join(tmp,
                                                     'moment_independent.pdf'))

        npt.assert_almost_equal(momi[2]['Kolmogorov'], [0.236, 0.377, 0.107],
                                decimal=2)
        npt.assert_almost_equal(momi[2]['Kuiper'], [0.257, 0.407, 0.199],
                                decimal=2)
        npt.assert_almost_equal(momi[2]['Delta'], [0.211, 0.347, 0.162],
                                decimal=2)
        npt.assert_almost_equal(momi[2]['Sobol'], [0.31, 0.421, 0.002],
                                decimal=2)

        # Cramer
        space = Space(corners=[[-5, -5], [5, 5]], sample=5000)
        space.sampling(dists=['Normal(0, 1)', 'Normal(0, 1)'])
        Y = [np.exp(x_i[0] + 2 * x_i[1]) for x_i in space]
        X = np.array(space)

        momi = moment_independent(X,
                                  Y,
                                  fname=os.path.join(tmp,
                                                     'moment_independent.pdf'))

        npt.assert_almost_equal(momi[2]['Cramer'], [0.113, 0.572], decimal=2)
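
For intuition, the Kolmogorov- and Kuiper-type measures compare the unconditional distribution of Y with its distribution conditioned on slices of one input. A minimal sketch of that idea, assuming quantile-binned conditioning; the helper kolmogorov_index and its n_bins parameter are illustrative, not batman's API:

import numpy as np
from scipy.stats import ks_2samp

def kolmogorov_index(x, y, n_bins=20):
    # Average two-sample KS statistic between the full output sample and
    # the outputs whose input falls in each quantile bin of x (sketch only).
    edges = np.quantile(x, np.linspace(0, 1, n_bins + 1))
    stats = [ks_2samp(y, y[(x >= lo) & (x <= hi)]).statistic
             for lo, hi in zip(edges[:-1], edges[1:])]
    return float(np.mean(stats))
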
Example #2
def test_space_evaluation(settings_ishigami):
    f_3d = Ishigami()
    space = Space(settings_ishigami['space']['corners'])
    space.sampling(2, 'halton')
    targets_space = f_3d(space)
    f_data_base = np.array([5.25, 4.2344145]).reshape(2, 1)
    npt.assert_almost_equal(targets_space, f_data_base)
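
The reference values can be checked by hand. With the usual Ishigami constants a = 7 and b = 0.1 (assumed here, though they are the standard choice) and corners [-pi, pi]^3, the first unscrambled Halton point maps to (0, -pi/3, -0.6*pi), giving sin(0) + 7*sin(-pi/3)**2 = 5.25:

import numpy as np

def ishigami(x, a=7.0, b=0.1):
    # f(x) = sin(x1) + a*sin(x2)**2 + b*x3**4*sin(x1)
    x1, x2, x3 = x
    return np.sin(x1) + a * np.sin(x2) ** 2 + b * x3 ** 4 * np.sin(x1)

print(ishigami([0.0, -np.pi / 3, -0.6 * np.pi]))  # 5.25
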
Example #3
def test_discrepancy():
    corners = [[0.5, 0.5], [6.5, 6.5]]
    space_1 = Space(corners)
    space_2 = Space(corners)

    space_1 += [[1, 3], [2, 6], [3, 2], [4, 5], [5, 1], [6, 4]]
    space_2 += [[1, 5], [2, 4], [3, 3], [4, 2], [5, 1], [6, 6]]

    assert Space.discrepancy(space_1, space_1.corners) == pytest.approx(0.0081, abs=1e-4)
    assert Space.discrepancy(space_2, space_2.corners) == pytest.approx(0.0105, abs=1e-4)

    space_1 = (2.0 * space_1.values - 1.0) / (2.0 * 6.0)
    assert Space.discrepancy(space_1) == pytest.approx(0.0081, abs=1e-4)

    space = np.array([[2, 1, 1, 2, 2, 2],
                      [1, 2, 2, 2, 2, 2],
                      [2, 1, 1, 1, 1, 1],
                      [1, 1, 1, 1, 2, 2],
                      [1, 2, 2, 2, 1, 1],
                      [2, 2, 2, 2, 1, 1],
                      [2, 2, 2, 1, 2, 2]])
    space = (2.0 * space - 1.0) / (2.0 * 2.0)

    assert Space.discrepancy(space, method='MD') == pytest.approx(2.5000, abs=1e-4)
    assert Space.discrepancy(space, method='WD') == pytest.approx(1.3680, abs=1e-4)
    assert Space.discrepancy(space, method='CD') == pytest.approx(0.3172, abs=1e-4)
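
The same discrepancy measures are implemented in SciPy (>= 1.7) as scipy.stats.qmc.discrepancy with methods 'CD', 'WD', 'MD', and 'L2-star'; on the unit-cube-scaled sample above it should reproduce the values asserted here:

import numpy as np
from scipy.stats import qmc

sample = np.array([[2, 1, 1, 2, 2, 2],
                   [1, 2, 2, 2, 2, 2],
                   [2, 1, 1, 1, 1, 1],
                   [1, 1, 1, 1, 2, 2],
                   [1, 2, 2, 2, 1, 1],
                   [2, 2, 2, 2, 1, 1],
                   [2, 2, 2, 1, 2, 2]])
sample = (2.0 * sample - 1.0) / (2.0 * 2.0)  # scale to the unit hypercube

print(qmc.discrepancy(sample, method='MD'))  # ~2.5000
print(qmc.discrepancy(sample, method='WD'))  # ~1.3680
print(qmc.discrepancy(sample, method='CD'))  # ~0.3172
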
Example #4
def test_mst(tmp):
    sample = np.array([[0.25, 0.5], [0.6, 0.4], [0.7, 0.2]])
    mean, std, edges = Space.mst(sample, fname=os.path.join(tmp, 'mst.pdf'))

    assert mean == pytest.approx(0.2938, abs=1e-4)
    assert std == pytest.approx(0.0702, abs=1e-4)
    npt.assert_equal(edges, [[0, 1], [1, 2]])
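
The expected numbers follow directly from the pairwise distances: the minimum spanning tree of these three points keeps edges 0-1 (length ~0.364) and 1-2 (length ~0.224), whose mean is ~0.2938 and population standard deviation ~0.0702. An independent cross-check with SciPy:

import numpy as np
from scipy.spatial.distance import cdist
from scipy.sparse.csgraph import minimum_spanning_tree

sample = np.array([[0.25, 0.5], [0.6, 0.4], [0.7, 0.2]])
weights = minimum_spanning_tree(cdist(sample, sample)).toarray()
lengths = weights[weights > 0]        # lengths of the retained edges
print(lengths.mean(), lengths.std())  # ~0.2938, ~0.0702
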
Example #5
def branin_data(settings_ishigami):
    data = {}
    data['func'] = Branin()
    data['dists'] = [ot.Uniform(-5, 10), ot.Uniform(0, 15)]
    data['point'] = [2., 2.]
    data['target_point'] = data['func'](data['point'])
    data['space'] = Space(
        [[-7, 0], [10, 15]],
        settings_ishigami['space']['sampling']['init_size'],
        settings_ishigami['space']['resampling']['resamp_size'], ['x1', 'x2'])
    data['space'].sampling(10, kind='halton', discrete=0)
    data['target_space'] = data['func'](data['space'])
    return Datatest(data)
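
For reference, the textbook Branin (Branin-Hoo) function on x1 in [-5, 10], x2 in [0, 15] is shown below; whether batman's Branin applies any rescaling or shift is not visible here, so treat this as the standard form only:

import numpy as np

def branin(x):
    # a*(x2 - b*x1**2 + c*x1 - r)**2 + s*(1 - t)*cos(x1) + s
    x1, x2 = x
    a, b, c = 1.0, 5.1 / (4 * np.pi ** 2), 5.0 / np.pi
    r, s, t = 6.0, 10.0, 1.0 / (8 * np.pi)
    return a * (x2 - b * x1 ** 2 + c * x1 - r) ** 2 + s * (1 - t) * np.cos(x1) + s
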
Example #6
def g_function_data(settings_ishigami):
    data = {}
    data['func'] = G_Function()
    data['dists'] = [ot.Uniform(0, 1)] * 4
    data['point'] = [0.5, 0.2, 0.7, 0.1]
    data['target_point'] = data['func'](data['point'])
    data['space'] = Space(
        [[0, 0, 0, 0], [1, 1, 1, 1]],
        settings_ishigami['space']['sampling']['init_size'],
        settings_ishigami['space']['resampling']['resamp_size'],
        ['x1', 'x2', 'x3', 'x4'])
    data['space'].sampling(10, kind='halton', discrete=2)
    data['target_space'] = data['func'](data['space'])
    return Datatest(data)
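
The fixture wraps the Sobol' g-function, whose standard form is below; the importance coefficients a_i vary between implementations, so they are left as a parameter here rather than guessing batman's defaults:

import numpy as np

def g_function(x, a):
    # g(x) = prod_i (|4*x_i - 2| + a_i) / (1 + a_i), with x_i in [0, 1]
    x, a = np.asarray(x), np.asarray(a)
    return float(np.prod((np.abs(4.0 * x - 2.0) + a) / (1.0 + a)))
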
Example #7
def ishigami_data(settings_ishigami):
    data = {}
    data['func'] = Ishigami()
    x1 = ot.Uniform(-3.1415, 3.1415)
    data['dists'] = [x1] * 3
    data['point'] = [2.20, 1.57, 3]
    data['target_point'] = data['func'](data['point'])
    data['space'] = Space(
        settings_ishigami['space']['corners'],
        settings_ishigami['space']['sampling']['init_size'],
        settings_ishigami['space']['resampling']['resamp_size'],
        settings_ishigami['snapshot']['plabels'])
    data['space'].sampling(150,
                           settings_ishigami['space']['sampling']['method'])
    data['target_space'] = data['func'](data['space'])
    return Datatest(data)
Example #8
def mascaret_data(settings_ishigami):
    data = {}
    fun = db_Mascaret()
    data['func'] = lambda x: fun(x).reshape(-1, 14)[:, 0:3]
    data['func'].x = fun.x[0:3]
    x1 = ot.Uniform(15., 60.)
    x2 = ot.Normal(4035., 400.)
    data['dists'] = [x1, x2]
    data['point'] = [31.54, 4237.025]
    data['target_point'] = data['func'](data['point'])[0]
    data['space'] = Space(
        [[15.0, 2500.0], [60, 6000.0]],
        settings_ishigami['space']['sampling']['init_size'],
        settings_ishigami['space']['resampling']['resamp_size'], ['Ks', 'Q'])
    data['space'].sampling(50,
                           settings_ishigami['space']['sampling']['method'])
    data['target_space'] = data['func'](data['space'])
    return Datatest(data)
Example #9
def mufi_data(settings_ishigami):
    data = {}
    f_e = Forrester('e')
    f_c = Forrester('c')
    data['dists'] = [ot.Uniform(0.0, 1.0)]
    data['point'] = [0.65]
    data['target_point'] = f_e(data['point'])
    data['space'] = Space(
        [[0.0], [1.0]],
        10,
        settings_ishigami['space']['resampling']['resamp_size'],
        ['fidelity', 'x'],
        multifidelity=[5.1, 13.0])
    data['space'].sampling(10, 'halton')

    working_space = np.array(data['space'])

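    # Column 0 of the sample is the fidelity flag: rows flagged 0 are
    # evaluated with f_e, rows flagged 1 with f_c ('e' and 'c' presumably
    # denote the expensive and cheap Forrester variants).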
    data['target_space'] = np.vstack([
        f_e(working_space[working_space[:, 0] == 0][:, 1:]),
        f_c(working_space[working_space[:, 0] == 1][:, 1:])
    ])
    data['func'] = [f_e, f_c]

    return Datatest(data)
Example #10
# sample sizes for sampling error

init_size5 = 200
init_size4 = 150
init_size3 = 100
init_size2 = 80
init_size1 = 60

# sample size used for truncation error

init_size = 1000

indim = 2  # input dimension
plabels = ['Ks', 'Q']
space = Space(corners)

# Build the learning samples

# training sample for truncation error (1 sample)
x_train = np.array(space.sampling(init_size, 'halton'))

# training samples for sampling error (init_size varies)

x_train5 = np.array(space.sampling(init_size5, 'halton'))
x_train4 = np.array(space.sampling(init_size4, 'halton'))
x_train3 = np.array(space.sampling(init_size3, 'halton'))
x_train2 = np.array(space.sampling(init_size2, 'halton'))
x_train1 = np.array(space.sampling(init_size1, 'halton'))

# training sample for estimation of LC metrics (large init_size)
Example #11
ax.scatter(np.array(sample_kde)[-1, 0],
           np.array(sample_kde)[-1, 1],
           c='b',
           marker='D',
           s=60)

ticks = np.linspace(0, 1, num=5)
plt.colorbar(contour, ticks=ticks)
plt.show()

# WD plot
fig = plt.figure()
ax = fig.gca()
ax.set_xlim(mini, maxi)
ax.set_ylim(mini, maxi)
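# Discrepancy of the design when the candidate position s replaces the
# last point of sample_kde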
f_ = np.array([
    Space.discrepancy(np.vstack([np.array(sample_kde)[:-1], s]), method='WD')
    for s in positions.T
])[:, None]
f_ = np.reshape(f_, xx.shape)
contour = ax.contourf(xx, yy, f_)
ax.scatter(np.array(sample_kde)[:-1, 0],
           np.array(sample_kde)[:-1, 1],
           c='k',
           marker='o')
plt.colorbar(contour)
plt.show()

# 1D line at y=0.5
# fig = plt.figure()
# x = np.linspace(0, 1, 100)[:, None]
# x = np.concatenate([x, np.ones((100, 1)) * 0.5], axis=1)
Example #12
    60122.3076923077, 60229.23076923078, 60336.15384615386, 60443.07692307694,
    60550.0, 60654.545454545456, 60759.09090909091, 60863.63636363637,
    60968.18181818182, 61072.72727272728, 61177.272727272735,
    61281.81818181819, 61386.36363636365, 61490.9090909091, 61595.45454545456,
    61700.0, 61818.75, 61937.5, 62056.25, 62175.0
]
in_dim = len(corners)  # input dim
Nl = 1000  # learning sample size
Nt = 1000  # test sample size
plabels = ['Ks_{min1}', 'Ks_{min2}', 'Ks_{min3}', 'Q']
dists = [
    'BetaMuSigma(4031, 400, 1000, 6000).getDistribution()',
    'Uniform(15., 60.)', 'Uniform(15., 60.)', 'Uniform(15., 60.)'
]
distsOT = dists_to_ot(dists)
space = Space(corners)

# Get the database for UQ
Case = Mascaret_new()

X = Case.data_input
x_l = X[0:800, :]  # learning set: 800 rows
x_t = X[800:1000, :]  # test set: 200 rows
# Build the learning sample
# x_l = ot.LHSExperiment(ot.ComposedDistribution(distsOT), Nl, True, True).generate()  # LHS design
# x_l = [list(x_l[i]) for i in range(Nl)]
# x_l = np.array(x_l)

doe_l = doe(x_l)
doe_t = doe(x_t)
# Build the training sample
Example #13
def test_space(settings_ishigami, seed):
    corners = settings_ishigami['space']['corners']
    space = Space(corners)
    assert space.max_points_nb == np.inf

    space = Space(corners, sample=10)
    assert space.max_points_nb == 10

    space = Space(corners, sample=10, nrefine=6,
                  plabels=['x', 'y', 'z'])

    assert space.max_points_nb == 16

    space += (1, 2, 3)
    npt.assert_array_equal(space.values, [(1, 2, 3)])

    space.empty()
    npt.assert_array_equal(space.values, np.empty((0, 3)))

    space += [(1, 2, 3), (1, 1, 3)]
    npt.assert_array_equal(space.values, [(1, 2, 3), (1, 1, 3)])

    space2 = Space(corners, space.values)
    npt.assert_array_equal(space2.values, [(1, 2, 3), (1, 1, 3)])

    s1 = space.sampling()
    assert len(s1) == 10
    space2 = Space(corners,
                   sample=settings_ishigami['space']['sampling']['init_size'],
                   nrefine=settings_ishigami['space']['resampling']['resamp_size'])

    s2 = space2.sampling(10, kind='lhsc')
    assert len(s2) == 10
    assert np.any(s1 != s2)

    space.empty()
    space += (1, 2, 3)
    space += (1, 2, 3)
    assert len(space) == 1

    space = Space(corners, sample=16, duplicate=True)
    space += (1, 2, 3)
    space += (1, 2, 3)
    assert len(space) == 2

    with pytest.raises(ValueError):
        space += (1, 2)
    assert len(space) == 2

    space += (1, 7, 3)
    assert len(space) == 2

    space.sampling(17)
    assert len(space) == 16

    space.empty()
    dists = ['Uniform(0., 1.)', 'Uniform(-1., 2.)', 'Uniform(-2., 3.)']
    space.sampling(5, kind='halton', dists=dists)
    out = [(0.5, 0.0, -1.0), (0.25, 1.0, 0.0), (0.75, -0.67, 1.0),
           (0.125, 0.33, 2.0), (0.625, 1.33, -1.8)]
    npt.assert_almost_equal(space, out, decimal=1)

    space = Space(corners, sample=np.array([(1, 2, 3), (1, 1, 3)]))
    assert space.doe_init == 2
    assert space.max_points_nb == 2

    test_settings = copy.deepcopy(settings_ishigami)
    test_settings['space']['corners'][1] = [np.pi, -np.pi, np.pi]
    with pytest.raises(ValueError):
        Space(test_settings['space']['corners'])
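
As a cross-reference for the Halton check above: SciPy's QMC engines can reproduce the same unscrambled points, assuming batman's 'halton' sampler skips the origin that opens the raw sequence:

import numpy as np
from scipy.stats import qmc

halton = qmc.Halton(d=3, scramble=False)
halton.fast_forward(1)  # drop the first point of the sequence, the origin
pts = qmc.scale(halton.random(5), [0.0, -1.0, -2.0], [1.0, 2.0, 3.0])
print(pts[0])  # ~ [0.5, 0.0, -1.0], matching the `out` values above
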
Example #14
import numpy as np
from scipy.spatial import distance
from sklearn import preprocessing
from sklearn.neighbors import NearestNeighbors
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
from batman.space import Space
from batman.visualization import doe, response_surface, reshow
from batman.functions import Branin
import openturns as ot

# Problem definition: f(sample) -> data
corners = np.array([[-5, 0], [10, 14]])
sample = Space(corners)
sample.sampling(20)

doe(sample, fname='init_doe.pdf')

fun_branin = Branin()


def fun(x):
    return -fun_branin(x)


data = fun(sample)

# Algo


def random_uniform_ring(
Example #15
# sample sizes for sampling error

init_size5 = 200
init_size4 = 150
init_size3 = 100
init_size2 = 50
init_size1 = 40

# sample size used for truncation error

init_size = 1000

indim = 2  # input dimension
plabels = ['Ks', 'Q']
space = Space(corners)

# Build the learning samples

# training sample for truncation error (1 sample)
x_train = np.array(space.sampling(init_size, 'halton'))

# training samples for sampling error (init_size varies)
x_train5 = np.array(space.sampling(init_size5, 'halton'))
x_train4 = np.array(space.sampling(init_size4, 'halton'))
x_train3 = np.array(space.sampling(init_size3, 'halton'))
x_train2 = np.array(space.sampling(init_size2, 'halton'))
x_train1 = np.array(space.sampling(init_size1, 'halton'))

x_trainr = np.array(space.sampling(