def test_dists_to_ot():
    # Valid distribution strings are converted to OpenTURNS objects.
    dists = dists_to_ot(['Uniform(12, 15)', 'Normal(400, 10)'])
    out = [ot.Uniform(12, 15), ot.Normal(400, 10)]
    assert dists == out
    # A misspelled distribution name ('Uniorm') must raise AttributeError.
    with pytest.raises(AttributeError):
        dists_to_ot(['Uniorm(12, 15)'])
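# For reference, a minimal sketch of a converter that would satisfy this test,
# assuming dists_to_ot simply evaluates each string against the OpenTURNS
# module (illustrative only, not the project's actual implementation):
import openturns as ot


def dists_to_ot_sketch(dists):
    """Convert strings such as 'Uniform(12, 15)' to OpenTURNS distributions."""
    # An unknown name such as 'Uniorm' raises AttributeError on the module.
    return [eval('ot.' + dist, {'ot': ot}) for dist in dists]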
print("Samling error (y_train1) Study#" + str(i)) x_c, y, q = fl(x_train1[i]) y_train1.append(y) for i in range(len(x_trainr)): print("Reference case for LC metrics (y_trainr) Study#" + str(i)) x_c, y, q = fl(x_trainr[i]) y_trainr.append(y) # Build the test sample test_size = 1000 # test size dists = [ 'Uniform(20., 40.)', 'BetaMuSigma(2000, 500, 1000, 3000).getDistribution()' ] dists_ot = dists_to_ot(dists) x_test = ot.LHSExperiment(ot.ComposedDistribution(dists_ot), test_size, True, True).generate() x_test = np.array(x_test) #Buil the ouput test data for i in range(len(x_test)): print("Test#" + str(i)) x_c, y, q = fl(x_test[i]) y_test.append(y) # Surrogate ## Polynomial Chaos ### Quad
    59801.53846153847, 59908.461538461546, 60015.384615384624,
    60122.3076923077, 60229.23076923078, 60336.15384615386,
    60443.07692307694, 60550.0, 60654.545454545456, 60759.09090909091,
    60863.63636363637, 60968.18181818182, 61072.72727272728,
    61177.272727272735, 61281.81818181819, 61386.36363636365,
    61490.9090909091, 61595.45454545456, 61700.0, 61818.75, 61937.5,
    62056.25, 62175.0
]

in_dim = len(corners)  # input dim
Nl = 1000  # learning sample size
Nt = 1000  # test sample size
plabels = ['Ks_{min1}', 'Ks_{min2}', 'Ks_{min3}', 'Q']
dists = ['BetaMuSigma(4031, 400, 1000, 6000).getDistribution()',
         'Uniform(15., 60.)',
         'Uniform(15., 60.)',
         'Uniform(15., 60.)']
distsOT = dists_to_ot(dists)
space = Space(corners)

# Get the database for the UQ case
Case = Mascaret_new()
X = Case.data_input
x_l = X[0:799, :]    # learning sample taken from the database
x_t = X[800:999, :]  # test sample taken from the database

# Alternative: build the learning sample by LHS (kept commented out)
# x_l = ot.LHSExperiment(ot.ComposedDistribution(distsOT), Nl, True, True).generate()  # LHS distribution
# x_l = [list(x_l[i]) for i in range(Nl)]
# x_l = np.array(x_l)
doe_l = doe(x_l)
doe_t = doe(x_t)
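# Hedged note on the slices above: the end index of a Python slice is
# exclusive, so X[0:799, :] and X[800:999, :] keep 799 and 199 rows. If an
# 800/200 split of a 1000-row database was intended, a parameterised split
# (illustrative helper, not part of the original study) could be:
import numpy as np


def split_database(X, n_learn=800):
    """Split database rows into a learning sample and a test sample."""
    X = np.asarray(X)
    return X[:n_learn, :], X[n_learn:, :]

# Example: x_l, x_t = split_database(Case.data_input)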