def test_gaussian_tensorgrid(self):
    # 4D function for testing!
    def fun(x):
        return np.cos(x[0]) + x[1]**2 + x[2] * x[3]

    x1 = Parameter(param_type='Gaussian', shape_parameter_A=0, shape_parameter_B=1, points=4)
    x2 = Parameter(param_type='Gaussian', shape_parameter_A=0, shape_parameter_B=1, points=4)
    x3 = Parameter(param_type='Gaussian', shape_parameter_A=0, shape_parameter_B=1, points=4)
    x4 = Parameter(param_type='Gaussian', shape_parameter_A=0, shape_parameter_B=1, points=4)
    parameters = [x1, x2, x3, x4]
    result, points = integrals.tensorgrid(parameters, fun)
    print result
    if np.linalg.norm(result - 1.606, 2) < 1e-4:
        print np.linalg.norm(result - 1.606, 2)
        print 'Success!'
    else:
        raise RuntimeError('Integration testing failed')

    # Test to see if we get points and weights when the function is not provided!
    points, weights = integrals.tensorgrid(parameters)
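# Sanity check on the reference value used above: with X1..X4 independent standard
# Gaussians, E[cos(X1)] = exp(-1/2) ~ 0.6065, E[X2^2] = 1 and E[X3*X4] = 0, so the
# exact value of the integral is exp(-0.5) + 1 ~ 1.6065, which is the basis for the
# 1.606 reference in the tolerance check.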
def test_polynomial_and_derivative_constructions(self):
    s = Parameter(lower=-1, upper=1, param_type='Uniform', points=2, derivative_flag=1)
    uq_parameters = [s, s]
    uq = Polynomial(uq_parameters)
    num_elements = 2
    pts, x1, x2 = meshgrid(-1.0, 1.0, num_elements, num_elements)
    P, Q = uq.getMultivariatePolynomial(pts)
    print '--------output---------'
    print P
    print '~'
    print Q

    s = Parameter(lower=-1, upper=2, param_type='Uniform', points=4, derivative_flag=0)
    T = IndexSet('Tensor grid', [5])
    uq = Polynomial([s])
    pts = np.linspace(-1, 1, 20)
    P, D = uq.getMultivariatePolynomial(pts)
    print '--------output---------'
    print P
    print '~'
    print D
def testinputPDFs(self):
    # Output a histogram based on 10,000 samples
    X = Parameter(points=3, shape_parameter_A=15, shape_parameter_B=2.5, param_type='Gaussian')
    X.getSamples(10000, graph=1)
def gradients_univariate():
    # Parameters!
    pt = 8
    x1 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt, derivative_flag=1)
    x2 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt, derivative_flag=1)
    parameters = [x1, x2]
    dims = len(parameters)

    # Basis selection!
    total_order_basis = IndexSet("Total order", orders=[pt-1, pt-1])
    esq = EffectiveSubsampling(parameters, total_order_basis)
    A, p, w = esq.getAmatrix()
    C = esq.getCmatrix()

    # Matrix sizes
    m, n = A.shape
    print m, n
    print '*****************'
    m, n = C.shape
    print m, n

    # Now perform least squares on the stacked system!
    W = np.mat(np.diag(np.sqrt(w)))
    b = W.T * evalfunction(p, fun)
    d = evalgradients(p, fungrad, 'vector')
    x = qr.solveLSQ(np.vstack([A, C]), np.vstack([b, d]))
    print x
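# The gradient-based examples in this file call evalfunction(p, fun) and
# evalgradients(p, fungrad, 'vector') without defining fun or fungrad. The
# definitions below are a minimal, hypothetical sketch of compatible callables
# (an assumption, not taken from the source): a smooth test function that works
# in any dimension, with its gradient returned component-wise.
def fun(x):
    # Hypothetical test function f(x) = exp(x_1 + ... + x_d), evaluated at one point x
    return np.exp(np.sum(x))

def fungrad(x):
    # Gradient of the hypothetical test function; every partial derivative equals f(x)
    return [np.exp(np.sum(x)) for _ in range(len(x))]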
def test_pseudospectral_approximation_tensor(self):
    def expfun(x):
        return np.exp(x[0] + x[1]) + 0.5 * np.cos(x[0] * 2 * np.pi)

    # Compare the actual function with its polynomial approximation
    s = Parameter(lower=-1, upper=1, points=6)
    T = IndexSet('Tensor grid', [5, 5])
    uq = Polynomial([s, s], T)
    num_elements = 10
    coefficients, index_set, evaled_pts = uq.getPolynomialCoefficients(expfun)
    pts, x1, x2 = meshgrid(-1.0, 1.0, num_elements, num_elements)
    Approx = uq.getPolynomialApproximation(expfun, pts, coefficients)
    A = np.reshape(Approx, (num_elements, num_elements))
    fun = evalfunction(pts, expfun)

    # Now plot this surface
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(x1, x2, A, rstride=1, cstride=1, cmap=cm.winter,
                           linewidth=0, antialiased=False, alpha=0.5)
    ax.scatter(x1, x2, fun, 'ko')
    ax.set_zlim(0, 10)
    ax.set_xlabel('x1')
    ax.set_ylabel('x2')
    ax.set_zlabel('Response')
    fig.colorbar(surf, shrink=0.5, aspect=5)
def main():
    # Set the parameters
    x1 = Parameter(lower=38.2, upper=250.4, points=3)
    x2 = Parameter(lower=0.157, upper=0.313, points=3)
    parameters = [x1, x2]

    # Set the polynomial basis
    orders = [2, 2]
    polybasis = IndexSet("Total order", orders)
    print polybasis.getIndexSet()
    maximum_number_of_evals = polybasis.getCardinality()

    # Set up effective quadrature subsampling
    esq = EffectiveSubsampling(parameters, polybasis)
    Asquare = esq.getAsubsampled(maximum_number_of_evals)
    print Asquare
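# A hedged continuation of main() above, assembled from the same calls used in
# test_vegetation_problem() below: subsample the quadrature points, evaluate a
# model at them, and solve the least-squares problem for the coefficients.
# The model g(x1, x2) is a hypothetical placeholder, not taken from the source,
# and the loop assumes the subsampled points come back with one point per row.
def main_with_solve():
    x1 = Parameter(lower=38.2, upper=250.4, points=3)
    x2 = Parameter(lower=0.157, upper=0.313, points=3)
    polybasis = IndexSet("Total order", [2, 2])
    maximum_number_of_evals = polybasis.getCardinality()
    esq = EffectiveSubsampling([x1, x2], polybasis)
    points = esq.getEffectivelySubsampledPoints(maximum_number_of_evals)

    # Hypothetical model evaluated at each subsampled point
    g = lambda a, b: np.sin(0.01 * a) + b
    output = np.mat([g(pt[0], pt[1]) for pt in np.array(points)])

    coefficients = esq.solveLeastSquares(maximum_number_of_evals, output.T)
    print coefficients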
def gradients_multivariate_subsampled():
    # Parameters!
    pt = 3
    x1 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt, derivative_flag=1)
    x2 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt, derivative_flag=1)
    parameters = [x1, x2]
    dims = len(parameters)

    # Basis selection!
    basis = IndexSet("Total order", orders=[pt-1, pt-1])
    esq = EffectiveSubsampling(parameters, basis)
    A, p, w = esq.getAmatrix()
    C = esq.getCmatrix()

    # QR column pivoting
    P = qr.mgs_pivoting(A.T)

    # Now perform least squares on the subsampled rows!
    basis_terms_required = basis.getCardinality()
    minimum_points = np.int((basis_terms_required + dims) / (dims + 1.)) + 5
    nodes = P[0:minimum_points]
    A = getRows(A, nodes)
    C = getRowsC(C, nodes, dims)
    w = w[nodes]
    p = p[nodes, :]
    W = np.mat(np.diag(np.sqrt(w)))
    b = W.T * evalfunction(p, fun)
    d = evalgradients(p, fungrad, 'vector')
    R = np.vstack([A, C])
    print np.linalg.cond(R)
    print R
    print np.vstack([b, d])
    x = qr.solveLSQ(np.vstack([A, C]), np.vstack([b, d]))
    print '\n'
    print x
def test_uniform_tensorgrid(self):
    # 4D function for testing!
    def fun(x):
        return np.cos(x[0]) + x[1]**2 + x[2] * x[3]

    x1 = Parameter(lower=-0.5, upper=0.5, points=4)
    x2 = Parameter(lower=-1, upper=2, points=4)
    x3 = Parameter(lower=-3, upper=2, points=4)
    x4 = Parameter(lower=-2, upper=1, points=4)
    parameters = [x1, x2, x3, x4]
    result, points = integrals.tensorgrid(parameters, fun)
    print result
    if np.linalg.norm(result - 99.39829845, 2) < 1e-9:
        print np.linalg.norm(result - 99.39829845, 2)
        print 'Success!'
    else:
        raise RuntimeError('Integration testing failed')
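# Sanity check on the reference value used above: over the box
# [-0.5, 0.5] x [-1, 2] x [-3, 2] x [-2, 1] the exact integral of
# cos(x1) + x2^2 + x3*x4 splits into three terms,
#   2*sin(0.5)*45 + 3*15 + (-2.5)*(-1.5)*3 = 43.1483 + 45 + 11.25 = 99.3983,
# which matches the 99.39829845 reference in the tolerance check.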
def test_vegetation_problem(self):
    # Set the parameters
    x1 = Parameter(lower=38.2, upper=250.4, points=3)
    x2 = Parameter(lower=0.157, upper=0.313, points=3)
    x3 = Parameter(lower=0.002, upper=0.01, points=3)
    x4 = Parameter(lower=0.0002, upper=0.001, points=3)
    parameters = [x1, x2, x3, x4]

    # Set the polynomial basis
    orders = [2, 2, 2, 2]
    polybasis = IndexSet("Total order", orders)
    print polybasis.getIndexSet()
    maximum_number_of_evals = polybasis.getCardinality()

    # Set up effective quadrature subsampling
    esq = EffectiveSubsampling(parameters, polybasis)
    points = esq.getEffectivelySubsampledPoints(maximum_number_of_evals)
    print points

    # Use the output from simulation data (alternative outputs are kept commented out)
    # Output = [15.9881, 16.5091, 16.0162, 15.9950, 16.0310, 16.4592, 16.0958, 15.8507, 16.0757, 15.9252, 16.4301, 16.1259, 16.4682, 16.0501, 16.2200]
    Output = [0.0906050857157, 0.0776969827712, 0.0864368518814, 0.0932615157217,
              0.0892242211848, 0.0767011023127, 0.0866207387298, 0.0977708660066,
              0.0861118221655, 0.0963280722499, 0.0774124991149, 0.087565776892,
              0.0768618592992, 0.0870198933408, 0.0866443598643]
    # Output = [24.8170119614, 23.7770471604, 24.6131673073, 24.9723698096, 24.6920894782, 23.7015914415, 24.6180488646, 25.2502586048, 24.5767649971, 25.1656386185, 23.7951837649, 24.5402057883, 23.7204920408, 24.5140496633, 24.4823906956]
    # Output = [9.28511113565, 8.59116120376, 9.20725286751, 9.22159875619, 9.19565064461, 8.51969919061, 9.15223514391, 9.38391902559, 9.15666537065, 9.31338865761, 8.57543965821, 9.0772799181, 8.53049860903, 9.17308676441, 8.98211942423]
    Output = np.mat(Output)

    # Solve the least squares problem
    x = esq.solveLeastSquares(maximum_number_of_evals, Output.T)
    print x

    # Compute statistics!
    vegeUQ = Statistics(x, polybasis)
    mean = vegeUQ.getMean()
    variance = vegeUQ.getVariance()
    sobol = vegeUQ.getFirstOrderSobol()
    print mean, variance
    print sobol
def testoutputPDFs(self):
    def expfun(x):
        return np.exp(x[0] + x[1]) + 0.5 * np.cos(x[0] * 2 * np.pi)

    # Propagate the input distributions through expfun and plot the output PDF
    s1 = Parameter(lower=-1, upper=1, points=6, shape_parameter_A=0, shape_parameter_B=2.5, param_type='Gaussian')
    s2 = Parameter(lower=0, upper=5, points=6, shape_parameter_A=1.0, shape_parameter_B=3.0, param_type='Weibull')
    T = IndexSet('Tensor grid', [5, 5])
    uq = Polynomial([s1, s2], T)
    output = uq.getPDF(expfun, graph=1)
def test_effective_quadratures_rule(self):
    # 4D function for testing!
    def fun(x):
        return np.cos(x[0]) + x[1]**2 + x[2] * x[3]

    x1 = Parameter(lower=-0.5, upper=0.5, points=4)
    x2 = Parameter(lower=-1, upper=2, points=4)
    x3 = Parameter(lower=-3, upper=2, points=4)
    x4 = Parameter(lower=-2, upper=1, points=4)
    parameters = [x1, x2, x3, x4]
    result, points = integrals.effectivequadratures(parameters, q_parameter=0.8, function=fun)
    print result
    if np.abs(result - 99.3982) < 1e-4:
        print np.abs(result - 99.3982)
        print 'Success!'
    else:
        raise RuntimeError('Integration testing failed')
def gradients_univariate_subsampled():
    # Parameters!
    pt = 8
    x1 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt, derivative_flag=1)
    parameters = [x1]
    dims = len(parameters)

    # Basis selection!
    basis = IndexSet("Total order", orders=[pt-1])
    esq = EffectiveSubsampling(parameters, basis)
    A, p, w = esq.getAmatrix()
    C = esq.getCmatrix()

    # QR column pivoting
    P = qr.mgs_pivoting(A.T)

    # Subsample the rows needed for least squares!
    basis_terms_required = basis.getCardinality()
    minimum_points = np.int((basis_terms_required + dims) / (dims + 1.))
    nodes = P[0:minimum_points]
    A = getRows(A, nodes)
    C = getRows(C, nodes)

    # Subselection!
    w = w[nodes]
    p = p[nodes, :]
    W = np.mat(np.diag(np.sqrt(w)))
    b = W.T * evalfunction(p, fun)
    d = evalgradients(p, fungrad, 'vector')
    R = np.vstack([A, C])

    # Stacked least squares problem (alternative)!
    # x = qr.solveLSQ(R, np.vstack([b, d]))

    # Direct elimination least squares!
    x = qr.solveCLSQ(A, b, C, d)
def test_uniform_sparsegrid(self):
    # 4D function for testing!
    def fun(x):
        return np.cos(x[0]) + x[1]**2 + x[2] * x[3]

    x1 = Parameter(lower=-0.5, upper=0.5, points=4)
    x2 = Parameter(lower=-1, upper=2, points=4)
    x3 = Parameter(lower=-3, upper=2, points=4)
    x4 = Parameter(lower=-2, upper=1, points=4)
    parameters = [x1, x2, x3, x4]
    result, points = integrals.sparsegrid(parameters, level=5, growth_rule='exponential', function=fun)
    print result
    if np.linalg.norm(result - 99.3982, 2) < 1e-4:
        print np.linalg.norm(result - 99.3982, 2)
        print 'Success!'
    else:
        raise RuntimeError('Integration testing failed')
def test_effective_quadratures_rule(self):
    # 4D function for testing!
    def fun(x):
        return np.cos(x[0]) + x[1]**2 + x[2] * x[3]

    x1 = Parameter(lower=-0.5, upper=0.5, points=4)
    x2 = Parameter(lower=-1, upper=2, points=4)
    x3 = Parameter(lower=-3, upper=2, points=4)
    x4 = Parameter(lower=-2, upper=1, points=4)
    parameters = [x1, x2, x3, x4]
    result, points = integrals.effectivequadratures(parameters, q_parameter=0.8, function=fun)
    print result
    if np.abs(result - 99.3982) < 1e-4:
        print np.abs(result - 99.3982)
        print 'Success!'
    else:
        error_function('ERROR: Effective-Quadratures integration routine not working!')
def nogradients_univariate():
    # Parameters!
    pt = 6
    x1 = Parameter(param_type="Uniform", lower=-1.0, upper=1.0, points=pt)
    parameters = [x1, x1]

    # Effective subsampling object!
    esq = EffectiveSubsampling(parameters)

    # Solve the least squares problem
    A, p, w = esq.getAmatrix()  # Is this always square??
    W = np.mat(np.diag(np.sqrt(w)))
    b = W.T * evalfunction(p, fun)
    x = qr.solveLSQ(A, b)
    print x
def test_pseudospectral_coefficient_routines(self):
    def expfun(x):
        return np.exp(x[0] + x[1])

    s = Parameter(lower=-1, upper=1, points=5)
    T = IndexSet('Sparse grid', level=3, growth_rule='linear', dimension=2)
    uq = Polynomial([s, s], T)
    coefficients, index_set, evaled_pts = uq.getPolynomialCoefficients(expfun)
    x, y, z, max_order = twoDgrid(coefficients, index_set)
    z = np.log10(np.abs(z))

    # Plot of the pseudospectral coefficients
    Zm = ma.masked_where(np.isnan(z), z)
    plt.pcolor(y, x, Zm, cmap='jet', vmin=-14, vmax=0)
    plt.title('SPAM coefficients')
    plt.xlabel('i1')
    plt.ylabel('i2')
    plt.colorbar()
    plt.xlim(0, max_order)
    plt.ylim(0, max_order)

    # Plot of the sparse grid points
    plt.plot(evaled_pts[:, 0], evaled_pts[:, 1], 'ro')
#!/usr/bin/env python
from effective_quadratures.parameter import Parameter
import numpy as np

# Setting up the parameter
s = Parameter(param_type='Beta', lower=-2, upper=5, shape_parameter_A=3, shape_parameter_B=2, points=5)
s.getPDF(300, graph=1)

# Computing 1D quadrature points and weights
points, weights = s.getLocalQuadrature()
print points, weights

# Getting the Jacobi matrix
print s.getJacobiMatrix()

# Evaluating the first 5 orthogonal polynomials at some points x
x = np.linspace(-2, 5, 10)
print s.getOrthoPoly(x)
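# A short, hedged extension of the script above: use the rule returned by
# getLocalQuadrature() to approximate the expectation of a test function under
# the Beta parameter s. The integrand g is a hypothetical choice for
# illustration, and the dot product assumes the weights integrate against the
# parameter's probability density (i.e. they sum to one).
g = lambda t: t**2 + np.cos(t)
print np.dot(np.ravel(weights), g(np.ravel(points)))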
def test_declarations(self):
    # Parameter test 1: getPDF()
    var1 = Parameter(points=12, shape_parameter_A=2, shape_parameter_B=3,
                     param_type='TruncatedGaussian', lower=3, upper=10)
    x, y = var1.getPDF(50)
    print x, y
    print '\n'

    # Parameter test 2: getRecurrenceCoefficients()
    var2 = Parameter(points=15, param_type='Uniform', lower=-1, upper=1)
    x, y = var2.getPDF(300, graph=1)
    ab = var2.getRecurrenceCoefficients()
    print ab
    print '\n'

    # Parameter test 3: getJacobiMatrix()
    var3 = Parameter(points=5, param_type='Beta', lower=0, upper=5, shape_parameter_A=2, shape_parameter_B=3)
    J = var3.getJacobiMatrix()
    x, y = var3.getPDF(300, graph=1)
    print J
    print '\n'

    # Parameter test 4: getJacobiEigenvectors()
    var4 = Parameter(points=5, param_type='Gaussian', shape_parameter_A=0, shape_parameter_B=2)
    V = var4.getJacobiEigenvectors()
    print V
    print '\n'

    # Parameter test 5: computeMean()
    var5 = Parameter(points=10, param_type='Weibull', shape_parameter_A=1, shape_parameter_B=5)
    mu = var5.computeMean()
    print mu
    print '\n'

    # Parameter test 6: getOrthoPoly(points)
    x = np.linspace(-1, 1, 15)
    var6 = Parameter(points=10, param_type='Uniform', lower=-1, upper=1)
    poly = var6.getOrthoPoly(x)
    print poly
    print '\n'

    # Parameter test 7: getOrthoPoly() with derivatives
    var7 = Parameter(points=7, param_type='Uniform', lower=-1, upper=1, derivative_flag=1)
    poly, derivatives = var7.getOrthoPoly(x)
    print poly, derivatives
    print '\n'

    # Parameter test 8: getLocalQuadrature()
    var8 = Parameter(points=5, shape_parameter_A=0.8, param_type='Exponential')
    p, w = var8.getLocalQuadrature()
    print p, w
    print '\n'

    return 0