def test_hadamard_accuracy(self):
    """Compare reduced collocation over an active subspace against the
    analytical mean/variance of the Hadamard quadratic QoI."""
    systemsize = 4
    eigen_decayrate = 2.0

    # Hadamard quadratic QoI with a standard-normal input distribution
    QoI = examples.HadamardQuadratic(systemsize, eigen_decayrate)
    mu = np.zeros(systemsize)
    std_dev = np.eye(systemsize)
    jdist = cp.MvNormal(mu, std_dev)

    # Identify the dominant directions via an SVD-based active subspace
    active_subspace = ActiveSubspace(QoI,
                                     n_dominant_dimensions=4,
                                     n_monte_carlo_samples=10000,
                                     use_svd=True,
                                     read_rv_samples=False,
                                     write_rv_samples=False)
    active_subspace.getDominantDirections(QoI, jdist)

    # Analytical reference statistics
    mu_j_analytical = QoI.eval_analytical_QoI_mean(mu, cp.Cov(jdist))
    var_j_analytical = QoI.eval_analytical_QoI_variance(mu, cp.Cov(jdist))

    # Reduced stochastic collocation along the dominant directions
    QoI_dict = {
        'Hadamard': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
        },
    }
    sc_obj_active = StochasticCollocation2(jdist, 4, 'MvNormal', QoI_dict,
                                           include_derivs=False,
                                           reduced_collocation=True,
                                           dominant_dir=active_subspace.dominant_dir)
    sc_obj_active.evaluateQoIs(jdist)
    mu_j_active = sc_obj_active.mean(of=['Hadamard'])
    var_j_active = sc_obj_active.variance(of=['Hadamard'])
def test_multipleQoI(self):
    """Exercise stochastic collocation with two QoIs at once.

    Only the mean is computed here; the point of the test is that the
    collocation object loops over multiple QoIs correctly.
    """
    systemsize = 2
    theta = 0
    mu = mean_2dim  # np.random.randn(systemsize)
    std_dev = std_dev_2dim  # np.diag(np.random.rand(systemsize))
    jdist = cp.MvNormal(mu, std_dev)

    # Two independent QoI objects evaluated in the same collocation pass
    QoI1 = examples.Paraboloid2D(systemsize, (theta, ))
    QoI2 = examples.PolyRVDV()
    QoI_dict = {
        'paraboloid2': {
            'QoI_func': QoI1.eval_QoI,
            'output_dimensions': 1,
        },
        'PolyRVDV': {
            'QoI_func': QoI2.eval_QoI,
            'output_dimensions': 1,
        }
    }
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict)
    sc_obj.evaluateQoIs(jdist)
    mu_js = sc_obj.mean(of=['paraboloid2', 'PolyRVDV'])

    # Compare against known values
    # 1. Paraboloid2D: analytical mean is available in closed form
    mu_j1_analytical = QoI1.eval_QoI_analyticalmean(mu, cp.Cov(jdist))
    err = abs((mu_js['paraboloid2'][0] - mu_j1_analytical) / mu_j1_analytical)
    self.assertTrue(err < 1.e-15)
def test_nonrv_derivatives(self):
    """Verify analytical derivatives w.r.t. design variables (non-random
    parameters) against complex-step finite differences.

    Checks dmean, dvariance, and dStdDev of the PolyRVDV QoI.
    """
    systemsize = 2
    n_parameters = 2
    mu = mean_2dim  # np.random.randn(systemsize)
    std_dev = std_dev_2dim  # np.diag(np.random.rand(systemsize))
    jdist = cp.MvNormal(mu, std_dev)
    QoI = examples.PolyRVDV(data_type=complex)

    # Create the Stochastic Collocation object
    deriv_dict = {
        'dv': {
            'dQoI_func': QoI.eval_QoIGradient_dv,
            'output_dimensions': n_parameters
        }
    }
    QoI_dict = {
        'PolyRVDV': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
            'deriv_dict': deriv_dict
        }
    }
    dv = np.random.randn(systemsize) + 0j
    QoI.set_dv(dv)
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict,
                                    include_derivs=True, data_type=complex)
    sc_obj.evaluateQoIs(jdist, include_derivs=True)
    dmu_j = sc_obj.dmean(of=['PolyRVDV'], wrt=['dv'])
    dvar_j = sc_obj.dvariance(of=['PolyRVDV'], wrt=['dv'])
    dstd_dev = sc_obj.dStdDev(of=['PolyRVDV'], wrt=['dv'])

    # Complex-step reference: perturb each design variable by i*1e-30 and
    # take the imaginary part of the statistic divided by the step size.
    pert = complex(0, 1e-30)
    dmu_j_complex = np.zeros(n_parameters, dtype=complex)
    dvar_j_complex = np.zeros(n_parameters, dtype=complex)
    dstd_dev_complex = np.zeros(n_parameters, dtype=complex)
    for i in range(0, n_parameters):
        dv[i] += pert
        QoI.set_dv(dv)
        sc_obj.evaluateQoIs(jdist, include_derivs=False)
        mu_j = sc_obj.mean(of=['PolyRVDV'])
        var_j = sc_obj.variance(of=['PolyRVDV'])
        std_dev_j = np.sqrt(var_j['PolyRVDV'][0, 0])
        dmu_j_complex[i] = mu_j['PolyRVDV'].imag / pert.imag
        dvar_j_complex[i] = var_j['PolyRVDV'].imag / pert.imag
        dstd_dev_complex[i] = std_dev_j.imag / pert.imag
        dv[i] -= pert

    # NOTE(review): these comparisons are signed (no abs), so a large
    # negative error would pass silently — consider np.abs. Left unchanged
    # to preserve the existing test semantics.
    err1 = dmu_j['PolyRVDV']['dv'] - dmu_j_complex
    self.assertTrue((err1 < 1.e-13).all())
    err2 = dvar_j['PolyRVDV']['dv'] - dvar_j_complex
    self.assertTrue((err2 < 1.e-13).all())
    err3 = dstd_dev['PolyRVDV']['dv'] - dstd_dev_complex
    # BUG FIX: the original asserted err2 here a second time; the standard
    # deviation derivative (err3) was never actually checked.
    self.assertTrue((err3 < 1.e-13).all())
def run_hadamard(systemsize, eigen_decayrate, std_dev, n_eigenmodes):
    """Run reduced stochastic collocation on the Hadamard quadratic and
    return the relative error of the collocated mean vs. the analytical mean.

    Parameters
    ----------
    systemsize : int, number of random variables
    eigen_decayrate : float, decay rate of the quadratic's eigenvalues
    std_dev : array, per-variable standard deviations
    n_eigenmodes : int, number of dominant eigenmodes to keep
    """
    n_collocation_pts = 2

    # Hadamard quadratic QoI with a random mean vector
    QoI = examples.HadamardQuadratic(systemsize, eigen_decayrate)
    x = np.random.rand(QoI.systemsize)
    jdist = cp.MvNormal(x, np.diag(std_dev))

    # Inexact-Hessian (Arnoldi) dimension reduction, truncated at n_eigenmodes
    threshold_factor = 0.5
    dominant_space = DimensionReduction(threshold_factor=threshold_factor,
                                        exact_Hessian=False,
                                        n_arnoldi_sample=71,
                                        min_eigen_accuracy=1.e-2)
    dominant_space.getDominantDirections(QoI, jdist, max_eigenmodes=n_eigenmodes)

    # Reduced collocation along the dominant directions
    QoI_dict = {
        'Hadamard': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
        },
    }
    sc_obj = StochasticCollocation2(jdist, n_collocation_pts, 'MvNormal',
                                    QoI_dict, include_derivs=False,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_space.dominant_dir)
    sc_obj.evaluateQoIs(jdist)
    mu_j_dict = sc_obj.mean(of=['Hadamard'])
    mu_j = mu_j_dict['Hadamard']

    # Analytical mean of the Hadamard quadratic for comparison
    covariance = cp.Cov(jdist)
    mu_analytic = QoI.eval_analytical_QoI_mean(x, covariance)

    relative_error = np.linalg.norm((mu_j - mu_analytic) / mu_analytic)
    return relative_error
def test_reduced_montecarlo(self):
    """Check reduced Monte Carlo against reduced stochastic collocation on
    the Hadamard quadratic, using known eigenvectors for the subspace."""
    systemsize = 4
    eigen_decayrate = 2.0

    # Create Hadamard Quadratic object
    QoI = HadamardQuadratic(systemsize, eigen_decayrate)

    # Known spectrum of the isoprobabilistic Hessian; keep the first three
    # eigenvectors as the dominant subspace.
    true_eigenvals = np.array([0.08, 0.02, 0.005, 0.00888888888888889])
    true_eigenvecs = np.array([[0.5, 0.5, -0.5, -0.5],
                               [0.5, -0.5, 0.5, -0.5],
                               [0.5, 0.5, 0.5, 0.5],
                               [0.5, -0.5, -0.5, 0.5]])
    dominant_dir = true_eigenvecs[:, 0:3]

    QoI_dict = {
        'hadamard4_2': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
        }
    }

    # Input distribution
    mu = np.ones(systemsize)
    std_dev = 0.2 * np.eye(systemsize)
    jdist = cp.MvNormal(mu, std_dev)

    # Reduced Monte Carlo estimate of the mean
    nsample = 100000
    mc_obj = MonteCarlo(nsample, jdist, QoI_dict,
                        reduced_collocation=True,
                        dominant_dir=dominant_dir,
                        include_derivs=False)
    mc_obj.getSamples(jdist, include_derivs=False)
    mu_j_mc = mc_obj.mean(jdist, of=['hadamard4_2'])

    # Reduced stochastic collocation estimate for comparison
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir)
    sc_obj.evaluateQoIs(jdist)
    mu_j_sc = sc_obj.mean(of=['hadamard4_2'])

    rel_err = abs((mu_j_mc['hadamard4_2'] - mu_j_sc['hadamard4_2'])
                  / mu_j_sc['hadamard4_2'])
    self.assertTrue(rel_err[0] < 1.e-3)
def test_reduced_normalStochasticCollocation3D(self):
    """Reduced collocation on a 3D paraboloid, compared with the analytical
    expected value.

    This is not a very good test because we are comparing the reduced
    collocation against the analytical expected value. The only thing it
    tells us is that the solution is within the ballpark of the actual
    value. We still need to come up with a better test.
    """
    systemsize = 3
    mu = mean_3dim  # np.random.randn(systemsize)
    std_dev = std_dev_3dim  # abs(np.diag(np.random.randn(systemsize)))
    jdist = cp.MvNormal(mu, std_dev)

    # Create QoI Object
    QoI = examples.Paraboloid3D(systemsize)

    # Dominant subspace: first two coordinate directions only.
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # use the builtin float (same dtype, float64).
    dominant_dir = np.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0]],
                            dtype=float)

    # Create the Stochastic Collocation object
    deriv_dict = {
        'xi': {
            'dQoI_func': QoI.eval_QoIGradient,
            'output_dimensions': systemsize
        }
    }
    QoI_dict = {
        'paraboloid': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
            'deriv_dict': deriv_dict
        }
    }
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir)
    sc_obj.evaluateQoIs(jdist)
    mu_js = sc_obj.mean(of=['paraboloid'])
    var_js = sc_obj.variance(of=['paraboloid'])

    # Analytical mean (loose tolerance: the subspace drops one direction)
    mu_j_analytical = QoI.eval_QoI_analyticalmean(mu, cp.Cov(jdist))
    err = abs((mu_js['paraboloid'][0] - mu_j_analytical) / mu_j_analytical)
    self.assertTrue(err < 1e-1)

    # Analytical variance
    var_j_analytical = QoI.eval_QoI_analyticalvariance(mu, cp.Cov(jdist))
    err = abs(
        (var_js['paraboloid'][0, 0] - var_j_analytical) / var_j_analytical)
    self.assertTrue(err < 0.01)
def test_normalStochasticCollocation3D(self):
    """Full (non-reduced) collocation on a 3D paraboloid: mean and variance
    must match the analytical values to machine precision."""
    systemsize = 3
    mu = mean_3dim  # np.random.randn(systemsize)
    std_dev = std_dev_3dim  # np.diag(np.random.rand(systemsize))
    jdist = cp.MvNormal(mu, std_dev)

    # Create QoI Object
    QoI = examples.Paraboloid3D(systemsize)

    # Create the Stochastic Collocation object
    deriv_dict = {
        'xi': {
            'dQoI_func': QoI.eval_QoIGradient,
            'output_dimensions': systemsize
        }
    }
    QoI_dict = {
        'paraboloid': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
            'deriv_dict': deriv_dict
        }
    }
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict)
    sc_obj.evaluateQoIs(jdist)
    mu_js = sc_obj.mean(of=['paraboloid'])
    var_js = sc_obj.variance(of=['paraboloid'])

    # Analytical mean: quadratic QoI + 3-point rule is exact
    mu_j_analytical = QoI.eval_QoI_analyticalmean(mu, cp.Cov(jdist))
    err = abs((mu_js['paraboloid'][0] - mu_j_analytical) / mu_j_analytical)
    self.assertTrue(err < 1.e-15)

    # Analytical variance
    var_j_analytical = QoI.eval_QoI_analyticalvariance(mu, cp.Cov(jdist))
    err = abs(
        (var_js['paraboloid'][0, 0] - var_j_analytical) / var_j_analytical)
    self.assertTrue(err < 1.e-15)
def test_derivatives_scalarQoI(self):
    """Check dmean of a scalar paraboloid QoI against its analytical value."""
    systemsize = 3
    mu = mean_3dim  # np.random.randn(systemsize)
    std_dev = std_dev_3dim  # np.diag(np.random.rand(systemsize))
    jdist = cp.MvNormal(mu, std_dev)

    # Create QoI Object
    QoI = examples.Paraboloid3D(systemsize)

    # Create the Stochastic Collocation object
    deriv_dict = {
        'xi': {
            'dQoI_func': QoI.eval_QoIGradient,
            'output_dimensions': systemsize
        }
    }
    QoI_dict = {
        'paraboloid': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
            'deriv_dict': deriv_dict
        }
    }
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict,
                                    include_derivs=True)
    sc_obj.evaluateQoIs(jdist, include_derivs=True)
    dmu_j = sc_obj.dmean(of=['paraboloid'], wrt=['xi'])
    # dvar_j = sc_obj.dvariance(of=['paraboloid'], wrt=['xi'])

    # Analytical gradient of the mean w.r.t. the mean vector
    dmu_j_analytical = np.array([100 * mu[0], 50 * mu[1], 2 * mu[2]])
    err = abs(
        (dmu_j['paraboloid']['xi'] - dmu_j_analytical) / dmu_j_analytical)
    self.assertTrue((err < 1.e-12).all())
def test_nonrv_derivatives_reduced_collocation(self):
    """Verify analytical derivatives w.r.t. design variables against
    complex-step finite differences, using reduced collocation over the
    dominant subspace of the exact Hessian.
    """
    systemsize = 2
    n_parameters = 2
    mu = mean_2dim  # np.random.randn(systemsize)
    std_dev = std_dev_2dim  # abs(np.diag(np.random.randn(systemsize)))
    jdist = cp.MvNormal(mu, std_dev)
    QoI = examples.PolyRVDV(data_type=complex)

    # Create the Stochastic Collocation object
    deriv_dict = {
        'dv': {
            'dQoI_func': QoI.eval_QoIGradient_dv,
            'output_dimensions': n_parameters
        }
    }
    QoI_dict = {
        'PolyRVDV': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
            'deriv_dict': deriv_dict
        }
    }
    dv = np.random.randn(systemsize) + 0j
    QoI.set_dv(dv)

    # Create dimension reduction object
    threshold_factor = 0.9
    dominant_space = DimensionReduction(threshold_factor=threshold_factor,
                                        exact_Hessian=True)

    # Get the eigenmodes of the Hessian product and the dominant indices
    dominant_space.getDominantDirections(QoI, jdist)
    dominant_dir = dominant_space.iso_eigenvecs[:, dominant_space.
                                                dominant_indices]
    sc_obj = StochasticCollocation2(jdist, 4, 'MvNormal', QoI_dict,
                                    include_derivs=True,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir,
                                    data_type=complex)
    sc_obj.evaluateQoIs(jdist, include_derivs=True)
    dmu_j = sc_obj.dmean(of=['PolyRVDV'], wrt=['dv'])
    dvar_j = sc_obj.dvariance(of=['PolyRVDV'], wrt=['dv'])
    dstd_dev = sc_obj.dStdDev(of=['PolyRVDV'], wrt=['dv'])

    # Complex-step reference: perturb each design variable by i*1e-30 and
    # take imag(statistic) / 1e-30 as the derivative.
    pert = complex(0, 1e-30)
    dmu_j_complex = np.zeros(n_parameters, dtype=complex)
    dvar_j_complex = np.zeros(n_parameters, dtype=complex)
    dstd_dev_complex = np.zeros(n_parameters, dtype=complex)
    for i in range(0, n_parameters):
        dv[i] += pert
        QoI.set_dv(dv)
        sc_obj.evaluateQoIs(jdist, include_derivs=False)
        mu_j = sc_obj.mean(of=['PolyRVDV'])
        var_j = sc_obj.variance(of=['PolyRVDV'])
        std_dev_j = np.sqrt(var_j['PolyRVDV'][0, 0])
        dmu_j_complex[i] = mu_j['PolyRVDV'].imag / pert.imag
        dvar_j_complex[i] = var_j['PolyRVDV'].imag / pert.imag
        dstd_dev_complex[i] = std_dev_j.imag / pert.imag
        dv[i] -= pert

    # NOTE(review): these comparisons are signed (no abs), so a large
    # negative error would pass silently — consider np.abs. Left unchanged
    # to preserve the existing test semantics.
    err1 = dmu_j['PolyRVDV']['dv'] - dmu_j_complex
    self.assertTrue((err1 < 1.e-13).all())
    err2 = dvar_j['PolyRVDV']['dv'] - dvar_j_complex
    self.assertTrue((err2 < 1.e-10).all())
    err3 = dstd_dev['PolyRVDV']['dv'] - dstd_dev_complex
    # BUG FIX: the original asserted err2 here a second time; the standard
    # deviation derivative (err3) was never actually checked.
    self.assertTrue((err3 < 1.e-13).all())
sample_radius=1.e-2) dominant_space.getDominantDirections(UQObj.QoI, UQObj.jdist, max_eigenmodes=4) # # Full collocation # sc_obj = StochasticCollocation2(UQObj.jdist, 3, 'MvNormal', UQObj.QoI_dict, # include_derivs=False) # sc_obj.evaluateQoIs(UQObj.jdist, include_derivs=False) # REduced collocation dominant_dir = dominant_space.iso_eigenvecs[:, dominant_space. dominant_indices] sc_obj = StochasticCollocation2(UQObj.jdist, 3, 'MvNormal', UQObj.QoI_dict, include_derivs=False, reduced_collocation=True, dominant_dir=dominant_dir) sc_obj.evaluateQoIs(UQObj.jdist, include_derivs=False) # print('fvals = ') # print(sc_obj.QoI_dict['fuelburn']['fvals']) # Print initial value init_mu_j = sc_obj.mean(of=['fuelburn']) init_var_j = sc_obj.variance(of=['fuelburn']) # print("Mean fuelburn = ", init_mu_j['fuelburn'][0]) # print("Variance fuelburn = ", init_var_j['fuelburn'][0]) # print() mfmc_sol = { 'twist_cp': np.array([-1.59, 0.34, 4.50]),
def run_hadamard(systemsize, eigen_decayrate, std_dev, n_eigenmodes):
    """Reduced stochastic collocation on the Hadamard quadratic.

    Returns the relative errors of the collocated mean, variance, and
    standard deviation against their analytical values.

    Parameters
    ----------
    systemsize : int, number of random variables
    eigen_decayrate : float, decay rate of the quadratic's eigenvalues
    std_dev : array, per-variable standard deviations
    n_eigenmodes : int, number of dominant eigenmodes to keep
    """
    n_collocation_pts = 3

    # Hadamard quadratic QoI with a random mean vector
    QoI = examples.HadamardQuadratic(systemsize, eigen_decayrate)
    x = np.random.rand(QoI.systemsize)
    jdist = cp.MvNormal(x, np.diag(std_dev))

    # Build the dominant subspace, either from the exact Hessian or via
    # Arnoldi sampling of Hessian-vector products.
    threshold_factor = 1.0
    use_exact_Hessian = False
    if use_exact_Hessian:
        dominant_space = DimensionReduction(threshold_factor=threshold_factor,
                                            exact_Hessian=True)
        dominant_space.getDominantDirections(QoI, jdist)
        dominant_dir = dominant_space.iso_eigenvecs[:, 0:n_eigenmodes]
    else:
        dominant_space = DimensionReduction(threshold_factor=threshold_factor,
                                            exact_Hessian=False,
                                            n_arnoldi_sample=71,
                                            min_eigen_accuracy=1.e-2)
        dominant_space.getDominantDirections(QoI, jdist,
                                             max_eigenmodes=n_eigenmodes)
        dominant_dir = dominant_space.dominant_dir

    # Reduced collocation along the dominant directions
    QoI_dict = {
        'Hadamard': {
            'QoI_func': QoI.eval_QoI,
            'output_dimensions': 1,
        },
    }
    sc_obj = StochasticCollocation2(jdist, n_collocation_pts, 'MvNormal',
                                    QoI_dict, include_derivs=False,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir)
    sc_obj.evaluateQoIs(jdist)
    mu_j = sc_obj.mean(of=['Hadamard'])
    var_j = sc_obj.variance(of=['Hadamard'])
    std_dev_j = np.sqrt(var_j['Hadamard'])

    # Analytical reference statistics of the Hadamard quadratic
    covariance = cp.Cov(jdist)
    mu_analytic = QoI.eval_analytical_QoI_mean(x, covariance)
    var_analytic = QoI.eval_analytical_QoI_variance(x, covariance)
    std_dev_analytic = np.sqrt(var_analytic)

    relative_error_mu = np.linalg.norm(
        (mu_j['Hadamard'] - mu_analytic) / mu_analytic)
    relative_err_var = np.linalg.norm(
        (var_j['Hadamard'] - var_analytic) / var_analytic)
    relative_err_std_dev = np.linalg.norm(
        (std_dev_j - std_dev_analytic) / std_dev_analytic)

    return relative_error_mu, relative_err_var, relative_err_std_dev
# Number of dominant directions is taken from the command line (e.g. 11).
n_dominant_dir = int(sys.argv[2])
dominant_dir = eigenvecs[:, 0:n_dominant_dir]

# Toggle between a surrogate QoI and the full QoI for the collocation.
use_surrogate_qoi_obj = False
if use_surrogate_qoi_obj:
    surrogate_QoI_dict = {
        'time_duration': {
            'QoI_func': surrogate_QoI.eval_QoI,
            'output_dimensions': 1,
        }
    }
    # Create the stochastic collocation object (3-point rule, surrogate QoI)
    sc_obj = StochasticCollocation2(jdist, 3, 'MvNormal', surrogate_QoI_dict,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir,
                                    include_derivs=False)
    sc_obj.evaluateQoIs(jdist)
else:
    # Create the stochastic collocation object (2-point rule, full QoI)
    sc_obj = StochasticCollocation2(jdist, 2, 'MvNormal', QoI_dict,
                                    reduced_collocation=True,
                                    dominant_dir=dominant_dir,
                                    include_derivs=False)
    sc_obj.evaluateQoIs(jdist)
}, } # Check if plugging back value also yields the expected results QoI.p['oas_scaneagle.wing.thickness_cp'] = np.array([0.008, 0.008, 0.008]) QoI.p['oas_scaneagle.wing.twist_cp'] = np.array([2.5, 2.5, 5.]) QoI.p['oas_scaneagle.wing.sweep'] = 20.0 QoI.p['oas_scaneagle.alpha'] = 5.0 sample_radii = [1.e-1, 1.e-2, 1.e-3, 1.e-4, 1.e-5, 1.e-6] if sys.argv[1] == 'full': # Create the stochastic collocation object sc_obj_full = StochasticCollocation2(jdist, 3, 'MvNormal', QoI_dict, include_derivs=False, reduced_collocation=False) start_time = time.time() sc_obj_full.evaluateQoIs(jdist, include_derivs=False) t_sampling = time.time() mu_j_full = sc_obj_full.mean(of=['fuelburn']) time_elapsed_mu = time.time() - t_sampling t_before_var = time.time() var_j_full = sc_obj_full.variance(of=['fuelburn']) time_elapsed_var = time.time() - t_before_var print('mu_j_full = ', mu_j_full['fuelburn'][0]) print('var_j_full = ', var_j_full['fuelburn'][0, 0])
n_thickness_intersects = UQObj.QoI.p[ 'oas_scaneagle.AS_point_0.wing_perf.thickness_intersects'].size n_CM = 3 print("eigenvals = ", UQObj.dominant_space.iso_eigenvals) print('eigenvecs = \n', UQObj.dominant_space.iso_eigenvecs) print('dominant_dir = \n', UQObj.dominant_space.dominant_dir) print('\n#-----------------------------------------------------------#') # Full collocation use_stochastic_collocation = False use_monte_carlo = True if use_stochastic_collocation: colloc_obj = StochasticCollocation2(UQObj.jdist, 3, 'MvNormal', UQObj.QoI_dict, include_derivs=False) red_colloc_obj = StochasticCollocation2( UQObj.jdist, 3, 'MvNormal', UQObj.QoI_dict, include_derivs=False, reduced_collocation=True, # dominant_dir=custom_eigenvec[:,0:int(sys.argv[1])]) dominant_dir=UQObj.dominant_space.dominant_dir) elif use_monte_carlo: nsample = 100 colloc_obj = MonteCarlo(nsample, UQObj.jdist,