def solve_nonlinear(self, params, unknowns, resids):

    power = params['power']
    method_dict = params['method_dict']
    dist = method_dict['distribution']
    rule = method_dict['rule']
    n = len(power)
    if rule != 'rectangle':
        points, weights = cp.generate_quadrature(order=n - 1, domain=dist, rule=rule)
    # else:
    #     points, weights = quadrature_rules.rectangle(n, method_dict['distribution'])

    poly = cp.orth_chol(n - 1, dist)
    # poly = cp.orth_bert(n-1, dist)
    # double check this is giving me good orthogonal polynomials.
    # print poly, '\n'
    p2 = cp.outer(poly, poly)
    # print 'chol', cp.E(p2, dist)
    norms = np.diagonal(cp.E(p2, dist))
    print 'diag', norms

    expansion, coeff = cp.fit_quadrature(poly, points, weights, power, retall=True, norms=norms)
    # expansion, coeff = cp.fit_quadrature(poly, points, weights, power, retall=True)

    mean = cp.E(expansion, dist)
    print 'mean cp.E =', mean
    # mean = sum(power*weights)
    print 'mean sum =', sum(power * weights)
    print 'mean coeff =', coeff[0]
    std = cp.Std(expansion, dist)

    print mean
    print std
    print np.sqrt(np.sum(coeff[1:]**2 * cp.E(poly**2, dist)[1:]))
    # std = np.sqrt(np.sum(coeff[1:]**2 * cp.E(poly**2, dist)[1:]))

    # number of hours in a year
    hours = 8760.0
    # promote statistics to class attribute
    unknowns['mean'] = mean * hours
    unknowns['std'] = std * hours

    print 'In ChaospyStatistics'
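# For reference, a minimal standalone sketch of the same quadrature-based PCE
# workflow (mean and standard deviation of a model output). The distribution,
# model function, and order below are illustrative assumptions, not taken from
# the component above, and it assumes the older chaospy API (orth_ttr,
# generate_quadrature, fit_quadrature) used throughout these snippets.
import numpy as np
import chaospy as cp

dist = cp.Normal(0.0, 1.0)          # illustrative 1-D input distribution
model = lambda z: z**2 + z          # illustrative model

order = 4
nodes, weights = cp.generate_quadrature(order, dist, rule="G")  # Gaussian quadrature
evals = model(nodes[0])                                          # model evaluated at the nodes

poly = cp.orth_ttr(order, dist)                                  # orthogonal polynomial basis
expansion, coeff = cp.fit_quadrature(poly, nodes, weights, evals, retall=True)

# the PCE mean is the zeroth coefficient; the variance is the norm-weighted
# sum of the squared remaining coefficients
print(cp.E(expansion, dist), cp.Std(expansion, dist))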
def test_orthogonals():
    dist = cp.Iid(cp.Normal(), dim)
    cp.orth_gs(order, dist)
    cp.orth_ttr(order, dist)
    cp.orth_chol(order, dist)
print("\nFirst Order Indices") print(pd.DataFrame(Sensitivities,columns=['Smc','Spc','Sa'],index=row_labels).round(3)) # print("\nRelative errors") # rel_errors=np.column_stack(((S_mc - s**2)/s**2,(S_pc - s**2)/s**2)) # print(pd.DataFrame(rel_errors,columns=['Error Smc','Error Spc'],index=row_labels).round(3)) # Polychaos convergence Npc_list = np.logspace(1, 3, 10).astype(int) error = [] for i, Npc in enumerate(Npc_list): Zpc = jpdf.sample(Npc) Ypc = linear_model(w, Zpc.T) Npol = 4 poly = cp.orth_chol(Npol, jpdf) approx = cp.fit_regression(poly, Zpc, Ypc, rule="T") s_pc = cp.Sens_m(approx, jpdf) error.append(LA.norm((s_pc - s**2)/s**2)) plt.figure() plt.semilogy(Npc_list, error) _=plt.xlabel('Nr Z') _=plt.ylabel('L2-norm of error in Sobol indices') # # Scatter plots of data, z-slices, and linear model fig=plt.figure() Ndz = 10 # Number of slices of the Z-axes Zslice = np.zeros((Nrv, Ndz)) # array for mean-values in the slices
# example orthogonalization schemes
# a normal random variable
n = cp.Normal(0, 1)

x = np.linspace(0, 1, 50)

# the polynomial order of the orthogonal polynomials
polynomial_order = 3

poly = cp.orth_bert(polynomial_order, n, normed=True)
print('Bertran recursion {}'.format(poly))
ax = plt.subplot(221)
ax.set_title('Bertran recursion')
_ = plt.plot(x, poly(x).T)
_ = plt.xticks([])

poly = cp.orth_chol(polynomial_order, n, normed=True)
print('Cholesky decomposition {}'.format(poly))
ax = plt.subplot(222)
ax.set_title('Cholesky decomposition')
_ = plt.plot(x, poly(x).T)
_ = plt.xticks([])

poly = cp.orth_ttr(polynomial_order, n, normed=True)
print('Discretized Stieltjes / three terms recursion {}'.format(poly))
ax = plt.subplot(223)
ax.set_title('Discretized Stieltjes')
_ = plt.plot(x, poly(x).T)

poly = cp.orth_gs(polynomial_order, n, normed=True)
print('Modified Gram-Schmidt {}'.format(poly))
ax = plt.subplot(224)
def test_orth_chol():
    # for a standard normal, the three-terms-recurrence and Cholesky constructions
    # should yield the same orthonormal basis up to numerical round-off
    dist = cp.Normal(0, 1)
    orth1 = cp.orth_ttr(5, dist, normed=True)
    orth2 = cp.orth_chol(5, dist, normed=True)
    eps = cp.sum((orth1 - orth2)**2)
    assert np.allclose(eps(np.linspace(-100, 100, 5)), 0)