def test_orth_ttr():
    """Verify that cp.orth_ttr builds a mutually orthogonal basis.

    Off-diagonal entries of the Gram matrix E[P_i * P_j] must vanish,
    and the covariance of the non-constant terms must equal the
    corresponding Gram sub-matrix.
    """
    dist = cp.Normal(0, 1)
    polynomials = cp.orth_ttr(5, dist)

    gram = cp.E(cp.outer(polynomials, polynomials), dist)
    # Orthogonality: everything off the diagonal is (numerically) zero.
    off_diagonal = gram - np.diag(np.diag(gram))
    assert np.allclose(off_diagonal, 0)

    # For the non-constant terms, Cov coincides with the Gram matrix.
    covariance = cp.Cov(polynomials[1:], dist)
    assert np.allclose(gram[1:, 1:], covariance)
def solve_nonlinear(self, params, unknowns, resids):
    """Propagate the uncertain power samples through a polynomial-chaos
    expansion and store the annualized statistics.

    Parameters
    ----------
    params : mapping
        Must provide 'power' (model output sampled at the quadrature
        points) and 'method_dict' with keys 'distribution' (a chaospy
        distribution) and 'rule' (quadrature rule name).
    unknowns : mapping
        Receives 'mean' and 'std', each scaled by hours per year.
    resids : object
        Unused; present to satisfy the component interface.

    Raises
    ------
    NotImplementedError
        If the 'rectangle' rule is requested (no quadrature generator
        is wired up for it here).
    """
    power = params['power']
    method_dict = params['method_dict']
    dist = method_dict['distribution']
    rule = method_dict['rule']
    n = len(power)

    if rule != 'rectangle':
        points, weights = cp.generate_quadrature(order=n - 1, domain=dist, rule=rule)
    else:
        # BUG FIX: the original fell through here with `points`/`weights`
        # undefined, which surfaced later as a confusing NameError at
        # fit_quadrature. Fail fast with a clear message instead.
        # (A rectangle rule would come from e.g.
        # quadrature_rules.rectangle(n, method_dict['distribution']).)
        raise NotImplementedError(
            "quadrature rule 'rectangle' is not supported by solve_nonlinear")

    # Orthogonal basis via Cholesky decomposition; norms are the diagonal
    # of the Gram matrix E[poly_i * poly_j].
    poly = cp.orth_chol(n - 1, dist)
    norms = np.diagonal(cp.E(cp.outer(poly, poly), dist))
    print('diag', norms)

    expansion, coeff = cp.fit_quadrature(
        poly, points, weights, power, retall=True, norms=norms)

    mean = cp.E(expansion, dist)
    std = cp.Std(expansion, dist)

    # Diagnostic output: the three mean estimates should agree.
    print('mean cp.E =', mean)
    print('mean sum =', sum(power * weights))
    print('mean coeff =', coeff[0])
    print(mean)
    print(std)
    print(np.sqrt(np.sum(coeff[1:]**2 * cp.E(poly**2, dist)[1:])))

    # Annualize: number of hours in a year.
    hours = 8760.0
    unknowns['mean'] = mean * hours
    unknowns['std'] = std * hours
    print('In ChaospyStatistics')
def galerkin_approx(coordinates, joint, expansion_small, norms_small):
    """Solve the Galerkin-projected coefficient ODEs and rebuild the
    polynomial-chaos approximation on `coordinates`.

    The initial condition is E[alpha*phi_k]/<phi_k,phi_k> and the
    coefficient dynamics are driven by -E[beta*phi_n*phi_k].
    """
    var_alpha, var_beta = chaospy.variable(2)

    # Projected initial condition c_k(0) = E[alpha*phi_k] / <phi_k,phi_k>.
    initial_condition = chaospy.E(var_alpha*expansion_small, joint)/norms_small

    # Coupling matrix E[beta * phi_n * phi_k] from the Galerkin projection.
    gram = chaospy.outer(expansion_small, expansion_small)
    coupling = chaospy.E(var_beta*gram, joint)

    def ode_rhs(coeffs, _time):
        # d/dt c_k = -sum_n c_n * E[beta*phi_n*phi_k] / <phi_k,phi_k>
        return -numpy.sum(coeffs*coupling, -1)/norms_small

    coefficients = odeint(
        func=ode_rhs,
        y0=initial_condition,
        t=coordinates,
    )
    return chaospy.sum(expansion_small*coefficients, -1)
# Intrusive Galerkin solution of the decay problem u'(x) = -a*u(x),
# u(0) = I, with uncertain rate a ~ U(0, 0.1) and initial value I ~ U(8, 10).
import chaospy as cp
import numpy as np
import odespy  # third-party collection of ODE solvers

# Intrusive Galerkin method
dist_a = cp.Uniform(0, 0.1)
dist_I = cp.Uniform(8, 10)
dist = cp.J(dist_a, dist_I)  # joint multivariate dist

# Orthogonal polynomial basis (three-terms recurrence) and its norms <P_k,P_k>.
P, norms = cp.orth_ttr(2, dist, retall=True)
variable_a, variable_I = cp.variable(2)  # symbolic stand-ins for (a, I)

# Galerkin projection: E[a*P_n*P_k] couples the coefficient ODEs,
# E[I*P_k] gives the projected initial condition.
PP = cp.outer(P, P)
E_aPP = cp.E(variable_a*PP, dist)
E_IP = cp.E(variable_I*P, dist)

def right_hand_side(c, x):  # c' = right_hand_side(c, x)
    return -np.dot(E_aPP, c)/norms  # -M*c

initial_condition = E_IP/norms
solver = odespy.RK4(right_hand_side)
solver.set_initial_condition(initial_condition)
x = np.linspace(0, 10, 1000)
c = solver.solve(x)[0]
u_hat = cp.dot(P, c)  # polynomial-chaos approximation of u(x; a, I)

# Rosenblatt transformation using point collocation
# Intrusive Galerkin solution of the decay problem u'(x) = -a*u(x),
# u(0) = I, with uncertain rate a ~ U(0, 0.1) and initial value I ~ U(8, 10).
# NOTE(review): this appears to duplicate an earlier identical script.
import chaospy as cp
import numpy as np
import odespy  # third-party collection of ODE solvers

# Intrusive Galerkin method
dist_a = cp.Uniform(0, 0.1)
dist_I = cp.Uniform(8, 10)
dist = cp.J(dist_a, dist_I)  # joint multivariate dist

# Orthogonal polynomial basis (three-terms recurrence) and its norms <P_k,P_k>.
P, norms = cp.orth_ttr(2, dist, retall=True)
variable_a, variable_I = cp.variable(2)  # symbolic stand-ins for (a, I)

# Galerkin projection: E[a*P_n*P_k] couples the coefficient ODEs,
# E[I*P_k] gives the projected initial condition.
PP = cp.outer(P, P)
E_aPP = cp.E(variable_a * PP, dist)
E_IP = cp.E(variable_I * P, dist)

def right_hand_side(c, x):  # c' = right_hand_side(c, x)
    return -np.dot(E_aPP, c) / norms  # -M*c

initial_condition = E_IP / norms
solver = odespy.RK4(right_hand_side)
solver.set_initial_condition(initial_condition)
x = np.linspace(0, 10, 1000)
c = solver.solve(x)[0]
u_hat = cp.dot(P, c)  # polynomial-chaos approximation of u(x; a, I)

# Rosenblatt transformation using point collocation
def test_approx_expansion(expansion_approx, expansion_small, distribution):
    """The approximated expansion must reproduce the Gram matrix of the
    small expansion to tight tolerance."""
    gram_small = chaospy.E(
        chaospy.outer(expansion_small, expansion_small), distribution)
    gram_approx = chaospy.E(
        chaospy.outer(expansion_approx, expansion_approx), distribution)
    assert numpy.allclose(gram_small, gram_approx, rtol=1e-12)
def test_orthogonality_large(expansion_large, distribution):
    """Gram matrix of the large expansion must be (loosely) the identity."""
    gram = chaospy.E(
        chaospy.outer(expansion_large, expansion_large), distribution)
    identity = numpy.eye(len(gram))
    # Looser tolerance: high-order terms accumulate more numerical error.
    assert numpy.allclose(gram, identity, rtol=1e-4)
def test_orthogonality_small(expansion_small, distribution):
    """Gram matrix of the small expansion must be the identity to tight
    tolerance."""
    gram = chaospy.E(
        chaospy.outer(expansion_small, expansion_small), distribution)
    identity = numpy.eye(len(gram))
    assert numpy.allclose(gram, identity, rtol=1e-8)
# u' = -a*u # d/dx sum(c*P) = -a*sum(c*P) # <d/dx sum(c*P),P[k]> = <-a*sum(c*P), P[k]> # d/dx c[k]*<P[k],P[k]> = -sum(c*<a*P,P[k]>) # d/dx c = -E( outer(a*P,P) ) / E( P*P ) # # u(0) = I # <sum(c(0)*P), P[k]> = <I, P[k]> # c[k](0) <P[k],P[k]> = <I, P[k]> # c(0) = E( I*P ) / E( P*P ) order = 5 P, norm = cp.orth_ttr(order, dist, retall=True, normed=True) # support structures q0, q1 = cp.variable(2) P_nk = cp.outer(P, P) E_ank = cp.E(q0*P_nk, dist) E_ik = cp.E(q1*P, dist) sE_ank = cp.sum(E_ank, 0) # Right hand side of the ODE def f(c_k, x): return -cp.sum(c_k*E_ank, -1)/norm solver = odespy.RK4(f) c_0 = E_ik/norm solver.set_initial_condition(c_0) c_n, x = solver.solve(x) U_hat = cp.sum(P*c_n, -1) mean = cp.E(U_hat, dist)
# u' = -a*u # d/dx sum(c*P) = -a*sum(c*P) # <d/dx sum(c*P),P[k]> = <-a*sum(c*P), P[k]> # d/dx c[k]*<P[k],P[k]> = -sum(c*<a*P,P[k]>) # d/dx c = -E( outer(a*P,P) ) / E( P*P ) # # u(0) = I # <sum(c(0)*P), P[k]> = <I, P[k]> # c[k](0) <P[k],P[k]> = <I, P[k]> # c(0) = E( I*P ) / E( P*P ) order = 5 P, norm = cp.orth_ttr(order, dist, retall=True, normed=True) # support structures q0, q1 = cp.variable(2) P_nk = cp.outer(P, P) E_ank = cp.E(q0 * P_nk, dist) E_ik = cp.E(q1 * P, dist) sE_ank = cp.sum(E_ank, 0) # Right hand side of the ODE def f(c_k, x): return -cp.sum(c_k * E_ank, -1) / norm solver = odespy.RK4(f) c_0 = E_ik / norm solver.set_initial_condition(c_0) c_n, x = solver.solve(x) U_hat = cp.sum(P * c_n, -1)