def Spearman(poly, dist, sample=1e4, retall=False, **kws):
    """
    Calculate Spearman's rank-order correlation coefficient.

    Parameters
    ----------
    poly : Poly
        Polynomial of interest.
    dist : Dist
        Defines the space where correlation is taken.
    sample : int
        Number of samples used in estimation.
    retall : bool
        If true, return p-value as well.
    **kws : optional
        Extra keywords passed to dist.sample.

    Returns
    -------
    rho[, p-value]

    rho : float or ndarray
        Correlation output. A float if the problem is two-dimensional;
        a correlation matrix if larger.
    p-value : float or ndarray
        The two-sided p-value for a hypothesis test whose null
        hypothesis is that two sets of data are uncorrelated.
        Has same dimension as rho.
    """
    samples = dist.sample(sample, **kws)
    poly = po.flatten(poly)
    Y = poly(*samples)
    if retall:
        return spearmanr(Y.T)
    return spearmanr(Y.T)[0]

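# Usage sketch for Spearman (illustrative only; it assumes the module is
# exposed as `cp` as in the doctests below, and the exact numbers depend on
# the random samples drawn):
#
#     x, y = cp.variable(2)
#     dist = cp.J(cp.Uniform(0, 1), cp.Normal())
#     poly = cp.Poly([x, x + y])
#     rho = cp.Spearman(poly, dist, sample=1000)        # correlation matrix
#     rho, pval = cp.Spearman(poly, dist, retall=True)  # with two-sided p-values
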
def E_cond(poly, freeze, dist, **kws):
    """
    Conditional expected value of a polynomial.

    The variables selected by `freeze` are kept as symbols, while the
    remaining variables are integrated out against `dist`. Requires
    stochastically independent variables.
    """
    assert not dist.dependent()

    if poly.dim < len(dist):
        poly = po.setdim(poly, len(dist))

    # Convert `freeze` into an array of exponent rows flagging which
    # dimensions are held fixed.
    freeze = po.Poly(freeze)
    freeze = po.setdim(freeze, len(dist))
    keys = freeze.A.keys()
    if len(keys) == 1 and keys[0] == (0,)*len(dist):
        freeze = freeze.A.values()[0]
    else:
        freeze = np.array(keys)
    freeze = freeze.reshape(freeze.size/len(dist), len(dist))

    shape = poly.shape
    poly = po.flatten(poly)

    # Raw moments for every exponent combination appearing in `poly`
    kmax = np.max(poly.keys, 0)+1
    keys = [i for i in np.ndindex(*kmax)]
    vals = dist.mom(np.array(keys).T, **kws).T
    mom = dict(zip(keys, vals))

    A = poly.A.copy()
    keys = A.keys()

    out = {}
    zeros = [0]*poly.dim
    for i in xrange(len(keys)):

        key = list(keys[i])
        a = A[tuple(key)]

        # Move exponents of frozen dimensions from `key` into `zeros`, so the
        # moment lookup only integrates the unfrozen dimensions.
        for d in xrange(poly.dim):
            for j in xrange(len(freeze)):
                if freeze[j, d]:
                    key[d], zeros[d] = zeros[d], key[d]
                    break

        # The frozen exponents (now in `zeros`) label the surviving monomial.
        tmp = a*mom[tuple(key)]
        if tuple(zeros) in out:
            out[tuple(zeros)] = out[tuple(zeros)] + tmp
        else:
            out[tuple(zeros)] = tmp

        # Swap back so `key` and `zeros` are restored for the next term.
        for d in xrange(poly.dim):
            for j in xrange(len(freeze)):
                if freeze[j, d]:
                    key[d], zeros[d] = zeros[d], key[d]
                    break

    out = po.Poly(out, poly.dim, poly.shape, float)
    out = po.reshape(out, shape)
    return out

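# Usage sketch for E_cond (illustrative only; assumes the module is exposed
# as `cp`). Freezing q0 keeps it symbolic while q1 is integrated out:
#
#     q0, q1 = cp.variable(2)
#     dist = cp.J(cp.Uniform(0, 1), cp.Uniform(0, 1))
#     poly = q0*q1 + q1**2
#     cp.E_cond(poly, q0, dist)
#     # expected result: E(q1)*q0 + E(q1**2), i.e. roughly 0.5*q0 + 1/3
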
def Skew(poly, dist=None, **kws):
    """
    Skewness, or element by element 3rd order statistics of a
    distribution or polynomial.

    Parameters
    ----------
    poly : Poly, Dist
        Input to take skewness on.
    dist : Dist
        Defines the space the skewness is taken on.
        It is ignored if `poly` is a distribution.
    **kws : optional
        Extra keywords passed to dist.mom.

    Returns
    -------
    skewness : ndarray
        Element by element skewness along `poly`, where
        `skewness.shape==poly.shape`.

    See Also
    --------
    Corr    Correlation matrix
    Cov     Covariance matrix
    E       Expected value
    Kurt    Kurtosis operator
    Var     Variance operator

    Examples
    --------
    >>> x = cp.variable()
    >>> Z = cp.Gamma()
    >>> print cp.Skew(Z)
    2.0
    """
    if isinstance(poly, di.Dist):
        x = po.variable(len(poly))
        poly, dist = x, poly
    else:
        poly = po.Poly(poly)

    if poly.dim < len(dist):
        poly = po.setdim(poly, len(dist))

    shape = poly.shape
    poly = po.flatten(poly)

    m1 = E(poly, dist)
    m2 = E(poly**2, dist)
    m3 = E(poly**3, dist)
    out = (m3-3*m2*m1+2*m1**3)/(m2-m1**2)**1.5

    out = np.reshape(out, shape)
    return out

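# Note on the Skew formula above: it is the third central moment expressed in
# raw moments, E[(X - m1)**3] = m3 - 3*m2*m1 + 2*m1**3, divided by the
# variance to the power 3/2. A quick sanity check with plain numpy
# (illustrative only, not part of the module):
#
#     import numpy as np
#     x = np.random.exponential(size=10**6)  # Gamma(1); true skewness is 2
#     m1, m2, m3 = x.mean(), (x**2).mean(), (x**3).mean()
#     skew = (m3 - 3*m2*m1 + 2*m1**3) / (m2 - m1**2)**1.5  # roughly 2.0
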
def orth_ttr(order, dist, normed=False, sort="GR", retall=False, **kws):
    """
    Create orthogonal polynomial expansion from the three-term
    recurrence formula.

    Parameters
    ----------
    order : int
        Order of polynomial expansion.
    dist : Dist
        Distribution space where polynomials are orthogonal.
        If dist.ttr exists, it will be used, otherwise Clenshaw-Curtis
        integration will be used. Must be stochastically independent.
    normed : bool
        If True orthonormal polynomials will be used instead of monic.
    sort : str
        Polynomial sorting. Same as in `basis`.
    retall : bool
        If true return norms as well.
    kws : optional
        Keyword arguments passed to the stieltjes method.

    Returns
    -------
    orth[, norms]

    orth : Poly
        Orthogonal polynomial expansion.
    norms : np.ndarray
        Norms of the orthogonal expansion on the form E(orth**2, dist).
        Calculated using recurrence coefficients for stability.

    Examples
    --------
    >>> Z = cp.Normal()
    >>> print cp.orth_ttr(4, Z)
    [1.0, q0, q0^2-1.0, q0^3-3.0q0, -6.0q0^2+3.0+q0^4]
    """
    P, norms, A, B = qu.stieltjes(dist, order, retall=True, **kws)

    if normed:
        for i in xrange(len(P)):
            P[i] = P[i]/np.sqrt(norms[:, i])
        norms = norms**0

    dim = len(dist)
    if dim > 1:
        Q, G = [], []
        indices = ber.bindex(0, order, dim, sort)
        for I in indices:
            q = [P[I[i]][i] for i in xrange(dim)]
            q = reduce(lambda x, y: x*y, q)
            Q.append(q)

        if retall:
            for I in indices:
                g = [norms[i, I[i]] for i in xrange(dim)]
                G.append(np.prod(g))
        P = Q

    else:
        G = norms[0]

    P = po.flatten(po.Poly(P))

    if retall:
        return P, np.array(G)
    return P

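# Usage sketch for orth_ttr (illustrative only; assumes the module is exposed
# as `cp`). In the multivariate case each basis polynomial is a product of
# univariate three-term-recurrence polynomials, one factor per dimension:
#
#     dist = cp.J(cp.Normal(), cp.Uniform())
#     orth, norms = cp.orth_ttr(2, dist, retall=True)
#     # E(orth[i]*orth[j], dist) should be close to 0 for i != j,
#     # and E(orth[i]**2, dist) should match norms[i]
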
def Perc(poly, q, dist, sample=1e4, **kws):
    """
    Percentile function.

    Parameters
    ----------
    poly : Poly
        Polynomial of interest.
    q : array_like
        Positions where percentiles are taken. Must be a number or an
        array, where all values are on the interval `[0,100]`.
    dist : Dist
        Defines the space where percentile is taken.
    sample : int
        Number of samples used in estimation.
    **kws : optional
        Extra keywords passed to dist.sample.

    Returns
    -------
    Q : ndarray
        Percentiles of `poly` with `Q.shape=poly.shape+q.shape`.

    Examples
    --------
    >>> cp.seed(1000)
    >>> x, y = cp.variable(2)
    >>> poly = cp.Poly([x, x*y])
    >>> Z = cp.J(cp.Uniform(3,6), cp.Normal())
    >>> print cp.Perc(poly, [0, 50, 100], Z)
    [[  3.         -45.        ]
     [  4.5080777   -0.05862173]
     [  6.          45.        ]]
    """
    shape = poly.shape
    poly = po.flatten(poly)

    q = np.array(q)/100.
    dim = len(dist)

    # Interior
    Z = dist.sample(sample, **kws)
    if dim==1:
        Z = (Z,)
        q = np.array([q])
    poly1 = poly(*Z)

    # Min/max
    mi, ma = dist.range().reshape(2, dim)
    ext = np.mgrid[(slice(0, 2, 1),)*dim].reshape(dim, 2**dim).T
    ext = np.where(ext, mi, ma).T
    poly2 = poly(*ext)
    poly2 = np.array([_ for _ in poly2.T if not np.any(np.isnan(_))]).T

    # Finish
    if poly2.shape:
        poly1 = np.concatenate([poly1, poly2], -1)
    samples = poly1.shape[-1]
    poly1.sort()
    out = poly1.T[np.asarray(q*(samples-1), dtype=int)]
    out = out.reshape(q.shape + shape)

    return out

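# Note on Perc above: the estimate combines Monte Carlo samples from the
# interior of the support with the polynomial evaluated at every corner of
# the support box (the `ext` grid). Appending the finite corner values before
# sorting lets q=0 and q=100 map to the actual extremes of the polynomial
# range rather than to the smallest/largest random sample.
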
def Cov(poly, dist=None, **kws):
    """
    Covariance matrix, or 2nd order statistics of a distribution or
    polynomial.

    Parameters
    ----------
    poly : Poly, Dist
        Input to take covariance on. Must have `len(poly)>=2`.
    dist : Dist
        Defines the space the covariance is taken on.
        It is ignored if `poly` is a distribution.
    **kws : optional
        Extra keywords passed to dist.mom.

    Returns
    -------
    covariance : ndarray
        Covariance matrix with
        `covariance.shape==poly.shape+poly.shape`.

    See Also
    --------
    Corr    Correlation matrix
    E       Expected value
    Kurt    Kurtosis operator
    Skew    Skewness operator
    Var     Variance operator

    Examples
    --------
    >>> Z = cp.MvNormal([0,0], [[2,.5],[.5,1]])
    >>> print cp.Cov(Z)
    [[ 2.   0.5]
     [ 0.5  1. ]]

    >>> x = cp.variable()
    >>> Z = cp.Normal()
    >>> print cp.Cov([x, x**2], Z)
    [[ 1.  0.]
     [ 0.  2.]]
    """
    if isinstance(poly, di.Dist):
        x = po.variable(len(poly))
        poly, dist = x, poly
    else:
        poly = po.Poly(poly)

    dim = len(dist)
    shape = poly.shape
    poly = po.flatten(poly)
    keys = poly.keys
    N = len(keys)
    A = poly.A

    keys1 = np.array(keys).T
    if dim==1:
        keys1 = keys1[0]
        keys2 = sum(np.meshgrid(keys, keys))
    else:
        keys2 = np.empty((dim, N, N))
        for i in xrange(N):
            for j in xrange(N):
                keys2[:, i, j] = keys1[:, i] + keys1[:, j]

    m1 = dist.mom(keys1, **kws)
    m2 = dist.mom(keys2, **kws)
    mom = m2 - np.outer(m1, m1)

    out = np.zeros((len(poly), len(poly)))
    for i in xrange(len(keys)):
        a = A[keys[i]]
        out += np.outer(a, a)*mom[i, i]
        for j in xrange(i+1, len(keys)):
            b = A[keys[j]]
            ab = np.outer(a, b)
            out += (ab + ab.T)*mom[i, j]

    out = np.reshape(out, shape+shape)
    return out

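# Note on Cov above: writing poly = sum_i a_i * x**k_i, the covariance is
# assembled from raw moments of the basis monomials,
#     Cov = sum_{i,j} outer(a_i, a_j) * (E[x**(k_i+k_j)] - E[x**k_i]*E[x**k_j]),
# which is exactly the quantity held in `mom = m2 - np.outer(m1, m1)`.
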
def Kurt(poly, dist=None, fisher=True, **kws):
    """
    Kurtosis, or element by element 4th order statistics of a
    distribution or polynomial.

    Parameters
    ----------
    poly : Poly, Dist
        Input to take kurtosis on.
    dist : Dist
        Defines the space the kurtosis is taken on.
        It is ignored if `poly` is a distribution.
    fisher : bool
        If True, Fisher's definition is used (Normal -> 0.0).
        If False, Pearson's definition is used (Normal -> 3.0).
    **kws : optional
        Extra keywords passed to dist.mom.

    Returns
    -------
    kurtosis : ndarray
        Element by element kurtosis along `poly`, where
        `kurtosis.shape==poly.shape`.

    See Also
    --------
    Corr    Correlation matrix
    Cov     Covariance matrix
    E       Expected value
    Skew    Skewness operator
    Var     Variance operator

    Examples
    --------
    >>> x = cp.variable()
    >>> Z = cp.Uniform()
    >>> print cp.Kurt(Z)
    -1.2
    >>> Z = cp.Normal()
    >>> print cp.Kurt(x, Z)
    4.4408920985e-16
    """
    if isinstance(poly, di.Dist):
        x = po.variable(len(poly))
        poly, dist = x, poly
    else:
        poly = po.Poly(poly)

    if fisher:
        adjust = 3
    else:
        adjust = 0

    shape = poly.shape
    poly = po.flatten(poly)

    m1 = E(poly, dist)
    m2 = E(poly**2, dist)
    m3 = E(poly**3, dist)
    m4 = E(poly**4, dist)

    out = (m4 - 4*m3*m1 + 6*m2*m1**2 - 3*m1**4) / \
          (m2**2 - 2*m2*m1**2 + m1**4) - adjust

    out = np.reshape(out, shape)
    return out

def E(poly, dist=None, **kws):
    """
    Expected value, or 1st order statistics of a probability
    distribution or polynomial on a given probability space.

    Parameters
    ----------
    poly : Poly, Dist
        Input to take expected value on.
    dist : Dist
        Defines the space the expected value is taken on.
        It is ignored if `poly` is a distribution.
    **kws : optional
        Extra keywords passed to dist.mom.

    Returns
    -------
    expected : ndarray
        The expected value of the polynomial or distribution, where
        `expected.shape==poly.shape`.

    See Also
    --------
    Corr    Correlation matrix
    Cov     Covariance matrix
    Kurt    Kurtosis operator
    Skew    Skewness operator
    Var     Variance operator

    Examples
    --------
    For distributions:
    >>> x = cp.variable()
    >>> Z = cp.Uniform()
    >>> print cp.E(Z)
    0.5
    >>> print cp.E(x**3, Z)
    0.25
    """
    if not isinstance(poly, (di.Dist, po.Poly)):
        print "Approximating expected value..."
        out = qu.quad(poly, dist, veceval=True, **kws)
        print "done"
        return out

    if isinstance(poly, di.Dist):
        dist = poly
        poly = po.variable(len(poly))

    if not poly.keys:
        return np.zeros(poly.shape, dtype=int)

    if isinstance(poly, (list, tuple, np.ndarray)):
        return [E(_, dist, **kws) for _ in poly]

    if poly.dim < len(dist):
        poly = po.setdim(poly, len(dist))

    shape = poly.shape
    poly = po.flatten(poly)

    keys = poly.keys
    mom = dist.mom(np.array(keys).T, **kws)
    A = poly.A

    if len(dist)==1:
        mom = mom[0]

    out = np.zeros(poly.shape)
    for i in xrange(len(keys)):
        out += A[keys[i]]*mom[i]

    out = np.reshape(out, shape)
    return out

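# Note on E above: for a polynomial poly = sum_i a_i * x**k_i the expected
# value is linear in the coefficients,
#     E[poly] = sum_i a_i * E[x**k_i],
# so the loop simply weights each coefficient array by the corresponding raw
# moment returned by dist.mom.
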
def Std(poly, dist=None, **kws):
    """
    Standard deviation, or element by element 2nd order statistics of a
    distribution or polynomial.

    Parameters
    ----------
    poly : Poly, Dist
        Input to take standard deviation on.
    dist : Dist
        Defines the space the standard deviation is taken on.
        It is ignored if `poly` is a distribution.
    **kws : optional
        Extra keywords passed to dist.mom.

    Returns
    -------
    std : ndarray
        Element by element standard deviation along `poly`, where
        `std.shape==poly.shape`.

    See Also
    --------
    Corr    Correlation matrix
    Cov     Covariance matrix
    E       Expected value
    Kurt    Kurtosis operator
    Skew    Skewness operator
    Var     Variance operator

    Examples
    --------
    >>> x = cp.variable()
    >>> Z = cp.Uniform()
    >>> print cp.Std(Z)
    0.288675134595
    >>> print cp.Std(x**3, Z)
    0.283473354757
    """
    if isinstance(poly, di.Dist):
        x = po.variable(len(poly))
        poly, dist = x, poly
    else:
        poly = po.Poly(poly)

    dim = len(dist)
    if poly.dim < dim:
        poly = po.setdim(poly, dim)

    shape = poly.shape
    poly = po.flatten(poly)
    keys = poly.keys
    N = len(keys)
    A = poly.A

    keys1 = np.array(keys).T
    if dim==1:
        keys1 = keys1[0]
        keys2 = sum(np.meshgrid(keys, keys))
    else:
        keys2 = np.empty((dim, N, N))
        for i in xrange(N):
            for j in xrange(N):
                keys2[:, i, j] = keys1[:, i] + keys1[:, j]

    m1 = np.outer(*[dist.mom(keys1, **kws)]*2)
    m2 = dist.mom(keys2, **kws)
    mom = m2 - m1

    out = np.zeros(poly.shape)
    for i in xrange(N):
        a = A[keys[i]]
        out += a*a*mom[i, i]
        for j in xrange(i+1, N):
            b = A[keys[j]]
            out += 2*a*b*mom[i, j]

    out = out.reshape(shape)
    return np.sqrt(out)

def weightgen(nodes, dist):
    """Generate quadrature weights for the given nodes under `dist`."""
    poly = stieltjes(dist, len(nodes) - 1, retall=True)[0]
    poly = po.flatten(po.Poly(poly))
    V = poly(nodes)
    Vi = np.linalg.inv(V)
    return Vi[:, 0]

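# Note on weightgen above: V[i, j] = P_i(nodes[j]), where P_0, ..., P_{n-1}
# are the orthogonal polynomials from the Stieltjes procedure. The first
# column of the inverse therefore solves
#     sum_j w_j * P_i(nodes[j]) = (1 if i == 0 else 0),
# so the weights reproduce E[P_i] for all n polynomials and the rule is exact
# for polynomials up to degree len(nodes)-1 (assuming P_0 is the constant 1,
# as the Stieltjes construction gives).
# Usage sketch (illustrative only; any set of distinct nodes that keeps V
# invertible will do):
#
#     nodes = dist.sample(5)
#     weights = weightgen(nodes, dist)
#     # sum(weights * f(nodes)) then approximates E[f] under `dist`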