def __init__(self, y, M, E, W=None): from numpy import asarray, atleast_2d, sqrt from numpy_sugar import ddot self._y = atleast_2d(asarray(y, float).ravel()).T self._E = atleast_2d(asarray(E, float).T).T if W is None: self._W = self._E elif isinstance(W, tuple): # W must be an eigen-decomposition of ЮџєЮџєрхђ self._W = ddot(W[0], sqrt(W[1])) else: self._W = atleast_2d(asarray(W, float).T).T self._M = atleast_2d(asarray(M, float).T).T nsamples = len(self._y) if nsamples != self._M.shape[0]: raise ValueError("Number of samples mismatch between y and M.") if nsamples != self._E.shape[0]: raise ValueError("Number of samples mismatch between y and E.") if nsamples != self._W.shape[0]: raise ValueError("Number of samples mismatch between y and W.") self._lmm = None self._rhos = [0.0, 0.1**2, 0.2**2, 0.3**2, 0.4**2, 0.5**2, 0.5, 0.999]
def _score_stats(self, g, rhos): """ Let ЮЎ║ be the optimal covariance matrix under the null hypothesis. For a given ¤Ђ, the score-based test statistic is given by ЮЉёрхе = ┬йЮљ▓рхђЮЎ┐рхе(РѕѓЮЎ║рхе)ЮЎ┐рхеЮљ▓, where РѕѓЮЎ║рхе = ЮЎ│(¤ЂЮЪЈЮЪЈрхђ + (1-¤Ђ)ЮЎ┤ЮЎ┤рхђ)ЮЎ│ and ЮЎ│ = diag(Юља). """ from numpy import zeros from numpy_sugar import ddot Q = zeros(len(rhos)) DPy = ddot(g, self._P(self._y)) s = DPy.sum() l = s * s DPyE = DPy.T @ self._E r = DPyE @ DPyE.T for i, rho in enumerate(rhos): Q[i] = (rho * l + (1 - rho) * r) / 2 return Q
def _score_stats_null_dist(self, g): """ Under the null hypothesis, the score-based test statistic follows a weighted sum of random variables: ЮЉё Рѕ╝ РѕЉрхбЮюєрхб¤Є┬▓(1), where Ююєрхб are the non-zero eigenvalues of ┬йРѕџЮЎ┐(РѕѓЮЎ║)РѕџЮЎ┐. Note that РѕѓЮЎ║рхе = ЮЎ│(¤ЂЮЪЈЮЪЈрхђ + (1-¤Ђ)ЮЎ┤ЮЎ┤рхђ)ЮЎ│ = (¤ЂЮљаЮљархђ + (1-¤Ђ)ЮЎ┤╠ЃЮЎ┤╠Ѓрхђ) for ЮЎ┤╠Ѓ = ЮЎ│ЮЎ┤. By using SVD decomposition, one can show that the non-zero eigenvalues of ЮџЄЮџЄрхђ are equal to the non-zero eigenvalues of ЮџЄрхђЮџЄ. Therefore, Ююєрхб are the non-zero eigenvalues of ┬й[Рѕџ¤ЂЮља Рѕџ(1-¤Ђ)ЮЎ┤╠Ѓ]ЮЎ┐[Рѕџ¤ЂЮља Рѕџ(1-¤Ђ)ЮЎ┤╠Ѓ]рхђ. """ from math import sqrt from numpy import empty from numpy.linalg import eigvalsh from numpy_sugar import ddot Et = ddot(g, self._E) Pg = self._P(g) PEt = self._P(Et) gPg = g.T @ Pg EtPEt = Et.T @ PEt gPEt = g.T @ PEt n = Et.shape[1] + 1 F = empty((n, n)) lambdas = [] for i in range(len(self._rhos)): rho = self._rhos[i] F[0, 0] = rho * gPg F[0, 1:] = sqrt(rho) * sqrt(1 - rho) * gPEt F[1:, 0] = F[0, 1:] F[1:, 1:] = (1 - rho) * EtPEt lambdas.append(eigvalsh(F) / 2) return lambdas
def score_2dof_inter(self, X):
    # NOTE(review): this definition is token-identical to the docstring-carrying
    # ``score_2dof_inter`` defined immediately after it; since the later ``def``
    # wins, this copy is dead code -- consider deleting it.
    from numpy import empty
    from numpy_sugar import ddot

    # Score statistic at rho = 0, i.e. the pure interaction component.
    Q_rho = self._score_stats(X.ravel(), [0])
    g = X.ravel()
    # Et = diag(g) @ E.
    Et = ddot(g, self._E)
    PEt = self._P(Et)
    EtPEt = Et.T @ PEt
    gPEt = g.T @ PEt
    # Weight matrix for the null distribution handed to davies_pvalue.
    n = Et.shape[1] + 1
    F = empty((n, n))
    F[0, 0] = 0
    F[0, 1:] = gPEt
    F[1:, 0] = F[0, 1:]
    F[1:, 1:] = EtPEt
    F /= 2
    return davies_pvalue(Q_rho[0], F)
def score_2dof_inter(self, X):
    """
    Interaction test.

    Parameters
    ----------
    X : 1d-array
        Genetic variant.

    Returns
    -------
    float
        P-value.
    """
    from numpy import empty

    g = X.ravel()
    # Score statistic evaluated at rho = 0 (interaction component only).
    Q_rho = self._score_stats(g, [0])

    Et = g[:, None] * self._E  # diag(g) @ E
    PEt = self._P(Et)

    # Weight matrix for the null distribution of the statistic.
    k = Et.shape[1] + 1
    F = empty((k, k))
    F[0, 0] = 0
    F[0, 1:] = g.T @ PEt
    F[1:, 0] = F[0, 1:]
    F[1:, 1:] = Et.T @ PEt
    return davies_pvalue(Q_rho[0], F / 2)