def get_logdet(m):
    from gwstatsmodels.tools.compatibility import np_slogdet
    logdet = np_slogdet(m)

    if logdet[0] == -1:  # pragma: no cover
        raise ValueError("Matrix is not positive definite")
    elif logdet[0] == 0:  # pragma: no cover
        raise ValueError("Matrix is singular")
    else:
        logdet = logdet[1]
    return logdet
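# A minimal usage sketch for get_logdet, assuming numpy is installed and that
# np_slogdet follows numpy.linalg.slogdet's (sign, logabsdet) return
# convention, which the compatibility wrapper is expected to mirror.  The
# helper below is hypothetical and only illustrates the call.
def _example_get_logdet():
    import numpy as np
    a = np.array([[2.0, 0.5],
                  [0.5, 1.0]])  # symmetric positive definite
    logdet = get_logdet(a)      # log|a| via the wrapper above
    # cross-check against a direct determinant computation
    assert np.allclose(logdet, np.log(np.linalg.det(a)))
    return logdet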
def _loglike_mle(self, params):
    """
    Loglikelihood of AR(p) process using exact maximum likelihood
    """
    nobs = self.nobs
    Y = self.Y
    X = self.X
    endog = self.endog
    k_ar = self.k_ar
    k_trend = self.k_trend

    # reparameterize according to Jones (1980) like in ARMA/Kalman Filter
    if self.transparams:
        params = self._transparams(params)

    # get mean and variance for pre-sample lags
    yp = endog[:k_ar].copy()
    if k_trend:
        c = [params[0]] * k_ar
    else:
        c = [0]
    mup = np.asarray(c / (1 - np.sum(params[k_trend:])))
    diffp = yp - mup[:, None]

    # get inv(Vp) Hamilton 5.3.7
    Vpinv = self._presample_varcov(params)

    diffpVpinv = np.dot(np.dot(diffp.T, Vpinv), diffp).item()
    ssr = sumofsq(endog[k_ar:].squeeze() - np.dot(X, params))

    # concentrating the likelihood means that sigma2 is given by
    sigma2 = 1. / nobs * (diffpVpinv + ssr)
    self.sigma2 = sigma2
    logdet = np_slogdet(Vpinv)[1]  # TODO: add check for singularity
    loglike = -1 / 2. * (nobs * (np.log(2 * np.pi) + np.log(sigma2)) -
                         logdet + diffpVpinv / sigma2 + ssr / sigma2)
    return loglike
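# Illustration only: a self-contained sketch of the same concentrated exact
# likelihood for the AR(1) case, where the scaled presample variance has the
# closed form Vp = 1/(1 - phi**2), so Vpinv = 1 - phi**2 (Hamilton 5.3.7 with
# p = 1).  The function name and interface are hypothetical; it assumes a
# stationary process (|phi| < 1) and is not part of the original module.
def _ar1_exact_loglike_sketch(y, c, phi):
    import numpy as np
    y = np.asarray(y, dtype=float)
    nobs = y.shape[0]
    mup = c / (1 - phi)                    # unconditional (presample) mean
    vpinv = 1 - phi**2                     # inverse of scaled presample variance
    diffpvpinv = vpinv * (y[0] - mup)**2   # presample quadratic form
    resid = y[1:] - c - phi * y[:-1]       # one-step-ahead residuals
    ssr = np.dot(resid, resid)
    sigma2 = (diffpvpinv + ssr) / nobs     # sigma2 concentrated out, as above
    return -1 / 2. * (nobs * (np.log(2 * np.pi) + np.log(sigma2)) -
                      np.log(vpinv) + diffpvpinv / sigma2 + ssr / sigma2)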