Example #1
    def _compute_test_stat(self, u):
        n = np.shape(u)[0]
        XLOO = LeaveOneOut(self.exog)
        uLOO = iter(LeaveOneOut(u[:, None]))
        I = 0
        S2 = 0
        for i, X_not_i in enumerate(XLOO):
            u_j = next(uLOO)
            u_j = np.squeeze(u_j)
            # See Bootstrapping procedure on p. 357 in [1]
            K = gpke(self.bw, data=-X_not_i, data_predict=-self.exog[i, :],
                     var_type=self.var_type, tosum=False)
            f_i = (u[i] * u_j * K)
            assert u_j.shape == K.shape
            I += f_i.sum()  # See eq. 12.7 on p. 355 in [1]
            S2 += (f_i**2).sum()  # See Theorem 12.1 on p.356 in [1]
            assert np.size(I) == 1
            assert np.size(S2) == 1

        I *= 1. / (n * (n - 1))
        ix_cont = _get_type_pos(self.var_type)[0]
        hp = self.bw[ix_cont].prod()
        S2 *= 2 * hp / (n * (n - 1))
        T = n * I * np.sqrt(hp / S2)
        return T
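
Each example on this page relies on a LeaveOneOut helper whose definition is not shown. From the way it is used, it appears to yield, on iteration i, the data array with observation i removed. A minimal sketch consistent with that usage (an assumption inferred from these snippets, not the actual statsmodels implementation):

    import numpy as np

    class LeaveOneOut:
        """Yield the sample with one observation dropped per iteration.

        Minimal sketch inferred from the usage above; the real helper in
        statsmodels.nonparametric may differ in details.
        """
        def __init__(self, X):
            self.X = np.asarray(X)

        def __iter__(self):
            for i in range(self.X.shape[0]):
                # All rows except the i-th one.
                yield np.delete(self.X, i, axis=0)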
Example #2
    def cv_loo(self, params):
        # See p. 254 in [1]
        params = np.asarray(params)
        b = params[:self.K]
        bw = params[self.K:]
        LOO_X = LeaveOneOut(self.exog)
        LOO_Y = iter(LeaveOneOut(self.endog))
        L = 0
        for i, X_not_i in enumerate(LOO_X):
            Y = next(LOO_Y)
            G = self.func(bw, endog=Y, exog=-np.dot(X_not_i, b)[:, None],
                          data_predict=-np.dot(self.exog[i:i+1, :], b))[0]
            L += (self.endog[i] - G) ** 2

        # Note: There might be a way to vectorize this. See p.72 in [1]
        return L / self.nobs
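
The loop above is a standard leave-one-out cross-validation objective: each observation is predicted from a fit on the remaining data, and the squared prediction errors are averaged. As a self-contained illustration of the same idea (a toy sketch, not the statsmodels code; nw_loo_cv and the simulated data are invented for this example), here is LOO bandwidth selection for a one-dimensional Nadaraya-Watson estimator with a Gaussian kernel:

    import numpy as np
    from scipy.optimize import minimize_scalar

    def nw_loo_cv(h, x, y):
        """Mean squared leave-one-out prediction error of a
        Nadaraya-Watson estimator with Gaussian kernel and bandwidth h."""
        n = len(x)
        L = 0.0
        for i in range(n):
            mask = np.arange(n) != i                      # drop observation i
            k = np.exp(-0.5 * ((x[mask] - x[i]) / h) ** 2)
            L += (y[i] - np.dot(k, y[mask]) / k.sum()) ** 2
        return L / n

    rng = np.random.default_rng(0)
    x = rng.uniform(0, 1, 100)
    y = np.sin(2 * np.pi * x) + 0.3 * rng.standard_normal(100)
    res = minimize_scalar(lambda h: nw_loo_cv(h, x, y),
                          bounds=(0.01, 1.0), method="bounded")
    print("CV-selected bandwidth:", res.x)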
Example #3
    def cv_loo(self, params):
        """
        Similar to the cross validation leave-one-out estimator.

        Modified to reflect the linear components.

        Parameters
        ----------
        params : array_like
            Vector consisting of the coefficients (b) and the bandwidths (bw).
            The first ``k_linear`` elements are the coefficients.

        Returns
        -------
        L : float
            The value of the objective function

        References
        ----------
        See p.254 in [1]
        """
        params = np.asarray(params)
        b = params[0:self.k_linear]
        bw = params[self.k_linear:]
        LOO_X = LeaveOneOut(self.exog)
        LOO_Y = iter(LeaveOneOut(self.endog))
        LOO_Z = iter(LeaveOneOut(self.exog_nonparametric))
        Xb = np.dot(self.exog, b)[:, None]
        L = 0
        for ii, X_not_i in enumerate(LOO_X):
            Y = next(LOO_Y)
            Z = next(LOO_Z)
            Xb_j = np.dot(X_not_i, b)[:, None]
            Yx = Y - Xb_j
            G = self.func(bw,
                          endog=Yx,
                          exog=-Z,
                          data_predict=-self.exog_nonparametric[ii, :])[0]
            lt = Xb[ii, :]  # linear term
            L += (self.endog[ii] - lt - G)**2

        return L
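
This variant targets a partially linear model Y = Xb + g(Z) + e: the linear part is stripped from the response, the remainder is regressed nonparametrically on Z with observation ii left out, and the squared prediction error (linear term plus nonparametric fit) is accumulated. A self-contained toy version of the same objective (a hedged sketch with invented names and simulated data, not the statsmodels code):

    import numpy as np
    from scipy.optimize import minimize

    def plm_loo(params, X, Z, Y, k_linear):
        """Toy leave-one-out objective for Y = X @ b + g(Z) + e,
        mirroring the structure of cv_loo above."""
        b, h = params[:k_linear], params[k_linear:]
        n = len(Y)
        resid = Y - X @ b                     # strip the linear component
        L = 0.0
        for i in range(n):
            mask = np.arange(n) != i          # leave observation i out
            k = np.exp(-0.5 * ((Z[mask] - Z[i]) / h) ** 2).prod(axis=1)
            G = np.dot(k, resid[mask]) / k.sum()   # NW estimate of g(Z_i)
            L += (Y[i] - X[i] @ b - G) ** 2
        return L

    rng = np.random.default_rng(1)
    n = 150
    X = rng.standard_normal((n, 2))
    Z = rng.uniform(0, 1, (n, 1))
    Y = X @ np.array([1.0, -0.5]) + np.sin(2 * np.pi * Z[:, 0]) \
        + 0.2 * rng.standard_normal(n)
    res = minimize(plm_loo, x0=np.array([0.0, 0.0, 0.2]),
                   args=(X, Z, Y, 2), method="Nelder-Mead")
    print("b:", res.x[:2], "bw:", res.x[2:])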