Example 1
    def objective(self, x, use_ad=False):
        # unpack variable
        beta = x[self.idx_beta]
        gamma = x[self.idx_gamma]
        delta = x[self.idx_delta]

        # trimming option
        if self.use_trimming:
            sqrt_w = np.sqrt(self.w)
            sqrt_W = sqrt_w.reshape(self.N, 1)
            F_beta = self.F(beta) * sqrt_w
            Y = self.Y * sqrt_w
            Z = self.Z * sqrt_W
            if self.std_flag == 0:
                V = self.V**self.w
            elif self.std_flag == 1:
                V = np.repeat(delta[0], self.N)**self.w
            elif self.std_flag == 2:
                V = np.repeat(delta, self.n)**self.w
        else:
            F_beta = self.F(beta)
            Y = self.Y
            Z = self.Z
            if self.std_flag == 0:
                V = self.V
            elif self.std_flag == 1:
                V = np.repeat(delta[0], self.N)
            elif self.std_flag == 2:
                V = np.repeat(delta, self.n)

        # residual and variance
        R = Y - F_beta
        D = utils.VarMat(V, Z, gamma, self.n)

        val = 0.5 * self.N * np.log(2.0 * np.pi)

        if use_ad:
            # dense computation via varMat/invVarMat; should only be used when testing
            varmat = D
            D = varmat.varMat()
            inv_D = varmat.invVarMat()

            val += 0.5 * np.log(np.linalg.det(D))
            val += 0.5 * R.dot(inv_D.dot(R))
        else:
            val += 0.5 * D.logDet()
            val += 0.5 * R.dot(D.invDot(R))

        # add priors (regularizer, Gaussian prior, Laplace prior)
        if self.use_regularizer:
            val += 0.5 * self.hw.dot((self.H(x) - self.hm)**2)

        if self.use_gprior:
            val += 0.5 * self.gw.dot((x - self.gm)**2)

        if self.use_lprior:
            val += self.lw.dot(x[self.k:])

        return val
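
The likelihood part of objective() is the Gaussian negative log-likelihood
0.5*N*log(2*pi) + 0.5*log|D| + 0.5*R^T D^{-1} R, where D is the variance
matrix represented by utils.VarMat and is block diagonal over the groups of
sizes n. Below is a minimal dense-numpy sketch of that term, assuming each
block has the usual mixed-model form diag(V_i) + Z_i diag(gamma) Z_i^T; the
function name and arguments are illustrative, not part of the library.

import numpy as np

def dense_neg_log_likelihood(R, V, Z, gamma, n):
    """0.5*N*log(2*pi) + 0.5*log|D| + 0.5*R^T D^{-1} R, block by block."""
    val = 0.5 * R.size * np.log(2.0 * np.pi)
    start = 0
    for n_i in n:
        sl = slice(start, start + n_i)
        # block of D for this group: diag(V_i) + Z_i diag(gamma) Z_i^T
        D_i = np.diag(V[sl]) + (Z[sl] * gamma).dot(Z[sl].T)
        val += 0.5 * np.linalg.slogdet(D_i)[1]
        val += 0.5 * R[sl].dot(np.linalg.solve(D_i, R[sl]))
        start += n_i
    return val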
Example 2
    def degree_of_freedom(self):
        """Compute the degree of freedom of the model
        only considered in term of beta
        """
        if self.soln is None:
            print("Please fit the data first.")

        if self.use_trimming:
            sqrt_w = np.sqrt(self.w)
            sqrt_W = sqrt_w.reshape(self.N, 1)
            F_beta = self.F(self.beta) * sqrt_w
            JF_beta = self.JF(self.beta) * sqrt_W
            Y = self.Y * sqrt_w
            Z = self.Z * sqrt_W
            if self.std_flag == 0:
                V = self.V**self.w
            elif self.std_flag == 1:
                V = np.repeat(self.delta[0], self.N)**self.w
            elif self.std_flag == 2:
                V = np.repeat(self.delta, self.n)**self.w
        else:
            F_beta = self.F(self.beta)
            JF_beta = self.JF(self.beta)
            Y = self.Y
            Z = self.Z
            if self.std_flag == 0:
                V = self.V
            elif self.std_flag == 1:
                V = np.repeat(self.delta[0], self.N)
            elif self.std_flag == 2:
                V = np.repeat(self.delta, self.n)

        D = utils.VarMat(V, Z, self.gamma, self.n)

        # compute the Hessian matrix
        hess = JF_beta.T.dot(D.invDot(JF_beta))
        if self.use_regularizer:
            JH_beta = self.JH(self.soln)[:, self.idx_beta]
            hess += (JH_beta.T * self.hw).dot(JH_beta)

        # subtract the active constraints
        lb_beta = self.lb[self.idx_beta]
        ub_beta = self.ub[self.idx_beta]
        JC_beta = np.eye(self.k_beta)
        active_id = (np.abs(self.beta - lb_beta) < 1e-7) |\
            (np.abs(self.beta - ub_beta) < 1e-7)
        if np.any(active_id):
            JC_beta = JC_beta[active_id]
        else:
            JC_beta = np.array([]).reshape(0, self.k_beta)

        if self.use_constraints:
            JC_beta_more = self.JC(self.soln)[:, self.idx_beta]
            active_id = (np.abs(self.C(self.soln) - self.cl) < 1e-7) |\
                (np.abs(self.C(self.soln) - self.cu) < 1e-7)

            if np.any(active_id):
                JC_beta_more = JC_beta_more[active_id]
            else:
                JC_beta_more = np.array([]).reshape(0, self.k_beta)
            JC_beta = np.vstack((JC_beta, JC_beta_more))

        if JC_beta.size != 0:
            cw = np.repeat(1e15, JC_beta.shape[0])
            hess += (JC_beta.T * cw).dot(JC_beta)

        hat_mat = JF_beta.dot(np.linalg.pinv(hess)).dot(D.invDot(JF_beta).T)
        df = np.trace(hat_mat)

        return df
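
The quantity returned here is the effective degrees of freedom: the trace of
the hat matrix JF_beta (hess)^+ JF_beta^T D^{-1}, where the Hessian is
augmented with the regularizer and with heavily weighted rows for active
constraints. A self-contained sketch of the same idea for a plain linear
model is shown below; without penalties or constraints the trace equals the
number of columns of the design matrix. All names are illustrative.

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 3))              # design matrix (role of JF_beta)
D = np.diag(rng.uniform(0.5, 2.0, 50))    # observation covariance (role of VarMat)

D_inv_X = np.linalg.solve(D, X)           # D^{-1} X
hess = X.T.dot(D_inv_X)                   # X^T D^{-1} X
hat_mat = X.dot(np.linalg.pinv(hess)).dot(D_inv_X.T)
print(np.trace(hat_mat))                  # ~3.0, the number of free columns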
Example 3
    def gradient(self, x, use_ad=False, eps=1e-12):
        if use_ad:
            # complex-step differentiation; should only be used when testing
            g = np.zeros(self.k_total)
            z = x + 0j
            for i in range(self.k_total):
                z[i] += eps * 1j
                g[i] = self.objective(z, use_ad=use_ad).imag / eps
                z[i] -= eps * 1j

            return g

        # unpack variable
        beta = x[self.idx_beta]
        gamma = x[self.idx_gamma]
        delta = x[self.idx_delta]

        # clip negative values of gamma to zero
        gamma[gamma <= 0.0] = 0.0

        # trimming option
        if self.use_trimming:
            sqrt_w = np.sqrt(self.w)
            sqrt_W = sqrt_w.reshape(self.N, 1)
            F_beta = self.F(beta) * sqrt_w
            JF_beta = self.JF(beta) * sqrt_W
            Y = self.Y * sqrt_w
            Z = self.Z * sqrt_W
            if self.std_flag == 0:
                V = self.V**self.w
            elif self.std_flag == 1:
                V = np.repeat(delta[0], self.N)**self.w
            elif self.std_flag == 2:
                V = np.repeat(delta, self.n)**self.w
        else:
            F_beta = self.F(beta)
            JF_beta = self.JF(beta)
            Y = self.Y
            Z = self.Z
            if self.std_flag == 0:
                V = self.V
            elif self.std_flag == 1:
                V = np.repeat(delta[0], self.N)
            elif self.std_flag == 2:
                V = np.repeat(delta, self.n)

        # residual and variance
        R = Y - F_beta
        D = utils.VarMat(V, Z, gamma, self.n)

        # gradient for beta
        DR = D.invDot(R)
        g_beta = -JF_beta.T.dot(DR)

        # gradient for gamma
        DZ = D.invDot(Z)
        g_gamma = 0.5*np.sum(Z*DZ, axis=0) -\
            0.5*np.sum(
                np.add.reduceat(DZ.T*R, self.idx_split, axis=1)**2,
                axis=1)

        # gradient for delta
        if self.std_flag == 0:
            g_delta = np.array([])
        elif self.std_flag == 1:
            d = -DR**2 + D.invDiag()
            if self.use_trimming:
                v = np.repeat(delta[0], self.N)
                d *= self.w * (v**(self.w - 1.0))
            g_delta = 0.5 * np.array([np.sum(d)])
        elif self.std_flag == 2:
            d = -DR**2 + D.invDiag()
            if self.use_trimming:
                v = np.repeat(delta, self.n)
                d *= self.w * (v**(self.w - 1.0))
            g_delta = 0.5 * (np.add.reduceat(d, self.idx_split))

        g = np.hstack((g_beta, g_gamma, g_delta))

        # add gradient from the regularizer
        if self.use_regularizer:
            g += self.JH(x).T.dot((self.H(x) - self.hm) * self.hw)

        # add gradient from the gprior
        if self.use_gprior:
            g += (x[:self.k] - self.gm) * self.gw

        # add gradient from the lprior
        if self.use_lprior:
            g = np.hstack((g, self.lw))

        return g
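
The use_ad branch above is complex-step differentiation: perturb one
coordinate by i*eps, evaluate the objective with complex arithmetic, and read
the derivative off the imaginary part, which avoids the cancellation error of
finite differences. A standalone sketch on a toy function (names are
illustrative):

import numpy as np

def complex_step_grad(f, x, eps=1e-12):
    """Gradient of f at x via f(x + i*eps*e_j).imag / eps."""
    g = np.zeros(x.size)
    z = x.astype(complex)
    for i in range(x.size):
        z[i] += eps * 1j
        g[i] = f(z).imag / eps
        z[i] -= eps * 1j
    return g

f = lambda v: np.sum(np.exp(v) * v**2)
x = np.array([0.3, -1.2, 2.0])
print(complex_step_grad(f, x))        # complex-step gradient
print(np.exp(x) * (x**2 + 2.0 * x))   # analytic gradient, for comparison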
Example 4
def get_varmat(model: LimeTr):
    # Build the variance matrix used by the model, applying the trimming
    # weights to both the standard deviations and the random-effects design Z.
    # Assumes numpy as np, the LimeTr class, and the package's utils module
    # are already imported.
    S = model.S**model.w
    Z = model.Z * np.sqrt(model.w)[:, None]
    n = model.n
    gamma = model.gamma
    return utils.VarMat(S**2, Z, gamma, n)
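
The VarMat object returned by get_varmat is the same structure used in the
examples above. The operations the code relies on are listed below; this
snippet assumes a fitted LimeTr instance named model and only calls methods
that already appear in the examples.

D = get_varmat(model)
r = model.Y - model.F(model.beta)   # residual (unweighted, for illustration)

log_det = D.logDet()       # log-determinant, used in objective()
D_inv_r = D.invDot(r)      # D^{-1} r, used in objective() and gradient()
inv_diag = D.invDiag()     # diag(D^{-1}), used in the delta gradient
dense_D = D.varMat()       # dense D, only used in the testing (use_ad) path
inv_D = D.invVarMat()      # dense D^{-1}, also testing only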