Example #1
    def predict(self, hyperparams, Xstar_r, compute_cov = False):
        """
        predict on Xstar
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams)
        
        self.covar_r.Xcross = Xstar_r
        
        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(hyperparams['covar_c'])  
        BKstar_cB = SP.dot(self.basis, SP.dot(Kstar_c.T, self.basis.T))

        KinvY = SP.dot(KV['U_r'], SP.dot(KV['Ytilde'], SP.dot(KV['U_c'].T, KV['Binv'])))
        
        Ystar = SP.dot(Kstar_r.T,  SP.dot(KinvY, BKstar_cB))
        
        Ystar_covar = []
        if compute_cov:
            R_star_star = SP.exp(2 * hyperparams['covar_r']) * fast_dot(Xstar_r, Xstar_r.T)
            R_tr_star = Kstar_r
            C = BKstar_cB         
            temp = fast_kron(fast_dot(C, fast_dot(KV['Binv'].T ,KV['U_c'])), fast_dot(R_tr_star.T, KV['U_r']))
            Ystar_covar = SP.diag(fast_kron(C, R_star_star)) - SP.sum((1./KV['S'] * temp).T * temp.T, axis = 0)            
            
            Ystar_covar = unravel(Ystar_covar, Xstar_r.shape[0], self.Y.shape[1])
            
        return Ystar, Ystar_covar
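
The posterior mean above is assembled as Kstar_r.T · KinvY · BKstar_cB, i.e. two small matrix products instead of a product with the full Kronecker covariance. Below is a minimal, self-contained NumPy sketch of the underlying vec identity, (C kron R) vec(X) = vec(R X C^T); all names in it are illustrative and not part of the library.

    import numpy as np

    rng = np.random.default_rng(0)
    n, t = 5, 4                      # rows (samples) and columns (tasks)
    R = rng.standard_normal((n, n))  # stands in for the row factor (e.g. Kstar_r)
    C = rng.standard_normal((t, t))  # stands in for the column factor
    X = rng.standard_normal((n, t))  # stands in for KinvY

    vec = lambda M: M.ravel(order='F')   # column-major vec(), as in the identity

    lhs = np.kron(C, R) @ vec(X)         # naive: builds an (n*t) x (n*t) matrix
    rhs = vec(R @ X @ C.T)               # structured: two small products
    assert np.allclose(lhs, rhs)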
Example #2
    def _LMLgrad_covar(self, hyperparams):
        """
        evaluates the gradient of the log marginal likelihood with respect to the
        hyperparameters of the covariance function
        """
        try:
            KV = self.get_covariances(hyperparams)
        except LA.LinAlgError:
            LG.error('linalg exception in _LMLgrad_covar')
            return {
                'covar_r': SP.zeros(len(hyperparams['covar_r'])),
                'covar_c': SP.zeros(len(hyperparams['covar_c']))
            }
        except ValueError:
            LG.error('value error in _LMLgrad_covar')
            return {
                'covar_r': SP.zeros(len(hyperparams['covar_r'])),
                'covar_c': SP.zeros(len(hyperparams['covar_c']))
            }

        RV = {}
        Si = unravel(1. / KV['S'], self.n, self.t)

        if 'covar_r' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_r']))
            for i in range(len(theta)):
                Kgrad_r = self.covar_r.Kgrad_theta(hyperparams['covar_r'], i)
                d = (KV['U_r'] * SP.dot(Kgrad_r, KV['U_r'])).sum(0)
                LMLgrad_det = reduce(SP.dot, [d, Si, KV['S_c_tilde']])
                UdKU = reduce(SP.dot, [KV['U_r'].T, Kgrad_r, KV['U_r']])
                SYUdKU = SP.dot(
                    UdKU,
                    (KV['Ytilde'] * SP.tile(KV['S_c_tilde'][SP.newaxis, :],
                                            (self.n, 1))))
                LMLgrad_quad = -(KV['Ytilde'] * SYUdKU).sum()
                LMLgrad = 0.5 * (LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad
            RV['covar_r'] = theta

        if 'covar_c' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_c']))
            for i in range(len(theta)):
                Kgrad_c = self.covar_c.Kgrad_theta(hyperparams['covar_c'], i)
                S_c_tilde_grad = reduce(
                    SP.dot, [KV['UBinvB'], Kgrad_c, KV['UBinvB'].T])
                LMLgrad_det = reduce(
                    SP.dot,
                    [KV['S_r'], Si, SP.diag(S_c_tilde_grad)])
                SYUdKU = SP.dot(
                    (KV['Ytilde'] * SP.tile(KV['S_r'][:, SP.newaxis],
                                            (1, self.t))), S_c_tilde_grad.T)
                LMLgrad_quad = -SP.sum(KV['Ytilde'] * SYUdKU)
                LMLgrad = 0.5 * (LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad
            RV['covar_c'] = theta

        return RV
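
The split into LMLgrad_det and LMLgrad_quad mirrors the standard gradient of the Gaussian-process log marginal likelihood, dL/dtheta = 0.5*tr(K^-1 dK/dtheta) - 0.5*y^T K^-1 (dK/dtheta) K^-1 y, evaluated here in the eigenbasis of the Kronecker factors. A minimal sketch of that formula checked against finite differences, on a toy one-parameter kernel (all names illustrative, not the library's API):

    import numpy as np

    def nll(theta, X, y):
        # negative log marginal likelihood (up to constants) of a toy linear kernel
        K = np.exp(2 * theta) * (X @ X.T) + 1e-2 * np.eye(len(y))
        _, logdet = np.linalg.slogdet(K)
        return 0.5 * (logdet + y @ np.linalg.solve(K, y))

    def nll_grad(theta, X, y):
        K = np.exp(2 * theta) * (X @ X.T) + 1e-2 * np.eye(len(y))
        dK = 2 * np.exp(2 * theta) * (X @ X.T)             # dK/dtheta
        Kinv_y = np.linalg.solve(K, y)
        det_term = 0.5 * np.trace(np.linalg.solve(K, dK))  # log-determinant term
        quad_term = -0.5 * Kinv_y @ dK @ Kinv_y            # quadratic term
        return det_term + quad_term

    rng = np.random.default_rng(1)
    X, y, theta, eps = rng.standard_normal((6, 2)), rng.standard_normal(6), 0.3, 1e-6
    fd = (nll(theta + eps, X, y) - nll(theta - eps, X, y)) / (2 * eps)
    assert np.allclose(nll_grad(theta, X, y), fd, atol=1e-5)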
Example #3
    def _LMLgrad_covar(self,hyperparams,debugging=False):
        """
        evaluates the gradient of the log marginal likelihood with respect to the
        hyperparameters of the covariance function
        """
        try:
            KV = self.get_covariances(hyperparams,debugging=debugging)
        except LA.LinAlgError:
            LG.error('linalg exception in _LMLgrad_covar')
            return {'covar_r':SP.zeros(len(hyperparams['covar_r'])),'covar_c':SP.zeros(len(hyperparams['covar_c']))}
        except ValueError:
            LG.error('value error in _LMLgrad_covar')
            return {'covar_r':SP.zeros(len(hyperparams['covar_r'])),'covar_c':SP.zeros(len(hyperparams['covar_c']))}
 
        RV = {}
        Si = unravel(1./KV['S'],self.n,self.t)

        if 'covar_r' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_r']))
            for i in range(len(theta)):
                Kgrad_r = self.covar_r.Kgrad_theta(hyperparams['covar_r'],i)
                d=(KV['U_r']*SP.dot(Kgrad_r,KV['U_r'])).sum(0)
                LMLgrad_det = SP.dot(d,SP.dot(Si,KV['S_c']))
                UdKU = SP.dot(KV['U_r'].T,SP.dot(Kgrad_r,KV['U_r']))
                SYUdKU = SP.dot(UdKU,(KV['Ytilde']*SP.tile(KV['S_c'][SP.newaxis,:],(self.n,1))))
                LMLgrad_quad = - (KV['Ytilde']*SYUdKU).sum()
                LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad

                if debugging:
                    Kd = SP.kron(KV['K_c'], Kgrad_r)
                    _LMLgrad = 0.5 * (KV['W']*Kd).sum()
                    assert SP.allclose(LMLgrad,_LMLgrad), 'ouch, gradient is wrong for covar_r'
                    
            RV['covar_r'] = theta

        if 'covar_c' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_c']))
            for i in range(len(theta)):
                Kgrad_c = self.covar_c.Kgrad_theta(hyperparams['covar_c'],i)

                d=(KV['U_c']*SP.dot(Kgrad_c,KV['U_c'])).sum(0)
                LMLgrad_det = SP.dot(KV['S_r'],SP.dot(Si,d))

                UdKU = SP.dot(KV['U_c'].T,SP.dot(Kgrad_c,KV['U_c']))
                SYUdKU = SP.dot((KV['Ytilde']*SP.tile(KV['S_r'][:,SP.newaxis],(1,self.t))),UdKU.T)
                LMLgrad_quad = -SP.sum(KV['Ytilde']*SYUdKU)
                LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad
            
                if debugging:
                    Kd = SP.kron(Kgrad_c, KV['K_r'])
                    _LMLgrad = 0.5 * (KV['W']*Kd).sum()
                    assert SP.allclose(LMLgrad,_LMLgrad), 'ouch, gradient is wrong for covar_c'
                    
            RV['covar_c'] = theta

        return RV
Example #4
    def predict(self, hyperparams, Xstar_r, compute_cov = False, debugging = False):
        """
        predict on Xstar
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams,debugging=debugging)
        
        self.covar_r.Xcross = Xstar_r
        
        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(hyperparams['covar_c'])

        KinvY = SP.dot(KV['U_r'],SP.dot(KV['Ytilde'],KV['U_c'].T))
        Ystar = SP.dot(Kstar_r.T,SP.dot(KinvY,Kstar_c))
        Ystar = unravel(Ystar,self.covar_r.n_cross,self.t)

        if debugging:
            Kstar = SP.kron(Kstar_c,Kstar_r)
            Ynaive = SP.dot(Kstar.T,KV['alpha'])
            Ynaive = unravel(Ynaive,self.covar_r.n_cross,self.t)
            assert SP.allclose(Ystar,Ynaive), 'ouch, prediction does not work out'
        
        Ystar_covar = []
        if compute_cov:
            
            CU = fast_dot(Kstar_c, KV['U_c'])     
            s_rev = 1./KV['S']
            Ystar_covar = SP.zeros([Xstar_r.shape[0], self.Y.shape[1]])
            printProgressBar(0, Xstar_r.shape[0], prefix = 'Computing prediction variance:', suffix = 'Complete', length = 20)
            for i in range(Xstar_r.shape[0]):
                R_star_star = self.covar_r.K(hyperparams['covar_r'], SP.expand_dims(Xstar_r[i,:],axis=0))
                self.covar_r.Xcross = SP.expand_dims(Xstar_r[i,:],axis=0)
                R_tr_star = self.covar_r.Kcross(hyperparams['covar_r'])
                RU = SP.dot(R_tr_star.T, KV['U_r'])
                q = SP.kron(SP.diag(Kstar_c), R_star_star)
                t = SP.zeros([self.t])                
                for j in range(self.t):
                    temp = SP.kron(CU[j,:], RU) 
                    t[j,] = SP.sum((s_rev * temp).T * temp.T, axis = 0)
                Ystar_covar[i,:] = q - t
                if (i + 1) % max(1, Xstar_r.shape[0] // 10) == 0:
                    printProgressBar(i+1, Xstar_r.shape[0], prefix = 'Computing prediction variance:', suffix = 'Complete', length = 20)
            self.covar_r.Xcross = Xstar_r
            
        return Ystar, Ystar_covar
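
The per-point variance loop above never forms the full Kronecker prior: it combines SP.diag(Kstar_c) with a scalar R_star_star, relying on the fact that the diagonal of a Kronecker product factorises, diag(C kron R) = kron(diag(C), diag(R)). A tiny NumPy check of that identity (names illustrative only):

    import numpy as np

    rng = np.random.default_rng(2)
    C = rng.standard_normal((3, 3))   # stands in for the column covariance
    R = rng.standard_normal((4, 4))   # stands in for the row covariance
    assert np.allclose(np.diag(np.kron(C, R)),
                       np.kron(np.diag(C), np.diag(R)))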
Example #5
    def get_covariances(self,hyperparams):
        """
        INPUT:
        hyperparams:  dictionary
        OUTPUT: covariance cache (dictionary) with, among others, the fields
        K_r, U_r, S_r:  row kernel and its eigendecomposition
        K_c, U_c, S_c:  column kernel and its eigendecomposition
        S:              eigenvalues of kron(K_c, K_r) plus the noise diagonal
        Ytilde, Binv:   rotated targets and pseudo-inverse of the basis
        """
        if self._is_cached(hyperparams):
            return self._covar_cache
        if self._covar_cache is None:
            self._covar_cache = {}
            
        if not(self._is_cached(hyperparams,keys=['covar_c'])):
            K_c = self.covar_c.K(hyperparams['covar_c'])
            S_c,U_c = LA.eigh(K_c)
            self._covar_cache['K_c'] = K_c
            self._covar_cache['U_c'] = U_c
            self._covar_cache['S_c'] = S_c
        else:
            K_c = self._covar_cache['K_c']
            U_c = self._covar_cache['U_c']
            S_c = self._covar_cache['S_c']
            
        if not(self._is_cached(hyperparams,keys=['covar_r'])):
            K_r = self.covar_r.K(hyperparams['covar_r'])
            S_r,U_r = LA.eigh(K_r)
            self._covar_cache['K_r'] = K_r
            self._covar_cache['U_r'] = U_r
            self._covar_cache['S_r'] = S_r
        else:
            K_r = self._covar_cache['K_r']
            U_r = self._covar_cache['U_r']
            S_r = self._covar_cache['S_r']

        Binv = SP.linalg.pinv(self.basis)       
        S = SP.kron(S_c,S_r) + self.likelihood.Kdiag(hyperparams['lik'],self.nt)
        #UYUB = SP.dot(U_r.T, SP.dot(self.Y, SP.dot(Binv.T, SP.dot(U_c, SP.dot(U_c.T, Binv)))))
        UYUB = SP.dot(U_r.T, SP.dot(self.Y, SP.dot(Binv.T, U_c)))
        YtildeVec = (1./S) * ravel(UYUB)
        self._covar_cache['Binv'] = Binv
        self._covar_cache['S'] = S
        self._covar_cache['UYUB'] = UYUB
        self._covar_cache['Ytilde'] = unravel(YtildeVec,self.n,self.t)
        UBinv = SP.dot(U_c.T, Binv)
        self._covar_cache['UBinvB'] = SP.dot(UBinv, self.basis)
        self._covar_cache['UBinvBinvU'] = SP.dot(UBinv, UBinv.T)
        self._covar_cache['S_c_tilde'] = SP.diag(SP.dot(self._covar_cache['UBinvB'],SP.dot(K_c, self._covar_cache['UBinvB'].T)))
        self._covar_cache['hyperparams'] = copy.deepcopy(hyperparams)
        return self._covar_cache
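
get_covariances only caches eigendecompositions because, for iid noise, kron(K_c, K_r) + sigma^2*I = (U_c kron U_r) diag(kron(S_c, S_r) + sigma^2) (U_c kron U_r)^T, so inverting the full covariance reduces to the elementwise reciprocal of the cached vector S. A minimal check of that identity on toy matrices (names illustrative, not the library's API):

    import numpy as np

    rng = np.random.default_rng(3)
    A = rng.standard_normal((3, 5)); K_c = A @ A.T + 1e-3 * np.eye(3)   # toy column kernel
    B = rng.standard_normal((4, 5)); K_r = B @ B.T + 1e-3 * np.eye(4)   # toy row kernel
    sigma2 = 0.1                                                        # iid noise variance

    S_c, U_c = np.linalg.eigh(K_c)
    S_r, U_r = np.linalg.eigh(K_r)
    S = np.kron(S_c, S_r) + sigma2            # plays the role of the cached S
    U = np.kron(U_c, U_r)

    K = np.kron(K_c, K_r) + sigma2 * np.eye(K_c.shape[0] * K_r.shape[0])
    assert np.allclose(np.linalg.inv(K), (U / S) @ U.T)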
Example #6
    def predict(self, hyperparams, Xstar_r, compute_cov = False, debugging = False):
        """
        predict on Xstar
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams,debugging=debugging)
        
        self.covar_r.Xcross = Xstar_r
        
        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(hyperparams['covar_c'])

        KinvY = SP.dot(KV['U_r'],SP.dot(KV['Ytilde'],KV['U_c'].T))
        Ystar = SP.dot(Kstar_r.T,SP.dot(KinvY,Kstar_c))
        Ystar = unravel(Ystar,self.covar_r.n_cross,self.t)

        if debugging:
            Kstar = SP.kron(Kstar_c,Kstar_r)
            Ynaive = SP.dot(Kstar.T,KV['alpha'])
            Ynaive = unravel(Ynaive,self.covar_r.n_cross,self.t)
            assert SP.allclose(Ystar,Ynaive), 'ouch, prediction does not work out'
        
        Ystar_covar = []
        if compute_cov:
            R_star_star = SP.exp(2 * hyperparams['covar_r']) * fast_dot(Xstar_r, Xstar_r.T)
            R_tr_star = Kstar_r
            C = Kstar_c
            
#            Ystar_covar = SP.diag(fast_kron(C, R_star_star) - fast_dot(1./KV['S'] * 
#                fast_kron(fast_dot(C, KV['U_c']), fast_dot(R_tr_star.T, KV['U_r']))
#                , fast_kron(fast_dot(KV['U_c'].T, C), fast_dot(KV['U_r'].T, R_tr_star))))
            
            temp = fast_kron(fast_dot(C, KV['U_c']), fast_dot(R_tr_star.T, KV['U_r']))
            Ystar_covar = SP.diag(fast_kron(C, R_star_star)) - SP.sum((1./KV['S'] * temp).T * temp.T, axis = 0)            
            
            Ystar_covar = unravel(Ystar_covar, Xstar_r.shape[0], self.t)
            
        return Ystar, Ystar_covar
Example #7
    def get_covariances(self, hyperparams):

        if self._is_cached(hyperparams):
            return self._covar_cache
        if self._covar_cache is None:
            self._covar_cache = {}

        if not (self._is_cached(hyperparams, keys=['covar_c'])):
            K_c = self.covar_c.K(hyperparams['covar_c'])
            S_c, U_c = LA.eigh(K_c)
            self._covar_cache['K_c'] = K_c
            self._covar_cache['U_c'] = U_c
            self._covar_cache['S_c'] = S_c
        else:
            K_c = self._covar_cache['K_c']
            U_c = self._covar_cache['U_c']
            S_c = self._covar_cache['S_c']

        if not (self._is_cached(hyperparams, keys=['covar_r'])):
            K_r = self.covar_r.K(hyperparams['covar_r'])
            S_r, U_r = LA.eigh(K_r)
            self._covar_cache['K_r'] = K_r
            self._covar_cache['U_r'] = U_r
            self._covar_cache['S_r'] = S_r
        else:
            K_r = self._covar_cache['K_r']
            U_r = self._covar_cache['U_r']
            S_r = self._covar_cache['S_r']

        Binv = SP.linalg.pinv(self.basis.T).T
        S = SP.kron(S_c, S_r) + self.likelihood.Kdiag(hyperparams['lik'],
                                                      self.nt)
        UYUB = reduce(SP.dot, [U_r.T, self.Y, Binv.T, U_c])
        YtildeVec = (1. / S) * ravel(UYUB)
        self._covar_cache['Binv'] = Binv
        self._covar_cache['S'] = S
        self._covar_cache['UYUB'] = UYUB
        self._covar_cache['Ytilde'] = unravel(YtildeVec, self.n, self.t)
        UBinv = SP.dot(U_c.T, Binv)
        self._covar_cache['UBinvB'] = SP.dot(UBinv, self.basis)
        self._covar_cache['UBinvBinvU'] = SP.dot(UBinv, UBinv.T)
        self._covar_cache['S_c_tilde'] = SP.diag(
            reduce(SP.dot, [
                self._covar_cache['UBinvB'], K_c, self._covar_cache['UBinvB'].T
            ]))
        self._covar_cache['hyperparams'] = copy.deepcopy(hyperparams)
        return self._covar_cache
Example #8
    def _LMLgrad_s(self, hyperparams):
        """
        evaluate gradients with respect to covariance matrix Sigma
        """
        try:
            KV = self.get_covariances(hyperparams)
        except LA.LinAlgError:
            LG.error('linalg exception in _LMLgrad_x_sigma')
            return {'X_s': SP.zeros(hyperparams['X_s'].shape)}

        Si = 1. / KV['Stilde_os']
        Yhat = unravel(Si * ravel(KV['UYtildeU_os']), self.n, SP.prod(self.bn))
        RV = {}

        if 'covar_s' in hyperparams:
            k = 0
            RV['covar_s'] = []
            for covar in self.covar_s:
                theta = SP.zeros(len(hyperparams['covar_s'][k]))
                #USU = SP.dot(KV['USi_c'][k].T,KV['Utilde_s'][k])

                USU = SP.dot(
                    self.nbasis[k].T,
                    SP.dot(self.basis[k],
                           SP.dot(KV['USi_c'][k].T, KV['Utilde_s'][k])))

                for i in range(len(theta)):
                    Kgrad_s = covar.Kgrad_theta(hyperparams['covar_s'][k], i)
                    UdKU = SP.dot(USU.T, SP.dot(Kgrad_s, USU))
                    temp = copy.deepcopy(KV['Stilde_s'])
                    for z in range(len(temp)):
                        temp[z] = SP.diag(temp[z])
                    temp[k] = UdKU
                    SUdKUS = reduce(SP.kron, temp[::-1])
                    LMLgrad_det = SP.sum(Si * SP.kron(
                        SP.diag(SUdKUS), reduce(SP.kron, KV['Stilde_o'])))
                    SYUdKU = SP.dot(SUdKUS,
                                    reduce(SP.kron, KV['Stilde_o']) * Yhat.T)
                    LMLgrad_quad = -(Yhat.T * SYUdKU).sum()
                    LMLgrad = 0.5 * (LMLgrad_det + LMLgrad_quad)
                    theta[i] = LMLgrad
                RV['covar_s'].append(theta)
                k += 1

        return RV
Example #9
    def _LMLgrad_x(self,hyperparams,debugging=False):
        """
        evaluates the gradient of the log marginal likelihood with respect to
        the latent factors
        """
        try:
            KV = self.get_covariances(hyperparams,debugging=debugging)
        except LA.LinAlgError:
            LG.error('linalg exception in _LML_covar')
            RV = {}
            if 'X_r' in hyperparams:
                RV['X_r'] = SP.zeros(hyperparams['X_r'].shape)
            if 'X_c' in hyperparams:
                RV['X_c'] = SP.zeros(hyperparams['X_c'].shape)
            return RV
        except ValueError:
            LG.error('value error in _LML_covar')
            RV = {}
            if 'X_r' in hyperparams:
                RV['X_r'] = SP.zeros(hyperparams['X_r'].shape)
            if 'X_c' in hyperparams:
                RV['X_c'] = SP.zeros(hyperparams['X_c'].shape)
            return RV
       
        RV = {}
        if 'X_r' in hyperparams:
            LMLgrad = SP.zeros((self.n,self.covar_r.n_dimensions))
            LMLgrad_det = SP.zeros((self.n,self.covar_r.n_dimensions))
            LMLgrad_quad = SP.zeros((self.n,self.covar_r.n_dimensions))

            SS = SP.dot(unravel(1./KV['S'],self.n,self.t),KV['S_c'])
            UY = SP.dot(KV['U_r'],KV['Ytilde'])
            UYSYU = SP.dot(UY,SP.dot(SP.diag(KV['S_c']),UY.T))
            for d in xrange(self.covar_r.n_dimensions):
                Kd_grad = self.covar_r.Kgrad_x(hyperparams['covar_r'],d)
                # calculate gradient of logdet
                URU = SP.dot(Kd_grad.T,KV['U_r'])*KV['U_r']
                LMLgrad_det[:,d] = 2*SP.dot(URU,SS.T)
                # calculate gradient of squared form
                LMLgrad_quad[:,d] = -2*(UYSYU*Kd_grad).sum(0)
            LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
            RV['X_r'] = LMLgrad
            
            if debugging:
                 _LMLgrad = SP.zeros((self.n,self.covar_r.n_dimensions))
                 for n in xrange(self.n):
                     for d in xrange(self.covar_r.n_dimensions):
                         Kgrad_x = self.covar_r.Kgrad_x(hyperparams['covar_r'],d,n)
                         Kgrad_x = SP.kron(KV['K_c'],Kgrad_x)
                         _LMLgrad[n,d] = 0.5*(KV['W']*Kgrad_x).sum()
                 assert SP.allclose(LMLgrad,_LMLgrad), 'ouch, gradient is wrong for X_r'

        if 'X_c' in hyperparams:
            LMLgrad = SP.zeros((self.t,self.covar_c.n_dimensions))
            LMLgrad_quad = SP.zeros((self.t,self.covar_c.n_dimensions))
            LMLgrad_det = SP.zeros((self.t,self.covar_c.n_dimensions))

            SS = SP.dot(KV['S_r'],unravel(1./KV['S'],self.n,self.t))
            UY = SP.dot(KV['U_c'],KV['Ytilde'].T)
            UYSYU = SP.dot(UY,SP.dot(SP.diag(KV['S_r']),UY.T))
            for d in xrange(self.covar_c.n_dimensions):
                Kd_grad = self.covar_c.Kgrad_x(hyperparams['covar_c'],d)
                # calculate gradient of logdet
                UCU = SP.dot(Kd_grad.T,KV['U_c'])*KV['U_c']
                LMLgrad_det[:,d] = 2*SP.dot(SS,UCU.T)
                # calculate gradient of squared form
                LMLgrad_quad[:,d] = -2*(UYSYU*Kd_grad).sum(0)
                
            LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
            RV['X_c'] = LMLgrad
            
            if debugging:
                 _LMLgrad = SP.zeros((self.t,self.covar_c.n_dimensions))
                 for n in xrange(self.t):
                     for d in xrange(self.covar_c.n_dimensions):
                         Kgrad_x = self.covar_c.Kgrad_x(hyperparams['covar_c'],d,n)
                         Kgrad_x = SP.kron(Kgrad_x,KV['K_r'])
                         _LMLgrad[n,d] = 0.5*(KV['W']*Kgrad_x).sum()
                 assert SP.allclose(LMLgrad,_LMLgrad), 'ouch, gradient is wrong for X_c'
    
        return RV
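
Besides the exact 'W'-based debugging checks above, gradients like these can also be validated numerically. A generic central-difference checker, sketched here with a toy quadratic objective (f, grad and all other names are placeholders, not part of the library):

    import numpy as np

    def check_grad(f, grad, x, eps=1e-6, atol=1e-5):
        # compare an analytic gradient against central finite differences
        g_num = np.zeros_like(x)
        for i in range(x.size):
            e = np.zeros_like(x)
            e.flat[i] = eps
            g_num.flat[i] = (f(x + e) - f(x - e)) / (2 * eps)
        return np.allclose(grad(x), g_num, atol=atol)

    A = np.array([[2.0, 0.5], [0.5, 1.0]])
    f = lambda x: 0.5 * x @ A @ x          # toy objective
    grad = lambda x: A @ x                 # its analytic gradient
    assert check_grad(f, grad, np.array([0.3, -1.2]))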
Example #10
    def get_covariances(self,hyperparams,debugging=False):
        """
        INPUT:
        hyperparams:  dictionary
        OUTPUT: covariance cache (dictionary) with, among others, the fields
        K_r, U_r, S_r:  row kernel and its eigendecomposition
        K_c, U_c, S_c:  column kernel and its eigendecomposition
        S:              eigenvalues of kron(K_c, K_r) plus the noise diagonal
        Ytilde:         rotated and rescaled targets
        """
        if self._is_cached(hyperparams):
            return self._covar_cache
        if self._covar_cache is None:
            self._covar_cache = {}
            
        if not(self._is_cached(hyperparams,keys=['covar_c'])):
            K_c = self.covar_c.K(hyperparams['covar_c'])
            S_c,U_c = LA.eigh(K_c)
            self._covar_cache['K_c'] = K_c
            self._covar_cache['U_c'] = U_c
            self._covar_cache['S_c'] = S_c
        else:
            K_c = self._covar_cache['K_c']
            U_c = self._covar_cache['U_c']
            S_c = self._covar_cache['S_c']
            
        if not(self._is_cached(hyperparams,keys=['covar_r'])):
            K_r = self.covar_r.K(hyperparams['covar_r'])
            S_r,U_r = LA.eigh(K_r)
            self._covar_cache['K_r'] = K_r
            self._covar_cache['U_r'] = U_r
            self._covar_cache['S_r'] = S_r
        else:
            K_r = self._covar_cache['K_r']
            U_r = self._covar_cache['U_r']
            S_r = self._covar_cache['S_r']

        S = SP.kron(S_c,S_r) + self.likelihood.Kdiag(hyperparams['lik'],self.nt)
        UYU = SP.dot(U_r.T,SP.dot(self.Y,U_c))
        YtildeVec = (1./S)*ravel(UYU)
        self._covar_cache['S'] = S
        self._covar_cache['UYU'] = UYU
        self._covar_cache['Ytilde'] = unravel(YtildeVec,self.n,self.t)

        
        if debugging:
            # test ravel operations
            UYUvec = ravel(UYU)
            UYU2 = unravel(UYUvec,self.n,self.t)
            assert SP.allclose(UYU2,UYU), 'ouch, ravel operations are inconsistent'

            # needed later
            Yvec = ravel(self.Y)
            K_noise = self.likelihood.K(hyperparams['lik'],self.nt) # only works for iid noise
            K = SP.kron(K_c,K_r) + K_noise
            #L = LA.cholesky(K).T
            L = jitChol(K)[0].T # lower triangular
            alpha = LA.cho_solve((L,True),Yvec)
            alpha2D = SP.reshape(alpha,(self.nt,1))
            Kinv = LA.cho_solve((L,True),SP.eye(self.nt))
            W = Kinv - SP.dot(alpha2D,alpha2D.T)
            self._covar_cache['Yvec'] = Yvec
            self._covar_cache['K'] = K
            self._covar_cache['Kinv'] = Kinv
            self._covar_cache['L'] = L
            self._covar_cache['alpha'] = alpha
            self._covar_cache['W'] = W

        self._covar_cache['hyperparams'] = copy.deepcopy(hyperparams)
        return self._covar_cache
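
The debugging branch above factorises the full covariance with jitChol. A common pattern for such a helper, shown here as a hypothetical stand-in rather than the library's own jitChol (whose exact signature and return convention are not reproduced here), is to retry the Cholesky with growing diagonal jitter:

    import numpy as np

    def jittered_cholesky(K, max_tries=10):
        # lower-triangular Cholesky of K, adding diagonal jitter if K is not
        # numerically positive definite; returns the factor and the jitter used
        jitter = 0.0
        for _ in range(max_tries):
            try:
                L = np.linalg.cholesky(K + jitter * np.eye(K.shape[0]))
                return L, jitter
            except np.linalg.LinAlgError:
                jitter = 1e-10 * np.trace(K) / K.shape[0] if jitter == 0.0 else 10.0 * jitter
        raise np.linalg.LinAlgError('matrix not positive definite, even with jitter')

    # usage sketch: L, used_jitter = jittered_cholesky(K) for a symmetric matrix K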
Example #11
    def predict(self, hyperparams, Xstar_r, compute_cov=False):
        """
        predict on Xstar
        """
        KV = self.get_covariances(hyperparams)

        self.covar_r.Xcross = Xstar_r

        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(hyperparams['covar_c'])
        KinvY = SP.dot(KV['U_r'], SP.dot(KV['Ytilde'], KV['U_c'].T))
        BD = SP.dot(self.basis, Kstar_c)
        Ystar = reduce(SP.dot, [Kstar_r.T, KinvY, BD.T])

        Ystar_covar = []
        if compute_cov:
            BDU = SP.dot(BD, KV['U_c'])
            BDB = (BD * self.basis).sum(-1)
            Ystar_covar = SP.zeros([Xstar_r.shape[0], self.Y.shape[1]])
            s_rev = 1. / KV['S']
            printProgressBar(0,
                             BDU.shape[0],
                             prefix='Computing prediction variance:',
                             suffix='Complete',
                             length=20)
            if Xstar_r.shape[0] < 500:
                R_star_star = self.covar_r.K(hyperparams['covar_r'], Xstar_r)
                self.covar_r.Xcross = Xstar_r
                R_tr_star = self.covar_r.Kcross(hyperparams['covar_r'])
                RU = SP.dot(R_tr_star.T, KV['U_r'])
                q = unravel(SP.kron(BDB, SP.diag(R_star_star)),
                            Ystar_covar.shape[0], Ystar_covar.shape[1])
                t = SP.zeros(Ystar_covar.shape)
                for j in range(BDU.shape[0]):
                    temp = SP.kron(BDU[j, :], RU)
                    t[:, j] = SP.sum((s_rev * temp).T * temp.T, axis=0)
                    if (j + 1) % 10000 == 0:
                        printProgressBar(
                            j + 1,
                            BDU.shape[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)

                Ystar_covar = q - t
            else:
                for i in range(Xstar_r.shape[0]):
                    R_star_star = self.covar_r.K(
                        hyperparams['covar_r'],
                        SP.expand_dims(Xstar_r[i, :], axis=0))
                    self.covar_r.Xcross = SP.expand_dims(Xstar_r[i, :], axis=0)
                    R_tr_star = self.covar_r.Kcross(hyperparams['covar_r'])
                    RU = SP.dot(R_tr_star.T, KV['U_r'])
                    q = SP.kron(BDB, R_star_star)
                    t = SP.zeros([self.basis.shape[0]])
                    for j in range(BDU.shape[0]):
                        temp = SP.kron(BDU[j, :], RU)
                        t[j, ] = SP.sum((s_rev * temp).T * temp.T, axis=0)
                    Ystar_covar[i, :] = q - t
                    if (i + 1) % max(1, Xstar_r.shape[0] // 10) == 0:
                        printProgressBar(
                            i + 1,
                            Xstar_r.shape[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)
                self.covar_r.Xcross = Xstar_r
        return Ystar, Ystar_covar
Example #12
    def predict(self, hyperparams, Xstar_r, compute_cov=False):
        """
        predict on Xstar_r
        """
        KV = self.get_covariances(hyperparams)

        self.covar_r[0].Xcross = Xstar_r
        Kstar_r = self.covar_r[0].Kcross(hyperparams['covar_r'][0])
        USUr = SP.dot(
            SP.sqrt(1. / KV['S_o'][0]) * KV['U_o'][0], KV['Utilde_r'][0])
        S = KV['Stilde_rc']
        RUSUrYhat = SP.dot(
            Kstar_r.T,
            SP.dot(
                USUr,
                unravel(
                    ravel(KV['UYtildeU_rc']) * 1. / S, self.n,
                    SP.prod(self.nbn))))
        usuc = list()
        for i in range(self.out_dims):
            usuc.append(
                SP.dot(
                    self.basis[i],
                    SP.dot(
                        KV['K_c'][i],
                        SP.dot(
                            self.basis[i].T,
                            SP.dot(self.nbasis[i],
                                   SP.dot(KV['USi_s'][i].T,
                                          KV['Utilde_c'][i]))))).T)
        if SP.prod(self.t) <= 10000:
            USUC = reduce(SP.kron, usuc[::-1])
            Ystar = SP.dot(RUSUrYhat, USUC)
        else:
            Ystar = SP.zeros([Xstar_r.shape[0], SP.prod(self.t)])
            if self.out_dims == 1:
                for j in range(self.t[0]):
                    USUC = usuc[0][:, j]
                    Ystar[:, j] = SP.dot(RUSUrYhat, USUC)
            elif self.out_dims == 2:
                for j in range(self.t[0]):
                    for k in range(self.t[1]):
                        USUC = reduce(SP.kron, [usuc[1][:, k], usuc[0][:, j]])
                        Ystar[:, k + j * self.t[1]] = SP.dot(RUSUrYhat, USUC)
            elif self.out_dims == 3:
                for j in range(self.t[0]):
                    for k in range(self.t[1]):
                        for l in range(self.t[2]):
                            USUC = reduce(
                                SP.kron,
                                [usuc[2][:, l], usuc[1][:, k], usuc[0][:, j]])
                            Ystar[:, l + k * self.t[2] + j *
                                  (self.t[1] * self.t[2])] = SP.dot(
                                      RUSUrYhat, USUC)
        Ystar_covar = []
        if compute_cov:
            if self.nt < 10000:
                B = reduce(SP.kron, self.basis[::-1])
                C = SP.dot(B, SP.dot(reduce(SP.kron, KV['K_c'][::-1]), B.T))
                USUC = reduce(SP.kron, usuc[::-1])
                R_star_star = self.covar_r[0].K(hyperparams['covar_r'][0],
                                                Xstar_r)
                temp = SP.kron(USUC.T, SP.dot(Kstar_r.T, USUr))
                Ystar_covar = SP.kron(SP.diag(C),
                                      SP.diag(R_star_star)) - SP.sum(
                                          (1. / S * temp).T * temp.T, axis=0)
                Ystar_covar = unravel(Ystar_covar, Xstar_r.shape[0],
                                      SP.prod(self.t))
            elif SP.prod(self.t) < 10000:
                Ystar_covar = SP.zeros([Xstar_r.shape[0], SP.prod(self.t)])
                B = reduce(SP.kron, self.basis[::-1])
                C = SP.dot(B, SP.dot(reduce(SP.kron, KV['K_c'][::-1]), B.T))
                USUC = reduce(SP.kron, usuc[::-1])
                printProgressBar(0,
                                 Xstar_r.shape[0],
                                 prefix='Computing prediction variance:',
                                 suffix='Complete',
                                 length=20)
                for i in range(Xstar_r.shape[0]):
                    R_star_star = self.covar_r[0].K(
                        hyperparams['covar_r'][0],
                        SP.expand_dims(Xstar_r[i, :], axis=0))
                    self.covar_r[0].Xcross = SP.expand_dims(Xstar_r[i, :],
                                                            axis=0)
                    R_tr_star = self.covar_r[0].Kcross(
                        hyperparams['covar_r'][0])
                    r = SP.dot(R_tr_star.T, USUr)
                    q = SP.diag(SP.kron(C, R_star_star))
                    t = SP.zeros([SP.prod(self.t)])
                    for j in range(SP.prod(self.t)):
                        temp = SP.kron(USUC[:, j], r)
                        t[j, ] = SP.sum((1. / S * temp).T * temp.T, axis=0)
                    Ystar_covar[i, :] = q - t
                    if (i + 1) % 50 == 0:
                        printProgressBar(
                            i + 1,
                            Xstar_r.shape[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)
                self.covar_r[0].Xcross = Xstar_r
            elif Xstar_r.shape[0] < 2000:
                Ystar_covar = SP.zeros([Xstar_r.shape[0], SP.prod(self.t)])
                c_diag = list()
                for j in range(self.out_dims):
                    temp = SP.dot(self.basis[j],
                                  SP.dot(KV['K_c'][j], self.basis[j].T))
                    c_diag.append(SP.diag(temp))
                C = reduce(SP.kron, c_diag[::-1])
                R_star_star = self.covar_r[0].K(hyperparams['covar_r'][0],
                                                Xstar_r)
                R_tr_star = self.covar_r[0].Kcross(hyperparams['covar_r'][0])
                r = SP.dot(R_tr_star.T, USUr)
                #q = SP.reshape(SP.kron(SP.diag(R_star_star),C),[Ystar_covar.shape[0],Ystar_covar.shape[1]])
                q = unravel(SP.kron(C, SP.diag(R_star_star)),
                            Ystar_covar.shape[0], Ystar_covar.shape[1])
                t = SP.zeros(Ystar_covar.shape)
                printProgressBar(0,
                                 Xstar_r.shape[0],
                                 prefix='Computing prediction variance:',
                                 suffix='Complete',
                                 length=20)
                if self.out_dims == 1:
                    for j in range(self.t[0]):
                        USUC = usuc[0][:, j]
                        temp = SP.kron(USUC, r)
                        t[j, ] = SP.sum((1. / S * temp).T * temp.T, axis=0)
                        printProgressBar(
                            j + 1,
                            self.t[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)
                elif self.out_dims == 2:
                    for j in range(self.t[0]):
                        for k in range(self.t[1]):
                            USUC = reduce(SP.kron,
                                          [usuc[1][:, k], usuc[0][:, j]])
                            temp = SP.kron(USUC, r)
                            t[k + (j * self.t[1]), ] = SP.sum(
                                (1. / S * temp).T * temp.T, axis=0)
                        printProgressBar(
                            j + 1,
                            self.t[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)
                elif self.out_dims == 3:
                    for j in range(self.t[0]):
                        for k in range(self.t[1]):
                            for l in range(self.t[2]):
                                USUC = reduce(SP.kron, [
                                    usuc[2][:, l], usuc[1][:, k], usuc[0][:, j]
                                ])
                                temp = SP.kron(USUC, r)
                                t[:, l + k * self.t[2] +
                                  j * self.t[1] * self.t[2], ] = SP.sum(
                                      (1. / S * temp).T * temp.T, axis=0)
                        if (j + 1) % 5 == 0:
                            printProgressBar(
                                j + 1,
                                self.t[0],
                                prefix='Computing prediction variance:',
                                suffix='Complete',
                                length=20)
                Ystar_covar = q - t
            else:
                Ystar_covar = SP.zeros([Xstar_r.shape[0], SP.prod(self.t)])
                c_diag = list()
                for j in range(self.out_dims):
                    temp = SP.dot(self.basis[j],
                                  SP.dot(KV['K_c'][j], self.basis[j].T))
                    c_diag.append(SP.diag(temp))
                C = reduce(SP.kron, c_diag[::-1])
                printProgressBar(0,
                                 Xstar_r.shape[0],
                                 prefix='Computing prediction variance:',
                                 suffix='Complete',
                                 length=20)
                for i in range(Xstar_r.shape[0]):
                    R_star_star = self.covar_r[0].K(
                        hyperparams['covar_r'][0],
                        SP.expand_dims(Xstar_r[i, :], axis=0))
                    self.covar_r[0].Xcross = SP.expand_dims(Xstar_r[i, :],
                                                            axis=0)
                    R_tr_star = self.covar_r[0].Kcross(
                        hyperparams['covar_r'][0])
                    r = SP.dot(R_tr_star.T, USUr)
                    q = C * R_star_star
                    t = SP.zeros([SP.prod(self.t)])
                    if self.out_dims == 1:
                        for j in range(self.t[0]):
                            USUC = usuc[0][:, j]
                            temp = SP.kron(USUC, r)
                            t[j, ] = SP.sum((1. / S * temp).T * temp.T, axis=0)
                    elif self.out_dims == 2:
                        for j in range(self.t[0]):
                            for k in range(self.t[1]):
                                USUC = reduce(SP.kron,
                                              [usuc[1][:, k], usuc[0][:, j]])
                                temp = SP.kron(USUC, r)
                                t[k + (j * self.t[1]), ] = SP.sum(
                                    (1. / S * temp).T * temp.T, axis=0)
                    elif self.out_dims == 3:
                        for j in range(self.t[0]):
                            for k in range(self.t[1]):
                                for l in range(self.t[2]):
                                    USUC = reduce(SP.kron, [
                                        usuc[2][:, l], usuc[1][:, k],
                                        usuc[0][:, j]
                                    ])
                                    temp = SP.kron(USUC, r)
                                    t[l + k * self.t[2] +
                                      j * self.t[1] * self.t[2], ] = SP.sum(
                                          (1. / S * temp).T * temp.T, axis=0)
                    Ystar_covar[i, :] = q - t
                    if (i + 1) % 10 == 0:
                        printProgressBar(
                            i + 1,
                            Xstar_r.shape[0],
                            prefix='Computing prediction variance:',
                            suffix='Complete',
                            length=20)
                self.covar_r[0].Xcross = Xstar_r

        return Ystar, Ystar_covar
Example #13
    def _LMLgrad_x(self,hyperparams):
        """
        evaluates the gradient of the log marginal likelihood with respect to
        the latent factors
        """
        try:
            KV = self.get_covariances(hyperparams)
        except LA.LinAlgError:
            LG.error('linalg exception in _LML_covar')
            RV = {}
            if 'X_r' in hyperparams:
                RV['X_r'] = SP.zeros(hyperparams['X_r'].shape)
            if 'X_c' in hyperparams:
                RV['X_c'] = SP.zeros(hyperparams['X_c'].shape)
            return RV
        except ValueError:
            LG.error('value error in _LML_covar')
            RV = {}
            if 'X_r' in hyperparams:
                RV['X_r'] = SP.zeros(hyperparams['X_r'].shape)
            if 'X_c' in hyperparams:
                RV['X_c'] = SP.zeros(hyperparams['X_c'].shape)
            return RV
       
        RV = {}
        if 'X_r' in hyperparams:
            LMLgrad = SP.zeros((self.n,self.covar_r.n_dimensions))
            LMLgrad_det = SP.zeros((self.n,self.covar_r.n_dimensions))
            LMLgrad_quad = SP.zeros((self.n,self.covar_r.n_dimensions))

            SS = SP.dot(unravel(1./KV['S'],self.n,self.t),KV['S_c_tilde'])
            UY = SP.dot(KV['U_r'],KV['Ytilde'])
            UYSYU = SP.dot(UY,SP.dot(SP.diag(KV['S_c_tilde']),UY.T))
            for d in xrange(self.covar_r.n_dimensions):
                Kd_grad = self.covar_r.Kgrad_x(hyperparams['covar_r'],d)
                # calculate gradient of logdet
                URU = SP.dot(Kd_grad.T,KV['U_r'])*KV['U_r']
                LMLgrad_det[:,d] = 2*SP.dot(URU,SS.T)
                # calculate gradient of squared form
                LMLgrad_quad[:,d] = -2*(UYSYU*Kd_grad).sum(0)
            LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
            RV['X_r'] = LMLgrad
            
        if 'X_c' in hyperparams:
            LMLgrad = SP.zeros((self.t,self.covar_c.n_dimensions))
            LMLgrad_quad = SP.zeros((self.t,self.covar_c.n_dimensions))
            LMLgrad_det = SP.zeros((self.t,self.covar_c.n_dimensions))

            SS = SP.dot(KV['S_r'],unravel(1./KV['S'],self.n,self.t))
            UY = SP.dot(KV['UBinvB'],KV['Ytilde'].T)
            UYSYU = SP.dot(UY,SP.dot(SP.diag(KV['S_r']),UY.T))
            for d in xrange(self.covar_c.n_dimensions):
                Kd_grad = self.covar_c.Kgrad_x(hyperparams['covar_c'],d)
                # calculate gradient of logdet
                UCU = SP.dot(Kd_grad.T,KV['UBinvB'])*KV['UBinvB']
                LMLgrad_det[:,d] = 2*SP.dot(SS,UCU.T)
                # calculate gradient of squared form
                LMLgrad_quad[:,d] = -2*(UYSYU*Kd_grad).sum(0)
                
            LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
            RV['X_c'] = LMLgrad
    
        return RV