Example #1
0
    def _LMLgrad_s(self,hyperparams,debugging=False):
        """
        evaluate gradients with respect to covariance matrix Sigma

        Returns a dict with the gradient w.r.t. the latent inputs 'X_s'
        and/or the kernel parameters 'covar_s', depending on which keys
        are present in hyperparams.  If debugging is True, each fast
        (Kronecker-trick) gradient is checked against a brute-force
        computation over the full Kronecker kernel.
        """
        try:
            KV = self.get_covariances(hyperparams, debugging=debugging)
        except LA.LinAlgError:
            # covariance factorization failed: log and return a zero gradient
            # so the optimizer can continue
            LG.error('linalg exception in _LMLgrad_x_sigma')
            return {'X_s':SP.zeros(hyperparams['X_s'].shape)}

        # inverse eigenvalues of the rotated joint covariance
        Si = 1./KV['Stilde_os']
        SS = SP.dot(unravel(Si,self.n,self.t).T,KV['Stilde_o'])
        USU = SP.dot(KV['USi_c'],KV['Utilde_s'])        
        # K^{-1}-weighted rotated targets, reshaped to (n, t)
        Yhat = unravel(Si * ravel(KV['UYtildeU_os']),self.n,self.t)
        RV = {}
        
        if 'X_s' in hyperparams:
            USUY = SP.dot(USU,Yhat.T)
            USUYSYUSU = SP.dot(USUY,(KV['Stilde_o']*USUY).T)
        
            LMLgrad = SP.zeros((self.t,self.covar_s.n_dimensions))
            LMLgrad_det = SP.zeros((self.t,self.covar_s.n_dimensions))
            LMLgrad_quad = SP.zeros((self.t,self.covar_s.n_dimensions))
        
            for d in xrange(self.covar_s.n_dimensions):
                Kd_grad = self.covar_s.Kgrad_x(hyperparams['covar_s'],d)
                # calculate gradient of the log-determinant term
                UsU = SP.dot(Kd_grad.T,USU)*USU
                LMLgrad_det[:,d] = SP.dot(UsU,SS.T)
                # calculate gradient of squared form
                LMLgrad_quad[:,d] = -(USUYSYUSU*Kd_grad).sum(0)
            LMLgrad = LMLgrad_det + LMLgrad_quad
            RV['X_s'] = LMLgrad
            
            if debugging:
                # brute-force gradient via the full Kronecker kernel
                _LMLgrad = SP.zeros((self.t,self.covar_s.n_dimensions))
                for t in xrange(self.t):
                    for d in xrange(self.covar_s.n_dimensions):
                        Kgrad_x = self.covar_s.Kgrad_x(hyperparams['covar_s'],d,t)
                        Kgrad_x = SP.kron(Kgrad_x,KV['K_o'])
                        _LMLgrad[t,d] = 0.5*(KV['W']*Kgrad_x).sum()

                assert SP.allclose(LMLgrad,_LMLgrad,rtol=1E-3,atol=1E-2), 'ouch, something is wrong: %.2f'%LA.norm(LMLgrad-_LMLgrad)

        if 'covar_s' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_s']))
            for i in range(len(theta)):
                Kgrad_s = self.covar_s.Kgrad_theta(hyperparams['covar_s'],i)
                UdKU = SP.dot(USU.T, SP.dot(Kgrad_s, USU))
                SYUdKU = SP.dot(UdKU,KV['Stilde_o'] * Yhat.T)
                # log-determinant and quadratic-form contributions
                LMLgrad_det = SP.sum(Si*SP.kron(SP.diag(UdKU),KV['Stilde_o']))
                LMLgrad_quad = -(Yhat.T*SYUdKU).sum()
                LMLgrad = 0.5*(LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad

                if debugging:
                    # brute-force check against the full Kronecker kernel
                    Kd = SP.kron(Kgrad_s, KV['K_o'])
                    _LMLgrad = 0.5 * (KV['W']*Kd).sum()
                    assert SP.allclose(LMLgrad,_LMLgrad,rtol=1E-3,atol=1E-2), 'ouch, something is wrong: %.2f'%LA.norm(LMLgrad-_LMLgrad)
            RV['covar_s'] = theta
        return RV
Example #2
0
    def predict(self, hyperparams, Xstar_r=None, debugging=False):
        """
        predict over new training points

        Args:
            hyperparams: dict of hyperparameters; must contain 'covar_r'
                and 'covar_c' entries.
            Xstar_r: optional test inputs for the row covariance; if given,
                they are installed as cross-covariance inputs on covar_r.
            debugging: if True, additionally compute the naive prediction
                via the full Kronecker kernel and assert both agree.

        Returns:
            (covar_r.n_cross x t) array of predicted values.
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams, debugging=debugging)
        # BUGFIX: 'Xstar_r != None' performs an elementwise comparison on
        # numpy arrays (yielding an array, not a bool); identity test is
        # the correct check here.
        if Xstar_r is not None:
            self.covar_r.Xcross = Xstar_r

        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(
            hyperparams['covar_c'])  # kernel over tasks is fixed!
        # eigenvalues of the rotated joint covariance plus unit noise
        S = SP.kron(KV['Stilde_c'], KV['Stilde_r']) + 1
        USUc = SP.dot(SP.sqrt(1. / KV['S_s']) * KV['U_s'], KV['Utilde_c'])
        USUr = SP.dot(SP.sqrt(1. / KV['S_o']) * KV['U_o'], KV['Utilde_r'])
        # K^{-1} Y computed efficiently in the rotated eigenbasis
        KinvY = SP.dot(
            USUr,
            SP.dot(unravel(ravel(KV['UYtildeU_rc']) * 1. / S, self.n, self.t),
                   USUc.T))
        Ystar = SP.dot(Kstar_r.T, SP.dot(KinvY, Kstar_c))
        Ystar = unravel(Ystar, self.covar_r.n_cross, self.t)

        if debugging:
            # naive check: predict through the explicit Kronecker kernel
            Kstar = SP.kron(Kstar_c, Kstar_r)
            Ynaive = SP.dot(Kstar.T, KV['alpha'])
            Ynaive = unravel(Ynaive, self.covar_r.n_cross, self.t)
            assert SP.allclose(Ystar,
                               Ynaive), 'ouch, prediction does not work out'
        return Ystar
Example #3
0
    def predict(self,
                hyperparams,
                Xstar_r,
                compute_cov=False,
                debugging=False):
        """
        predict over new training points

        Args:
            hyperparams: dict of hyperparameters; must contain 'covar_r'
                and 'covar_c' entries.
            Xstar_r: test inputs for the row covariance (required here,
                unlike sibling predict variants).
            compute_cov: if True, also compute the marginal predictive
                variances.
            debugging: if True, check the fast prediction against the
                naive Kronecker computation.

        Returns:
            (Ystar, Ystar_covar) where Ystar is (n_star x t); Ystar_covar
            is an empty list unless compute_cov is True, in which case it
            is the (n_star x t) array of per-entry predictive variances.
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams, debugging=debugging)
        self.covar_r.Xcross = Xstar_r

        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(
            hyperparams['covar_c'])  # kernel over tasks is fixed!
        # eigenvalues of the rotated joint covariance plus unit noise
        S = SP.kron(KV['Stilde_c'], KV['Stilde_r']) + 1
        USUc = SP.dot(SP.sqrt(1. / KV['S_s']) * KV['U_s'], KV['Utilde_c'])
        #USUc1 = SP.dot(KV['Utilde_c'].T, SP.dot(SP.diag(SP.sqrt(1./KV['S_s'])),KV['U_s'].T))
        USUr = SP.dot(SP.sqrt(1. / KV['S_o']) * KV['U_o'], KV['Utilde_r'])
        #USUr = SP.dot(SP.dot(KV['U_o'], SP.diag(SP.sqrt(1./KV['S_o']))),KV['Utilde_r'])
        # K^{-1} Y computed efficiently in the rotated eigenbasis
        KinvY = SP.dot(
            USUr,
            SP.dot(unravel(ravel(KV['UYtildeU_rc']) * 1. / S, self.n, self.t),
                   USUc.T))
        #KinvY = SP.dot(USUr, SP.dot(unravel(ravel(KV['UYtildeU_rc']) * 1./S, self.n, self.t), USUc))
        Ystar = SP.dot(Kstar_r.T, SP.dot(KinvY, Kstar_c))
        Ystar = unravel(Ystar, self.covar_r.n_cross, self.t)

        if debugging:
            # naive check: predict through the explicit Kronecker kernel
            Kstar = SP.kron(Kstar_c, Kstar_r)
            Ynaive = SP.dot(Kstar.T, KV['alpha'])
            Ynaive = unravel(Ynaive, self.covar_r.n_cross, self.t)
            assert SP.allclose(Ystar,
                               Ynaive), 'ouch, prediction does not work out'

        Ystar_covar = []
        if compute_cov:
            # NOTE(review): this assumes covar_r is a linear kernel with a
            # single log-scale hyperparameter (K = exp(2*theta) * X X^T) —
            # confirm against the covar_r implementation.
            R_star_star = SP.exp(2 * hyperparams['covar_r']) * fast_dot(
                Xstar_r, Xstar_r.T)
            R_tr_star = Kstar_r
            C = Kstar_c

            #            temp1 =  fast_kron(fast_dot(fast_dot(fast_dot(C, KV['U_s']), np.diag(SP.sqrt(1./KV['S_s']))),KV['Utilde_c']),
            #                              fast_dot(fast_dot(fast_dot(R_tr_star.T, KV['U_o']), np.diag(SP.sqrt(1./KV['S_o']))),KV['Utilde_r']))
            #            Ystar_covar1 = SP.diag(fast_kron(C, R_star_star) - fast_dot(fast_dot(temp1,np.diag(1./S )),temp1.T))

            # diag(K** - K*^T K^{-1} K*) without forming the full matrix:
            # only the diagonal of the Kronecker product is accumulated
            temp = fast_kron(
                fast_dot(fast_dot(C, KV['U_s'] * SP.sqrt(1. / KV['S_s'])),
                         KV['Utilde_c']),
                fast_dot(
                    fast_dot(R_tr_star.T, KV['U_o'] * SP.sqrt(1. / KV['S_o'])),
                    KV['Utilde_r']))
            Ystar_covar = SP.diag(fast_kron(C, R_star_star)) - SP.sum(
                (1. / S * temp).T * temp.T, axis=0)

            Ystar_covar = unravel(Ystar_covar, Xstar_r.shape[0], self.t)

        return Ystar, Ystar_covar
Example #4
0
    def _LMLgrad_x_omega(self, hyperparams, debugging=False):
        """
        Evaluate the LML gradient with respect to the latent inputs X_o
        of the row covariance Omega.

        Returns {'X_o': (n x n_dimensions) gradient array}; on a linear
        algebra failure in the covariance computation, logs the error and
        returns a zero gradient of matching shape.
        """
        try:
            KV = self.get_covariances(hyperparams)
        except LA.LinAlgError:
            LG.error('linalg exception in _LMLgrad_x_omega')
            return {'X_o': SP.zeros(hyperparams['X_o'].shape)}

        LMLgrad = SP.zeros((self.n, self.covar_o.n_dimensions))
        for d in xrange(self.covar_o.n_dimensions):
            Kgrad_x = self.covar_o.Kgrad_x(hyperparams['covar_o'], d)
            # NOTE(review): tiling before the kron differs from the
            # per-sample debugging path below; presumably it aggregates all
            # n per-sample derivative blocks in one pass — the debugging
            # assert below is the evidence both agree.
            Kgrad_x = SP.tile(Kgrad_x, self.n)
            Kgrad_x = SP.kron(KV['K_s'], Kgrad_x)
            LMLgrad[:, d] = unravel(SP.sum(KV['W'] * Kgrad_x, axis=0), self.n,
                                    self.t).sum(1)

        if debugging:
            # brute-force gradient: one full Kronecker kernel per (n, d)
            _LMLgrad = SP.zeros((self.n, self.covar_o.n_dimensions))
            for n in xrange(self.n):
                for d in xrange(self.covar_o.n_dimensions):
                    Kgrad_x = self.covar_o.Kgrad_x(hyperparams['covar_o'], d,
                                                   n)
                    Kgrad_x = SP.kron(KV['K_s'], Kgrad_x)
                    _LMLgrad[n, d] = 0.5 * (KV['W'] * Kgrad_x).sum()
            assert SP.allclose(LMLgrad, _LMLgrad), 'ouch,something is wrong'

        return {'X_o': LMLgrad}
Example #5
0
 def predict(self, hyperparams, Xstar_r=None, **kwargs):
     """
     predict on Xstar

     Args:
         hyperparams: dict with 'covar_r' and 'covar_c' entries.
         Xstar_r: optional test inputs for the row covariance; if given,
             they are installed as cross-covariance inputs on covar_r.
         **kwargs: ignored; accepted for interface compatibility.

     Returns:
         (covar_r.n_cross x t) array of predicted values.
     """
     # BUGFIX: 'Xstar_r != None' performs an elementwise comparison on
     # numpy arrays (yielding an array, not a bool); identity test is
     # the correct check here.
     if Xstar_r is not None:
         self.covar_r.Xcross = Xstar_r
     self._update_inputs(hyperparams)
     KV = self.get_covariances(hyperparams)
     Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
     Kstar_c = self.covar_c.K(
         hyperparams['covar_c'])  # kernel over tasks is fixed!
     # naive prediction through the explicit Kronecker kernel
     Kstar = SP.kron(Kstar_c, Kstar_r)
     Ystar = SP.dot(Kstar.T, KV['alpha'])
     Ystar = unravel(Ystar, self.covar_r.n_cross, self.t)
     return Ystar
Example #6
0
    def predict(self,hyperparams,Xstar_r=None,debugging=False):
        """
        predict over new training points

        Args:
            hyperparams: dict with 'covar_r' and 'covar_c' entries.
            Xstar_r: optional test inputs for the row covariance; if given,
                they are installed as cross-covariance inputs on covar_r.
            debugging: if True, check the fast prediction against the
                naive Kronecker computation.

        Returns:
            (covar_r.n_cross x t) array of predicted values.
        """
        self._update_inputs(hyperparams)
        KV = self.get_covariances(hyperparams,debugging=debugging)
        # BUGFIX: 'Xstar_r != None' performs an elementwise comparison on
        # numpy arrays (yielding an array, not a bool); identity test is
        # the correct check here.
        if Xstar_r is not None:
            self.covar_r.Xcross = Xstar_r

        Kstar_r = self.covar_r.Kcross(hyperparams['covar_r'])
        Kstar_c = self.covar_c.K(hyperparams['covar_c']) # kernel over tasks is fixed!
        # eigenvalues of the rotated joint covariance plus unit noise
        S = SP.kron(KV['Stilde_c'],KV['Stilde_r'])+1
        USUc = SP.dot(SP.sqrt(1./KV['S_s']) * KV['U_s'],KV['Utilde_c'])
        USUr = SP.dot(SP.sqrt(1./KV['S_o']) * KV['U_o'],KV['Utilde_r'])
        # K^{-1} Y computed efficiently in the rotated eigenbasis
        KinvY = SP.dot(USUr,SP.dot(unravel(ravel(KV['UYtildeU_rc']) * 1./S,self.n,self.t),USUc.T))
        Ystar = SP.dot(Kstar_r.T,SP.dot(KinvY,Kstar_c))
        Ystar = unravel(Ystar,self.covar_r.n_cross,self.t)

        if debugging:
            # naive check: predict through the explicit Kronecker kernel
            Kstar = SP.kron(Kstar_c,Kstar_r)
            Ynaive = SP.dot(Kstar.T,KV['alpha'])
            Ynaive = unravel(Ynaive,self.covar_r.n_cross,self.t)
            assert SP.allclose(Ystar,Ynaive), 'ouch, prediction does not work out'
        return Ystar
Example #7
0
    def _LMLgrad_o(self, hyperparams, debugging=False):
        """
        evaluates the gradient with respect to the covariance matrix Omega

        Returns a dict with the gradient w.r.t. the latent inputs 'X_o'
        and/or the kernel parameters 'covar_o', depending on which keys
        are present in hyperparams.  If debugging is True, each fast
        (Kronecker-trick) gradient is checked against a brute-force
        computation over the full Kronecker kernel.
        """
        try:
            KV = self.get_covariances(hyperparams, debugging=debugging)
        except LA.LinAlgError:
            # covariance factorization failed: log and return a zero gradient
            # so the optimizer can continue
            LG.error('linalg exception in _LMLgrad_x_omega')
            return {'X_o': SP.zeros(hyperparams['X_o'].shape)}

        # inverse eigenvalues of the rotated joint covariance
        Si = 1. / KV['Stilde_os']
        SS = SP.dot(unravel(Si, self.n, self.t), KV['Stilde_s'])
        USU = SP.dot(KV['USi_r'], KV['Utilde_o'])
        # K^{-1}-weighted rotated targets, reshaped to (n, t)
        Yhat = unravel(Si * ravel(KV['UYtildeU_os']), self.n, self.t)
        RV = {}

        if 'X_o' in hyperparams:
            USUY = SP.dot(USU, Yhat)
            USUYSYUSU = SP.dot(USUY, (KV['Stilde_s'] * USUY).T)

            LMLgrad = SP.zeros((self.n, self.covar_o.n_dimensions))
            LMLgrad_det = SP.zeros((self.n, self.covar_o.n_dimensions))
            LMLgrad_quad = SP.zeros((self.n, self.covar_o.n_dimensions))

            for d in xrange(self.covar_o.n_dimensions):
                Kd_grad = self.covar_o.Kgrad_x(hyperparams['covar_o'], d)
                # calculate gradient of logdet
                UoU = SP.dot(Kd_grad.T, USU) * USU
                LMLgrad_det[:, d] = SP.dot(UoU, SS.T)
                # calculate gradient of squared form
                LMLgrad_quad[:, d] = -(USUYSYUSU * Kd_grad).sum(0)

            LMLgrad = LMLgrad_det + LMLgrad_quad
            RV['X_o'] = LMLgrad

            if debugging:
                # brute-force gradient via the full Kronecker kernel
                _LMLgrad = SP.zeros((self.n, self.covar_o.n_dimensions))
                for n in xrange(self.n):
                    for d in xrange(self.covar_o.n_dimensions):
                        Kgrad_x = self.covar_o.Kgrad_x(hyperparams['covar_o'],
                                                       d, n)
                        Kgrad_x = SP.kron(KV['K_s'], Kgrad_x)
                        _LMLgrad[n, d] = 0.5 * (KV['W'] * Kgrad_x).sum()
                # BUGFIX: this assert was dedented outside the debugging
                # branch, so with debugging=False it ran unconditionally and
                # raised NameError on the undefined _LMLgrad (compare the
                # correctly-nested check in _LMLgrad_s).
                assert SP.allclose(
                    LMLgrad, _LMLgrad, rtol=1E-3, atol=1E-2
                ), 'ouch, something is wrong: %.2f' % LA.norm(LMLgrad - _LMLgrad)

        if 'covar_o' in hyperparams:
            theta = SP.zeros(len(hyperparams['covar_o']))

            for i in range(len(theta)):
                Kgrad_o = self.covar_o.Kgrad_theta(hyperparams['covar_o'], i)
                UdKU = SP.dot(USU.T, SP.dot(Kgrad_o, USU))
                SYUdKU = SP.dot(UdKU, Yhat * KV['Stilde_s'])
                # log-determinant and quadratic-form contributions
                LMLgrad_det = SP.sum(Si *
                                     SP.kron(KV['Stilde_s'], SP.diag(UdKU)))
                LMLgrad_quad = -(Yhat * SYUdKU).sum()
                LMLgrad = 0.5 * (LMLgrad_det + LMLgrad_quad)
                theta[i] = LMLgrad

                if debugging:
                    # brute-force check against the full Kronecker kernel
                    Kd = SP.kron(KV['K_s'], Kgrad_o)
                    _LMLgrad = 0.5 * (KV['W'] * Kd).sum()
                    assert SP.allclose(
                        LMLgrad, _LMLgrad, rtol=1E-2,
                        atol=1E-2), 'ouch, something is wrong: %.2f' % LA.norm(
                            LMLgrad - _LMLgrad)
            RV['covar_o'] = theta

        return RV