Example #1
0
 def optimize(self, params0):
     """Optimize the GP hyperparameters starting from ``params0``.

     Records the initial and final log marginal likelihood (and their
     gradients), the fitted parameters, the latent factors ``X``, and the
     (diagonal-normalized) kernels in ``self.cache``.

     Parameters
     ----------
     params0 : starting hyperparameters, passed to ``self.gp.setParams``.

     Returns
     -------
     conv : convergence flag returned by the limix optimizer.
     """
     # initialize: record the starting point and its objective/gradient
     # (plain comments instead of bare string literals, which are no-op
     # expression statements)
     self.gp.setParams(params0)
     self.cache['params0'] = params0
     self.cache['lml0'] = self.gp.LML()
     self.cache['lmlGrad0'] = self.gp.LMLgrad()
     # optimize
     gpopt = limix.CGPopt(self.gp)
     conv = gpopt.opt()
     # store results at the optimum
     self.cache['lml'] = self.gp.LML()
     self.cache['lmlGrad'] = self.gp.LMLgrad()
     self.cache['params'] = self.gp.getParams()
     # latent factors are stored column-major (Fortran order) inside Ca
     self.cache['X'] = self.Ca.getParams().reshape((self.N, self.k),
                                                   order='F')
     self.cache['K'] = self.Ca.K()
     if self.interaction:
         # interaction kernel: elementwise product of the two covariances;
         # reuse the K just computed instead of re-evaluating self.Ca.K()
         self.cache['Ki'] = self.cache['K'] * self.Cb2.K()
     else:
         self.cache['Ki'] = None
     # variance components (average diagonal of each kernel / squared params)
     self.cache['var'] = {}
     self.cache['var']['K0'] = self.C0.getParams()[0]**2
     self.cache['var']['K'] = self.cache['K'].diagonal().mean()
     if self.interaction:
         self.cache['var']['Ki'] = self.cache['Ki'].diagonal().mean()
     self.cache['var']['noise'] = self.ll.getParams()[0]**2
     # normalize the kernels to unit average diagonal (in place)
     self.cache['K'] /= self.cache['var']['K']
     if self.interaction:
         self.cache['Ki'] /= self.cache['var']['Ki']
     return conv
Example #2
0
    def test_fit(self):
        """Optimize the GP and verify the gradient vanishes at the optimum."""
        # build the optimizer around the GP under test
        self.gpopt = dlimix_legacy.CGPopt(self.gp)
        # run the optimization twice
        # NOTE(review): the double opt() call is presumably a deliberate
        # warm restart from the first optimum — confirm.
        RV = self.gpopt.opt()
        RV = self.gpopt.opt()

        # at an optimum every gradient block should be (near) zero
        residual = sum(
            SP.absolute(self.gp.LMLgrad()[key]).max()
            for key in ('X', 'covar', 'lik'))

        np.testing.assert_almost_equal(residual, 0., decimal=1)
Example #3
0
    def train(self, rank=20, Kpop=True, LinearARD=False):
        """Train the PANAMA module.

        Builds a sum covariance (one linear term over ``rank`` latent
        factors plus one fixed term per kernel in ``self.Ks``), optimizes
        the GP hyperparameters under box constraints, and stores the
        resulting kernels, variance components and predictions on the
        instance (``Xpanama``, ``Kpanama``, ``Ktot``, ``varianceComps``,
        ``Ypanama``, ``LL``).

        Parameters
        ----------
        rank : int
            Number of latent factors in the linear covariance.
        Kpop : bool
            NOTE(review): unused in this method body — confirm whether it
            was meant to gate the ``self.Ks`` fixed-kernel terms.
        LinearARD : bool
            If True, use a linear ARD covariance (one weight per factor)
            instead of an isotropic linear covariance.
        """

        # NOTE(review): dead code — this branch can never run; it is kept
        # here apparently as the pre-ARD variant of the setup below.
        if 0:
            covar = limix_legacy.CCovLinearISO(rank)
            ll = limix_legacy.CLikNormalIso()
            X0 = sp.random.randn(self.N, rank)
            X0 = PCA(self.Y, rank)[0]
            X0 /= sp.sqrt(rank)
            covar_params = sp.array([1.0])
            lik_params = sp.array([1.0])

            hyperparams = limix_legacy.CGPHyperParams()
            hyperparams['covar'] = covar_params
            hyperparams['lik'] = lik_params
            hyperparams['X'] = X0

            constrainU = limix_legacy.CGPHyperParams()
            constrainL = limix_legacy.CGPHyperParams()
            constrainU['covar'] = +5 * sp.ones_like(covar_params)
            constrainL['covar'] = 0 * sp.ones_like(covar_params)
            constrainU['lik'] = +5 * sp.ones_like(lik_params)
            constrainL['lik'] = 0 * sp.ones_like(lik_params)

        # Live setup: sum covariance = linear (ARD or ISO) + one fixed
        # covariance per provided kernel in self.Ks.
        if 1:
            covar = limix_legacy.CSumCF()
            if LinearARD:
                covar_1 = limix_legacy.CCovLinearARD(rank)
                covar_params = []
                # decreasing initial weights 1/sqrt(d+2) per factor
                for d in range(rank):
                    covar_params.append(1 / sp.sqrt(d + 2))
            else:
                covar_1 = limix_legacy.CCovLinearISO(rank)
                covar_params = [1.0]
            covar.addCovariance(covar_1)

            # one fixed-kernel term (with unit initial weight) per kernel
            for K in self.Ks:
                covar.addCovariance(limix_legacy.CFixedCF(K))
                covar_params.append(1.0)

            ll = limix_legacy.CLikNormalIso()
            # initialize latent factors from PCA of Y, scaled by 1/sqrt(rank)
            X0 = PCA(self.Y, rank)[0]
            X0 /= sp.sqrt(rank)
            covar_params = sp.array(covar_params)
            lik_params = sp.array([1.0])

            hyperparams = limix_legacy.CGPHyperParams()
            hyperparams['covar'] = covar_params
            hyperparams['lik'] = lik_params
            hyperparams['X'] = X0

            # box constraints for the optimizer
            constrainU = limix_legacy.CGPHyperParams()
            constrainL = limix_legacy.CGPHyperParams()
            constrainU['covar'] = +5 * sp.ones_like(covar_params)
            constrainL['covar'] = -5 * sp.ones_like(covar_params)
            constrainU['lik'] = +5 * sp.ones_like(lik_params)
            # NOTE(review): constrainL['lik'] is never set in this live
            # branch (only the dead branch above sets it, to 0) — confirm
            # the optimizer tolerates a missing lower bound on 'lik'.

        # build the GP and optimize under the constraints
        gp = limix_legacy.CGPbase(covar, ll)
        gp.setY(self.Y)
        gp.setX(X0)
        # initial objective/gradient (kept for inspection; otherwise unused)
        lml0 = gp.LML(hyperparams)
        dlml0 = gp.LMLgrad(hyperparams)
        gpopt = limix_legacy.CGPopt(gp)
        gpopt.setOptBoundLower(constrainL)
        gpopt.setOptBoundUpper(constrainU)

        t1 = time.time()
        gpopt.opt()
        t2 = time.time()

        #Kpanama
        # latent factors and their kernel after optimization
        self.Xpanama = covar_1.getX()
        if LinearARD:
            # standardize factors column-wise under ARD
            self.Xpanama /= self.Xpanama.std(0)
        self.Kpanama = covar_1.K()
        #        self.Kpanama/= self.Kpanama.diagonal().mean()

        # Ktot
        # total kernel = latent kernel + all fixed kernel terms
        self.Ktot = covar_1.K()
        for c_i in range(len(self.Ks)):
            self.Ktot += covar.getCovariance(c_i + 1).K()
#       self.Ktot/= self.Ktot.diagonal().mean()

#store variances
        V = {}
        if LinearARD:
            # per-factor variance: squared weight times factor variance
            V['LinearARD'] = covar_1.getParams()**2 * covar_1.getX().var(0)
        else:
            V['Kpanama'] = sp.array([covar_1.K().diagonal().mean()])
#        if self.use_Kpop:
#            V['Ks'] = sp.array([covar.getCovariance(c_i+1).K().diagonal().mean() for c_i in range(len(self.Ks))])
        # NOTE(review): unlike the commented-out line above, this stores the
        # full kernel matrices, not scalar diagonal-mean variances — confirm
        # downstream consumers of varianceComps['Ks'] expect matrices.
        V['Ks'] = sp.array(
            [covar.getCovariance(c_i + 1).K() for c_i in range(len(self.Ks))])
        V['noise'] = gp.getParams()['lik']**2
        self.varianceComps = V

        # predictions
        # posterior mean of the latent component: K_1 (K_noise + K_tot)^-1 Y
        Ki = la.inv(ll.K() + covar.K())
        self.Ypanama = sp.dot(covar_1.K(), sp.dot(Ki, self.Y))
        self.LL = ll.K()