Example #1
    def optimize(self, calc_ste=False, Ifilter=None, bounds=None, verbose=True,
                 opts=None, *args, **kw_args):
        # logger.info('Marginal likelihood optimization.')

        if verbose:
            print('Marginal likelihood optimization.')
        if opts is None:  # avoid sharing a mutable default dict
            opts = {}
        t0 = time.time()
        conv, info = OPT.opt_hyper(self, Ifilter=Ifilter, bounds=bounds,
                                   opts=opts, *args, **kw_args)
        t1 = time.time()

        # if logger.levelno == logger.DEBUG:
        if verbose:
            # logger.debug('Time elapsed: %.2fs', t1-t0)
            print('Converged:', conv)
            print('Time elapsed: %.2f s' % (t1 - t0))
            grad = self.LML_grad()
            # L2 norm of the gradient stacked across all parameter blocks
            grad_norm = 0
            for key in grad:
                grad_norm += (grad[key]**2).sum()
            grad_norm = sp.sqrt(grad_norm)
            print('Log Marginal Likelihood: %.7f.' % self.LML())
            print('Gradient norm: %.7f.' % grad_norm)
            # logger.debug('Log Marginal Likelihood: %.7f.', self.LML())
            # logger.debug('Gradient norm: %.7f.', grad_norm)

        if calc_ste:
            self.calc_ste(verbose=verbose)
        return conv, info
Example #2
    def optimize(self, calc_ste=False, Ifilter=None, bounds=None, verbose=True,
                 opts=None, *args, **kw_args):
        # logger.info('Marginal likelihood optimization.')

        if verbose:
            print('Marginal likelihood optimization.')
        if opts is None:  # avoid sharing a mutable default dict
            opts = {}
        t0 = time.time()
        conv, info = OPT.opt_hyper(self, Ifilter=Ifilter, bounds=bounds,
                                   opts=opts, *args, **kw_args)
        t1 = time.time()

        # if logger.levelno == logger.DEBUG:
        if verbose:
            # logger.debug('Time elapsed: %.2fs', t1-t0)
            print('Converged:', conv)
            print('Time elapsed: %.2f s' % (t1 - t0))
            grad = self.LML_grad()
            # L2 norm of the gradient stacked across all parameter blocks
            grad_norm = 0
            for key in grad:
                grad_norm += (grad[key]**2).sum()
            grad_norm = sp.sqrt(grad_norm)
            print('Log Marginal Likelihood: %.7f.' % self.LML())
            print('Gradient norm: %.7f.' % grad_norm)
            # logger.debug('Log Marginal Likelihood: %.7f.', self.LML())
            # logger.debug('Gradient norm: %.7f.', grad_norm)

        if calc_ste:
            self.calc_ste(verbose=verbose)
        return conv, info
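The gradient norm printed above is the L2 norm of all parameter gradients stacked into one vector. A self-contained check of that computation with toy arrays (the grad dict below is made up for illustration, not output from the class):

    import numpy as np

    # toy gradient dict shaped like the one LML_grad() returns
    grad = {'Cg': np.array([3.0, 4.0]), 'Cn': np.array([0.0])}
    grad_norm = np.sqrt(sum((g**2).sum() for g in grad.values()))
    print(grad_norm)  # 5.0, i.e. the L2 norm of (3, 4, 0)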
Example #3
        # compare with mtSet implementation
        params = {}
        # a PxP symmetric covariance has P*(P+1)/2 free parameters
        params['Cg'] = SP.randn(int(0.5*P*(P+1)))
        params['Cn'] = SP.randn(int(0.5*P*(P+1)))

        print "check gradient with gp2kronSum"
        gp = gp2kronSum(mu,Cg,Cn,XX)
        gp.setParams(params)

        if 0:
            gp.set_reml(False)

        print "test optimization"
        start = TIME.time()
        conv,info = OPT.opt_hyper(gp,params,factr=1e3)
        print('Reml GP:', TIME.time() - start)
        
        if mtSet_present:
            params1 = copy.copy(params)
            params1['mean'] = SP.zeros(mu1.getParams().shape[0])
            gp1 = gp2kronSumMtSet(mu1,Cg1,Cn1,XX)
            gp1.setParams(params1)
            start = TIME.time()
            conv1,info = OPT.opt_hyper(gp1,params1,factr=1e3)
            print('Old GP:', TIME.time() - start)

        print(conv)

        ipdb.set_trace()
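A side note on the int(0.5*P*(P+1)) entries drawn for 'Cg' and 'Cn' above: that is exactly the number of free parameters of a PxP symmetric covariance, and a standard way to consume such a vector is to fill a lower-triangular Cholesky factor. A minimal sketch of that idea (the helper below is illustrative, not the covariance API used in these examples):

    import numpy as np

    def cov_from_vector(x, P):
        # place the P*(P+1)/2 entries of x into a lower-triangular factor L,
        # then return L L^T, which is symmetric positive semidefinite
        L = np.zeros((P, P))
        L[np.tril_indices(P)] = x
        return L.dot(L.T)

    P = 3
    x = np.random.randn(P * (P + 1) // 2)
    C = cov_from_vector(x, P)  # a valid covariance candidate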
Example #4
    gp.checkGradient()

    if 0:
        gp.K.optimizeAB(n=100)
        print('a:', gp.K.a)
        print('b:', gp.K.b)

        ipdb.set_trace()
        gp.K.optimizeABgrad()

    for i in range(10):

        ipdb.set_trace()

        conv,info = OPT.opt_hyper(gp,params,factr=1e-3)
        print(conv)

        print('C1')
        print(C1.K())
        print('C2')
        print(C2.K())
        print('Cn')
        print(Cn.K())

        ipdb.set_trace()
        print('before a and b opt:', gp.LML())
        gp.K.optimizeAB(n=100)
        print('after a and b opt:', gp.LML())
        print('a:', gp.K.a)
        print('b:', gp.K.b)
Example #5
    print "creating gp2kronSum object"
    XX = SP.dot(X, X.T)
    XX /= XX.diagonal().mean()
    gp = gp2kronSum(mu, Cg, Cn, XX)
    gp.setParams(params)

    if "ML" in sys.argv:
        print "ML estimation"
        gp.set_reml(False)
    else:
        print "REML estimation"

    print "optimization of GP parameters"
    start = TIME.time()
    conv, info = OPT.opt_hyper(gp, params, factr=1e3)
    print 'time for fitting GP:', TIME.time() - start

    print conv

    print "creating lmm for association using GP object"
    assoc = lmm.LmmKronecker(gp=gp)

    #test snps
    print "testing SNPs with any effect"
    pv, LL_snps, LL_snps_0 = assoc.test_snps(snps)

    if 1:
        print "forward selection step"
        print "adding SNP with smalles pv as fixed effect"
        i_pv = pv.argsort()
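The snippet is cut off here, but the forward-selection step it begins is straightforward: take the SNP with the smallest p-value, move it into the fixed effects, and retest the remaining SNPs. A self-contained sketch of the selection itself (the toy snps and pv arrays are placeholders, not data from this example):

    import numpy as np

    # toy stand-ins: 5 individuals x 4 SNPs, with one p-value per SNP
    snps = np.random.randn(5, 4)
    pv = np.array([0.30, 0.01, 0.45, 0.12])

    i_best = int(np.argmin(pv))             # SNP with the smallest p-value
    covariate = snps[:, i_best:i_best + 1]  # kept as a column of fixed effects
    # a real run would now refit the model with this covariate and retest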
Example #6
        # compare with mtSet implementation
        params = {}
        # a PxP symmetric covariance has P*(P+1)/2 free parameters
        params['Cg'] = SP.randn(int(0.5*P*(P+1)))
        params['Cn'] = SP.randn(int(0.5*P*(P+1)))

        print("check gradient with gp2kronSum")
        gp = gp2kronSum(mu,Cg,Cn,XX)
        gp.setParams(params)

        if 0:
            gp.set_reml(False)

        print("test optimization")
        start = TIME.time()
        conv,info = OPT.opt_hyper(gp,params,factr=1e3)
        print('Reml GP:', TIME.time() - start)
        
        if mtSet_present:
            params1 = copy.copy(params)
            params1['mean'] = SP.zeros(mu1.getParams().shape[0])
            gp1 = gp2kronSumMtSet(mu1,Cg1,Cn1,XX)
            gp1.setParams(params1)
            start = TIME.time()
            conv1,info = OPT.opt_hyper(gp1,params1,factr=1e3)
            print('Old GP:', TIME.time() - start)

        print(conv)

        ipdb.set_trace()
Example #7
    print("creating gp2kronSum object")
    XX = SP.dot(X, X.T)
    XX /= XX.diagonal().mean()  # normalize to unit mean diagonal
    gp = gp2kronSum(mu, Cg, Cn, XX)
    gp.setParams(params)

    if "ML" in sys.argv:
        print("ML estimation")
        gp.set_reml(False)
    else:
        print("REML estimation")
    
    print("optimization of GP parameters")
    start = TIME.time()
    conv, info = OPT.opt_hyper(gp, params, factr=1e3)
    print('time for fitting GP:', TIME.time() - start)

    print(conv)

    print("creating lmm for association using GP object") 
    assoc = lmm.LmmKronecker(gp=gp)
    
    #test snps
    print("testing SNPs with any effect")
    pv, LL_snps, LL_snps_0 = assoc.test_snps(snps)

    if 1:
Example #8
    gp.checkGradient()

    if 0:
        gp.K.optimizeAB(n=100)
        print('a:', gp.K.a)
        print('b:', gp.K.b)

        ipdb.set_trace()
        gp.K.optimizeABgrad()

    for i in range(10):

        ipdb.set_trace()

        conv,info = OPT.opt_hyper(gp,params,factr=1e-3)
        print(conv)

        print('C1')
        print(C1.K())
        print('C2')
        print(C2.K())
        print('Cn')
        print(Cn.K())

        ipdb.set_trace()
        print('before a and b opt:', gp.LML())
        gp.K.optimizeAB(n=100)
        print('after a and b opt:', gp.LML())
        print('a:', gp.K.a)
        print('b:', gp.K.b)
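Examples 4 and 8 interleave OPT.opt_hyper with gp.K.optimizeAB, which is the classic alternating (block-coordinate) optimization pattern: optimize one parameter block while holding the other fixed, then swap, and repeat. A self-contained toy illustration of the same pattern on a two-variable objective (nothing below comes from the gp code; it only mirrors the alternate-and-repeat structure):

    # minimize f(a, b) = (a - 1)**2 + (a*b - 2)**2 by alternating exact
    # block updates; each step never increases f
    a, b = 0.0, 0.0
    for _ in range(20):
        a = (1 + 2*b) / (1 + b**2)  # solves df/da = 0 with b fixed
        if a != 0:
            b = 2 / a               # solves df/db = 0 with a fixed
    print(a, b)  # converges to (1.0, 2.0), where f = 0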