Example #1
    def testGPPrior(self):

        # see how the GP works with the data prior...
        def foo(x):
            return sum(sin(x * 20))

        bounds = [[0., 1.]]
        # train prior
        pX = lhcSample([[0., 1.]], 100, seed=6)
        pY = [foo(x) for x in pX]
        prior = RBFNMeanPrior()
        prior.train(pX, pY, bounds, k=10, seed=102)

        X = lhcSample([[0., 1.]], 2, seed=7)
        Y = [foo(x) for x in X]

        kernel = GaussianKernel_ard(array([.1]))
        GP = GaussianProcess(kernel, X, Y, prior=prior)
        GPnoprior = GaussianProcess(kernel, X, Y)

        S = arange(0, 1, .01)

        nopriorErr = mean([(foo(x) - GPnoprior.mu(x))**2 for x in S])
        priorErr = mean([(foo(x) - GP.mu(x))**2 for x in S])

        self.assertTrue(priorErr < nopriorErr * .5)

        if False:
            figure(1)
            clf()
            plot(S, [prior.mu(x) for x in S], 'g-', alpha=0.3)
            plot(S, [GPnoprior.mu(x) for x in S], 'b-', alpha=0.3)
            plot(S, [GP.mu(x) for x in S], 'k-', lw=2)
            plot(X, Y, 'ko')
            show()
Example #2
    def testShekelGPPrior(self):
        
        # see how the GP works on the Shekel function
        S5 = Shekel5()

        pX = lhcSample(S5.bounds, 100, seed=8)
        pY = [S5.f(x) for x in pX]
        prior = RBFNMeanPrior()
        prior.train(pX, pY, S5.bounds, k=10, seed=103)
        
        X = lhcSample(S5.bounds, 10, seed=9)
        Y = [S5.f(x) for x in X]

        hv = .1
        hyper = [hv, hv, hv, hv]
        gkernel = GaussianKernel_ard(hyper)
        priorGP = GaussianProcess(gkernel, X, Y, prior=prior)
        nopriorGP = GaussianProcess(gkernel, X, Y)
        
        S = lhcSample(S5.bounds, 1000, seed=10)
        nopriorErr = mean([(S5.f(x)-nopriorGP.mu(x))**2 for x in S])
        priorErr = mean([(S5.f(x)-priorGP.mu(x))**2 for x in S])
        
        # print '\nno prior Err =', nopriorErr
        # print 'prior Err =', priorErr
        self.assertTrue(priorErr < nopriorErr*.8)
Example #3
class Synthetic(TestFunction):
    """
    randomly-generated synthetic function
    """
    def __init__(self, kernel, bounds, NX, noise=0.05, xstar=None, **kwargs):
        super(Synthetic, self).__init__("Synthetic", 0, None, bounds, **kwargs)
        
        self.name += ' %d'%len(bounds)
        
        self.GP = GaussianProcess(kernel)
        X = lhcSample(bounds, NX)
        self.GP.addData([X[0]], [normal(0, 1)])
        if xstar is not None:
            ystar = min(self.GP.Y[0]-1.0, -2.0)
            self.GP.addData(xstar, ystar)
        for x in X[1:]:
            mu, sig2 = self.GP.posterior(x)
            y = normal(mu, sqrt(sig2)) + normal(0, noise)
            # preserve min if necessary
            if xstar is not None and y < ystar+.5:
                y = ystar+.5
            self.GP.addData(x, y)
            
        # now, try minimizing with BFGS
        start = self.GP.X[argmin(self.GP.Y)]
        xopt = fmin_bfgs(self.GP.mu, start, disp=False)
        
        print "\t[synthetic] optimization started at %s, ended at %s" % (start, xopt)
        
        if xstar is not None:
            print '\t[synthetic] realigning minimum'
            # now, align minimum with what we specified
            for i, (target, origin) in enumerate(zip(xstar, xopt)):
                self.GP.X[:,i] += target-origin
            xopt = xstar
            
        
        self.minimum = self.GP.mu(xopt)
        self.xstar = xopt
        
        # print self.GP.X
        # print self.GP.Y
        print '\t[synthetic] x+ = %s, f(x+) = %.3f' % (self.xstar, self.f(self.xstar))
            
            
    def f(self, x):
        
        y = self.GP.mu(x)
        if y < self.minimum:
            self.minimum = y
            
        if self.maximize:
            return -y
        else:
            return y
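
A minimal usage sketch for the class above (not part of the original source): the kernel, bounds, NX, and xstar values are illustrative assumptions, reusing GaussianKernel_ard and the bounds convention from the other examples.

# hypothetical usage of Synthetic -- kernel, bounds, NX and xstar chosen only for illustration
bounds = [[0., 1.], [0., 1.]]
kernel = GaussianKernel_ard(array([.3, .3]))
tf = Synthetic(kernel, bounds, NX=20, xstar=array([.25, .75]))
# the constructor shifts the sampled GP data so the BFGS minimizer lands on xstar
print tf.xstar
print tf.f(tf.xstar)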
Example #4
def test():

    GP = GaussianProcess(GaussianKernel_iso([0.2, 1.0]))
    X = array([[0.2], [0.3], [0.5], [1.5]])
    Y = [1, 0, 1, 0.75]
    GP.addData(X, Y)

    figure(1)
    A = arange(0, 2, 0.01)                         # dense grid for plotting
    mu = array([GP.mu(x) for x in A])              # posterior mean
    sig2 = array([GP.posterior(x)[1] for x in A])  # posterior variance

    # each utility exposes negf, the negated acquisition value (for use with
    # minimizers), so negate it back before plotting
    Ei = EI(GP)
    ei = [-Ei.negf(x) for x in A]

    Pi = PI(GP)
    pi = [-Pi.negf(x) for x in A]

    Ucb = UCB(GP, 1, T=2)
    ucb = [-Ucb.negf(x) for x in A]

    ax = subplot(1, 1, 1)
    ax.plot(A, mu, "k-", lw=2)
    xv, yv = poly_between(A, mu - sig2, mu + sig2)
    ax.fill(xv, yv, color="#CCCCCC")

    ax.plot(A, ei, "g-", lw=2, label="EI")
    ax.plot(A, ucb, "g--", lw=2, label="UCB")
    ax.plot(A, pi, "g:", lw=2, label="PI")
    ax.plot(X, Y, "ro")
    ax.legend()
    draw()
    show()
Example #5
def test():
    GP = GaussianProcess(GaussianKernel_iso([.2, 1.0]))
    X = array([[.2], [.3], [.5], [1.5]])
    Y = [1, 0, 1, .75]
    GP.addData(X, Y)

    figure(1)
    A = arange(0, 2, 0.01)
    mu = array([GP.mu(x) for x in A])
    sig2 = array([GP.posterior(x)[1] for x in A])

    Ei = EI(GP)
    ei = [-Ei.negf(x) for x in A]

    Pi = PI(GP)
    pi = [-Pi.negf(x) for x in A]

    Ucb = UCB(GP, 1, T=2)
    ucb = [-Ucb.negf(x) for x in A]

    ax = subplot(1, 1, 1)
    ax.plot(A, mu, 'k-', lw=2)
    xv, yv = poly_between(A, mu - sig2, mu + sig2)
    ax.fill(xv, yv, color="#CCCCCC")

    ax.plot(A, ei, 'g-', lw=2, label='EI')
    ax.plot(A, ucb, 'g--', lw=2, label='UCB')
    ax.plot(A, pi, 'g:', lw=2, label='PI')
    ax.plot(X, Y, 'ro')
    ax.legend()
    draw()
    show()
Example #6
    def testGPPrior(self):
        
        # see how the GP works with the data prior...
        def foo(x):
            return sum(sin(x*20))
        
        bounds = [[0., 1.]]
        # train prior
        pX = lhcSample([[0., 1.]], 100, seed=6)
        pY = [foo(x) for x in pX]
        prior = RBFNMeanPrior()
        prior.train(pX, pY, bounds, k=10, seed=102)
        
        X = lhcSample([[0., 1.]], 2, seed=7)
        Y = [foo(x) for x in X]
        
        kernel = GaussianKernel_ard(array([.1]))
        GP = GaussianProcess(kernel, X, Y, prior=prior)
        GPnoprior = GaussianProcess(kernel, X, Y)

        S = arange(0, 1, .01)

        nopriorErr = mean([(foo(x)-GPnoprior.mu(x))**2 for x in S])
        priorErr = mean([(foo(x)-GP.mu(x))**2 for x in S])
        
        # print '\nno prior Err =', nopriorErr
        # print 'prior Err =', priorErr
        
        self.assertTrue(priorErr < nopriorErr*.5)
        
        if False:
            figure(1)
            clf()
            plot(S, [prior.mu(x) for x in S], 'g-', alpha=0.3)
            plot(S, [GPnoprior.mu(x) for x in S], 'b-', alpha=0.3)
            plot(S, [GP.mu(x) for x in S], 'k-', lw=2)
            plot(X, Y, 'ko')
            show()
Example #7
def fastUCBGallery(GP, bounds, N, useBest=True, samples=300, useCDIRECT=True, xi=0, passback={}):
    """
    Generate a gallery of N candidate instances by maximizing an acquisition
    function over the GP posterior (EI in the current code; the UCB utility
    and the Monte Carlo candidate search are commented out below).
    """
    gallery = []

    if len(GP.X) > 0:
        if useBest:
            # find best sample already seen, that lies within the bounds
            bestY = -inf
            bestX = None
            for x, y in zip(GP.X, GP.Y):
                if y > bestY:
                    for v, b in zip(x, bounds):
                        if v < b[0] or v > b[1]:
                            break
                    else:
                        bestY = y
                        bestX = x
            if bestX is not None:
                gallery.append(bestX)
    
        # create a "fake" GP from the GP that was passed in (can't just copy 
        # b/c original could have been PrefGP)
        hallucGP = GaussianProcess(deepcopy(GP.kernel), deepcopy(GP.X), deepcopy(GP.Y), prior=GP.prior)
    elif GP.prior is None:            
        # if we have no data and no prior, start in the center
        x = array([(b[0]+b[1])/2. for b in bounds])
        gallery.append(x)
        hallucGP = GaussianProcess(deepcopy(GP.kernel), [x], [0.0], prior=GP.prior)
    else:
        # optimize from prior
        if DEBUG: print 'GET DATA FROM PRIOR'
        bestmu = -inf
        bestX = None
        for m in GP.prior.means:
            argmin = fmin_bfgs(GP.negmu, m, disp=False)
            if DEBUG: print argmin,
            for i in xrange(len(argmin)):
                argmin[i] = clip(argmin[i], bounds[i][0], bounds[i][1])
            # if DEBUG: print 'converted to', argmin
            if GP.mu(argmin) > bestmu:
                bestX = argmin
                bestmu = GP.mu(argmin)
                if DEBUG: print '***** bestmu =', bestmu
                if DEBUG: print '***** bestX =', bestX
        gallery.append(bestX)
        hallucGP = GaussianProcess(deepcopy(GP.kernel), bestX, bestmu, prior=GP.prior)
        
        
    while len(gallery) < N:
        if DEBUG: print '\n\n\thave %d data for gallery' % len(gallery)
        bestUCB = -inf
        bestX = None
        # ut = UCB(hallucGP, len(bounds), N)
        ut = EI(hallucGP, xi=xi)
        
        if DEBUG: print '\tget with max EI'
        opt, optx = maximizeEI(hallucGP, bounds, xi=xi, useCDIRECT=useCDIRECT)
        #if len(gallery)==0 or min(norm(optx-gx) for gx in gallery) > .5:
        #    if DEBUG: print '\tgot one'
        bestUCB = opt
        bestX = optx
        #else:
        #    if DEBUG: print '\ttoo close to existing'
        '''        
        # try some random samples
        if DEBUG: print '\ttry random samples'
        for x in lhcSample(bounds, samples):
            u = -ut.negf(x)
            if u > bestUCB and min(norm(x-gx) for gx in gallery) > .5:
                '\they, this one is even better!'
                bestUCB = u
                bestX = x
                passback['found_random'] = 1
        
        # now try the prior means
        if hallucGP.prior is not None:
            if DEBUG: print '\ttry prior means (bestUCB = %f)'%bestUCB
            for x in hallucGP.prior.means:
                x = array([clip(x[i], bounds[i][0], bounds[i][1]) for i in xrange(len(x))])
                x = x * hallucGP.prior.width + hallucGP.prior.lowerb
                u = -ut.negf(x)
                # if DEBUG: print 'u = %f', u
                if u > bestUCB:
                    if len(gallery)==0 or min(norm(x-gx) for gx in gallery) > .5:
                        if DEBUG: print '\tthis one is even better!  prior mean %s has u = %f' % (x, u)
                        bestUCB = u
                        bestX = x
                        passback['found_prior'] = 1
        '''
                    
        gallery.append(bestX)
        if len(gallery) < N-1:
            hallucGP.addData(bestX, hallucGP.mu(bestX))

    # these can be optionally passed back if the caller passes in its own dict
    passback['hallucGP'] = hallucGP
    passback['utility'] = ut
        
    return gallery
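
A hedged usage sketch for the function above (not in the original source): the kernel, bounds, and N below are illustrative assumptions; the optional passback dict is filled with the hallucinated GP and the utility object, as the code above shows.

# hypothetical call -- kernel, bounds and N are assumptions for illustration
bounds = [[0., 1.], [0., 1.]]
GP = GaussianProcess(GaussianKernel_ard(array([.5, .5])))
extras = {}
gallery = fastUCBGallery(GP, bounds, 4, passback=extras)
# gallery holds 4 candidate points; extras now contains 'hallucGP' and 'utility'
print gallery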
Example #8
def fastUCBGallery(GP, bounds, N, useBest=True, samples=300, useCDIRECT=True):
    """
    Generate a gallery of N candidate instances by maximizing an acquisition
    function (EI here), also considering random (Monte Carlo) samples and the
    prior means as candidates.
    """
    gallery = []

    if len(GP.X) > 0:
        if useBest:
            # find best sample already seen, that lies within the bounds
            bestY = -inf
            bestX = None
            for x, y in zip(GP.X, GP.Y):
                if y > bestY:
                    for v, b in zip(x, bounds):
                        if v < b[0] or v > b[1]:
                            break
                    else:
                        bestY = y
                        bestX = x
            if bestX is not None:
                gallery.append(bestX)

        # create a "fake" GP from the GP that was passed in (can't just copy
        # b/c original could have been PrefGP)
        hallucGP = GaussianProcess(deepcopy(GP.kernel),
                                   deepcopy(GP.X),
                                   deepcopy(GP.Y),
                                   prior=GP.prior)
    elif GP.prior is None:
        # if we have no data and no prior, start in the center
        x = array([(b[0] + b[1]) / 2. for b in bounds])
        gallery.append(x)
        hallucGP = GaussianProcess(deepcopy(GP.kernel), [x], [0.0],
                                   prior=GP.prior)
    else:
        # optimize from prior
        bestmu = -inf
        bestX = None
        for m in GP.prior.means:
            argmin = fmin_bfgs(GP.negmu, m, disp=False)
            if GP.mu(argmin) > bestmu:
                bestX = argmin
                bestmu = GP.mu(argmin)
        gallery.append(bestX)
        hallucGP = GaussianProcess(deepcopy(GP.kernel),
                                   bestX,
                                   bestmu,
                                   prior=GP.prior)

    while len(gallery) < N:
        bestUCB = -inf
        bestX = None
        # ut = UCB(hallucGP, len(bounds), N)
        ut = EI(hallucGP, xi=.4)

        opt, optx = maximizeEI(hallucGP, bounds, xi=.3, useCDIRECT=useCDIRECT)
        if len(gallery) == 0 or min(norm(optx - gx) for gx in gallery) > .5:
            bestUCB = opt
            bestX = optx

        # try some random samples
        for x in lhcSample(bounds, samples):
            u = -ut.negf(x)
            if u > bestUCB and min(norm(x - gx) for gx in gallery) > .5:
                # hey, this one is even better!
                bestUCB = u
                bestX = x

        # now try the prior means
        if hallucGP.prior is not None:
            for x in hallucGP.prior.means:
                x = array([
                    clip(x[i], bounds[i][0], bounds[i][1])
                    for i in xrange(len(x))
                ])
                x = x * hallucGP.prior.width + hallucGP.prior.lowerb
                u = -ut.negf(x)
                if u > bestUCB:
                    if len(gallery) == 0 or min(
                            norm(x - gx) for gx in gallery) > .5:
                        bestUCB = u
                        bestX = x

        gallery.append(bestX)

        hallucGP.addData(bestX, hallucGP.mu(bestX))

    return gallery