Example No. 1
    def findMin(self, inffunc, meanfunc, covfunc, likfunc, x, y):
        hypInArray = self.convert_to_array(meanfunc, covfunc, likfunc)
        opt = cg(self.nlml, hypInArray, self.dnlml, (inffunc, meanfunc, covfunc, likfunc, x, y), maxiter=100, disp=False, full_output=True)
        optimalHyp = opt[0]
        funcValue  = opt[1]
        warnFlag   = opt[4]
        if warnFlag == 1:
            print("Maximum number of iterations exceeded.")
        elif warnFlag == 2:
            print("Gradient and/or function calls not changing.")

        if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange 
            if not (self.searchConfig.max_trails or self.searchConfig.min_threshold):
                raise Exception('Specify at least one of the stop conditions')
            while True:
                self.trailsCounter += 1                 # increase counter
                for i in range(hypInArray.shape[0]):   # random re-initialization of the hyperparameters
                    hypInArray[i] = np.random.uniform(low=searchRange[i][0], high=searchRange[i][1])
                # keep this trial's result if it improves on the best minimum so far
                thisopt = cg(self.nlml, hypInArray, self.dnlml, (inffunc, meanfunc, covfunc, likfunc, x, y), maxiter=100, disp=False, full_output=True)
                if thisopt[1] < funcValue:
                    funcValue  = thisopt[1]
                    optimalHyp = thisopt[0]
                if self.searchConfig.max_trails and self.trailsCounter > self.searchConfig.max_trails:         # max_trails exceeded
                    return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:           # reached the provided minimum threshold
                    return optimalHyp, funcValue 

        return optimalHyp, funcValue
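
For reference, the `cg` used throughout these examples is consistent with `scipy.optimize.fmin_cg` imported under that name; with `full_output=True` it returns the tuple `(xopt, fopt, func_calls, grad_calls, warnflag)`, which is why the warning flag is read from index 4. A minimal, self-contained sketch on a toy quadratic (the objective is a placeholder, not from the source):

import numpy as np
from scipy.optimize import fmin_cg

f  = lambda x: np.sum((x - 1.0) ** 2)   # toy objective with minimum at x = 1
df = lambda x: 2.0 * (x - 1.0)          # its analytic gradient

out = fmin_cg(f, np.zeros(3), df, maxiter=100, disp=False, full_output=True)
xopt, fopt, func_calls, grad_calls, warnflag = out
# warnflag: 0 = converged, 1 = maximum iterations exceeded, 2 = stalled
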
Example No. 2
def min_wrapper(hyp, F, Flag, *varargin):
    # Use scipy.optimize functions to minimize the negative log marginal likelihood.  This is REALLY inefficient!
    x = convert_to_array(hyp)

    if Flag == 'CG':
        aa = cg(nlml, x, dnlml, (F,hyp,varargin), maxiter=100, disp=False, full_output=True)
        x, fx, funcCalls, gradcalls = aa[:4]
        if aa[4] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[4] == 2:
            print("Gradient and/or function calls not changing.")
        gvals = dnlml(x,F,hyp,varargin)
        return convert_to_class(x,hyp), fx, gvals, funcCalls

    elif Flag == 'BFGS':
        # Use BFGS
        aa = bfgs(nlml, x, dnlml, (F,hyp,varargin), maxiter=100, disp=True, full_output=True)
        x, fvals, gvals, Bopt, funcCalls, gradcalls = aa[:6]
        if aa[6] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[6] == 2:
            print("Gradient and/or function calls not changing.")
        return convert_to_class(x,hyp), fvals, gvals, funcCalls

    else:
        raise Exception('Incorrect usage of optimization flag in min_wrapper')
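
The BFGS branch reads the warning flag from index 6 because `scipy.optimize.fmin_bfgs` with `full_output=True` returns the longer tuple `(xopt, fopt, gopt, Bopt, func_calls, grad_calls, warnflag)`, where `Bopt` is the final approximation of the inverse Hessian. A minimal sketch with the same toy objective as above:

import numpy as np
from scipy.optimize import fmin_bfgs

f  = lambda x: np.sum((x - 1.0) ** 2)
df = lambda x: 2.0 * (x - 1.0)

out = fmin_bfgs(f, np.zeros(3), df, maxiter=100, disp=False, full_output=True)
xopt, fopt, gopt, Bopt, func_calls, grad_calls, warnflag = out
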
Example No. 3
File: opt.py Project: mathDR/gpts
    def findMin(self, x, y):
        meanfunc = self.model.meanfunc
        covfunc = self.model.covfunc
        likfunc = self.model.likfunc
        inffunc = self.model.inffunc
        hypInArray = self._convert_to_array()
        try:
            opt = cg(self._nlml, hypInArray, self._dnlml, maxiter=100, disp=False, full_output=True)
            optimalHyp = deepcopy(opt[0])
            funcValue  = opt[1]
            warnFlag   = opt[4]
            if warnFlag == 1:
                print("Maximum number of iterations exceeded.")
            elif warnFlag == 2:
                print("Gradient and/or function calls not changing.")
        except Exception:
            self.errorCounter += 1
            if not self.searchConfig:
                raise Exception("Can not use conjugate gradient. Try other hyparameters")
        self.trailsCounter += 1

        if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange 
            if not (self.searchConfig.num_restarts or self.searchConfig.min_threshold):
                raise Exception('Specify at least one of the stop conditions')
            while True:
                self.trailsCounter += 1                 # increase counter
                for i in range(hypInArray.shape[0]):   # random re-initialization of the hyperparameters
                    hypInArray[i] = np.random.uniform(low=searchRange[i][0], high=searchRange[i][1])
                # keep this trial's result if it improves on the best minimum so far
                try:
                    thisopt = cg(self._nlml, hypInArray, self._dnlml, maxiter=100, disp=False, full_output=True)
                    if thisopt[1] < funcValue:
                        funcValue  = thisopt[1]
                        optimalHyp = thisopt[0]
                except Exception:
                    self.errorCounter += 1
                if self.searchConfig.num_restarts and self.errorCounter > self.searchConfig.num_restarts // 2:
                    print("[CG] %d out of %d trials failed during optimization" % (self.errorCounter, self.trailsCounter))
                    raise Exception("Over half of the trials failed for conjugate gradient")
                if self.searchConfig.num_restarts and self.trailsCounter > self.searchConfig.num_restarts - 1:        # num_restarts exceeded
                    print("[CG] %d out of %d trials failed during optimization" % (self.errorCounter, self.trailsCounter))
                    return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:           # reached the provided minimum threshold
                    print("[CG] %d out of %d trials failed during optimization" % (self.errorCounter, self.trailsCounter))
                    return optimalHyp, funcValue
        return optimalHyp, funcValue
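
The restart loop above is the standard multi-start pattern: redraw the starting point uniformly from the configured search range, re-run the optimizer, and keep the best minimum found. A stripped-down, self-contained version of the same idea (the multimodal objective and ranges are placeholders, not from the source):

import numpy as np
from scipy.optimize import fmin_cg

f  = lambda x: np.sum(np.sin(3 * x) + 0.1 * x ** 2)    # toy multimodal objective
df = lambda x: 3 * np.cos(3 * x) + 0.2 * x             # its analytic gradient
search_range = [(-5.0, 5.0)] * 2                       # one (low, high) pair per parameter

best_x, best_f = None, np.inf
for _ in range(10):                                    # fixed number of restarts
    x0 = np.array([np.random.uniform(lo, hi) for lo, hi in search_range])
    xopt, fopt = fmin_cg(f, x0, df, maxiter=100, disp=False, full_output=True)[:2]
    if fopt < best_f:
        best_x, best_f = xopt, fopt
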
Example No. 4
def min_wrapper(hyp, F, Flag, *varargin):
    # Use scipy.optimize functions to minimize the negative log marginal likelihood.  This is REALLY inefficient!
    x = convert_to_array(hyp)  # Converts the hyperparameter class to an array

    if Flag == 'CG':
        aa = cg(nlml,
                x,
                dnlml, (F, hyp, varargin),
                maxiter=100,
                disp=True,
                full_output=True)
        x = aa[0]
        fx = aa[1]
        funcCalls = aa[2]
        gradcalls = aa[3]
        if aa[4] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[4] == 2:
            print("Gradient and/or function calls not changing.")
        gvals = dnlml(x, F, hyp, varargin)
        return convert_to_class(x, hyp), fx, gvals, funcCalls

    elif Flag == 'BFGS':
        # Use BFGS
        aa = bfgs(nlml,
                  x,
                  dnlml, (F, hyp, varargin),
                  maxiter=100,
                  disp=False,
                  full_output=True)
        x = aa[0]
        fvals = aa[1]
        gvals = aa[2]
        Bopt = aa[3]
        funcCalls = aa[4]
        gradcalls = aa[5]
        if aa[6] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[6] == 2:
            print("Gradient and/or function calls not changing.")
        return convert_to_class(x, hyp), fvals, gvals, funcCalls

    elif Flag == 'SCG':
        # Use SCG
        aa = scg(x, nlml, dnlml, (F, hyp, varargin), niters=40)
        x = aa[0]
        fvals = aa[1]
        gvals = dnlml(x, F, hyp, varargin)
        return convert_to_class(x, hyp), fvals, gvals

    else:
        raise Exception('Incorrect usage of optimization flag in min_wrapper')
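
Note that the SCG branch of this variant returns a 3-tuple (hyperparameters, function value, gradient values), while the CG and BFGS branches return 4-tuples that also include funcCalls, so callers of this min_wrapper have to handle both shapes.
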
Example No. 5
def min_wrapper(hyp, F, Flag, *varargin):
    # Use scipy.optimize functions, scg.py, or minimize.py to
    # minimize the negative log marginal likelihood.
    
    x = convert_to_array(hyp)   # convert the hyperparameter class to an array

    if Flag == 'CG':
        aa = cg(nlml, x, dnlml, (F,hyp,varargin), maxiter=100, disp=False, full_output=True)
        x, fopt, funcCalls, gradcalls = aa[:4]
        if aa[4] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[4] == 2:
            print("Gradient and/or function calls not changing.")
        gopt = dnlml(x,F,hyp,varargin)
        return convert_to_class(x,hyp), fopt, gopt, funcCalls

    elif Flag == 'BFGS':
        # Use BFGS
        aa = bfgs(nlml, x, dnlml, (F,hyp,varargin), maxiter=100, disp=False, full_output=True)
        x, fopt, gopt, Bopt, funcCalls, gradcalls = aa[:6]
        if aa[6] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[6] == 2:
            print("Gradient and/or function calls not changing.")
        if isinstance(fopt, ndarray):
            fopt = fopt[0]
        return convert_to_class(x,hyp), fopt, gopt, funcCalls

    elif Flag == 'SCG':
        # use scg.py
        aa   = scg(x, nlml, dnlml, (F,hyp,varargin), niters = 100)
        hyp  = convert_to_class(aa[0],hyp)
        fopt = aa[1][-1]
        gopt = dnlml(aa[0],F,hyp,varargin)
        return hyp, fopt, gopt, len(aa[1])

    elif Flag == 'Minimize':
        # use minimize.py
        aa   = run(x, nlml, dnlml, (F,hyp,varargin), maxnumfuneval=-100)
        hyp  = convert_to_class(aa[0],hyp)
        fopt = aa[1][-1]
        gopt = dnlml(aa[0],F,hyp,varargin)
        return hyp, fopt, gopt, len(aa[1])

    else:
        raise Exception('Incorrect usage of optimization flag in min_wrapper')
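
In the 'Minimize' branch, `run` comes from minimize.py, a Python port of Carl Rasmussen's minimize.m. Following that convention, a negative budget bounds the number of function evaluations (its absolute value), whereas a positive one would bound the number of line searches, so `maxnumfuneval=-100` allows at most 100 evaluations.
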
Example No. 6
def gp_train(gp, X, y, R=None, w=None, Flag = None):
    ''' gp_train() returns the learnt hyperparameters.
    Following chapter 5.4.1 in Rasmussen and Williams: GPs for ML (2006).
    The original MATLAB version of the optimizer used, minimize.m,
    is copyright (C) 1999 - 2006 Carl Edward Rasmussen.
    The Python versions used here are in scipy.optimize.

    Inputs R and w are needed for XGP regression! '''

    # Build the parameter list that we will optimize
    theta = np.concatenate((gp['meantheta'],gp['covtheta']))
    if Flag == 'CG':
        aa = cg(nlml, theta, dnlml, [gp,X,y,R,w], maxiter=100, disp=False, full_output=True)
        theta, fvals, funcCalls, gradcalls = aa[:4]
        gvals = dnlml(theta, gp, X, y, R, w)
        if aa[4] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[4] == 2:
            print("Gradient and/or function calls not changing.")
        mt = len(gp['meantheta'])
        gp['meantheta'] = theta[:mt]
        gp['covtheta']  = theta[mt:]
        return gp, fvals, gvals, funcCalls
    elif Flag == 'BFGS':
        # Use BFGS
        aa = bfgs(nlml, theta, dnlml, [gp,X,y,R,w], maxiter=100, disp=True, full_output=True)
        theta, fvals, gvals, Bopt, funcCalls, gradcalls = aa[:6]
        if aa[6] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[6] == 2:
            print("Gradient and/or function calls not changing.")
        mt = len(gp['meantheta'])
        gp['meantheta'] = theta[:mt]
        gp['covtheta']  = theta[mt:]
        return gp, fvals, gvals, funcCalls
    elif Flag == 'SCG':
        theta, listF = scg.scg(theta, nlml, dnlml, [gp,X,y,R,w], niters = 100)
        mt = len(gp['meantheta'])
        gp['meantheta'] = theta[:mt]
        gp['covtheta']  = theta[mt:]
        return gp, listF 
    else:
        raise Exception("Need to specify a method for optimization in gp_train")
Example No. 7
File: opt.py Project: s-bear/pyGPs
    def findMin(self, x, y, numIters=100):
        meanfunc = self.model.meanfunc
        covfunc = self.model.covfunc
        likfunc = self.model.likfunc
        inffunc = self.model.inffunc
        hypInArray = self._convert_to_array()
        try:
            opt = cg(self._nlml,
                     hypInArray,
                     self._dnlml,
                     maxiter=numIters,
                     disp=False,
                     full_output=True)
            optimalHyp = deepcopy(opt[0])
            funcValue = opt[1]
            warnFlag = opt[4]
            if warnFlag == 1:
                print("Maximum number of iterations exceeded.")
            elif warnFlag == 2:
                print("Gradient and/or function calls not changing.")
        except Exception:
            self.errorCounter += 1
            if not self.searchConfig:
                raise Exception(
                    "Cannot learn hyperparameters using conjugate gradient.")
        self.trailsCounter += 1

        if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange
            if not (self.searchConfig.num_restarts
                    or self.searchConfig.min_threshold):
                raise Exception('Specify at least one of the stop conditions')
            while True:
                self.trailsCounter += 1  # increase counter
                for i in range(hypInArray.shape[0]):  # random init of hyp
                    hypInArray[i] = np.random.uniform(low=searchRange[i][0],
                                                      high=searchRange[i][1])
                # keep this trial's result if it improves on the best minimum so far
                try:
                    thisopt = cg(self._nlml,
                                 hypInArray,
                                 self._dnlml,
                                 maxiter=100,
                                 disp=False,
                                 full_output=True)
                    if thisopt[1] < funcValue:
                        funcValue = thisopt[1]
                        optimalHyp = thisopt[0]
                except Exception:
                    self.errorCounter += 1
                if self.searchConfig.num_restarts and self.errorCounter > old_div(
                        self.searchConfig.num_restarts, 2):
                    print(
                        "[CG] %d out of %d trials failed during optimization" %
                        (self.errorCounter, self.trailsCounter))
                    raise Exception(
                        "Over half of the trials failed for conjugate gradient"
                    )
                if self.searchConfig.num_restarts and self.trailsCounter > self.searchConfig.num_restarts - 1:  # num_restarts exceeded
                    print(
                        "[CG] %d out of %d trials failed during optimization" %
                        (self.errorCounter, self.trailsCounter))
                    return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:  # reached the provided minimum threshold
                    print(
                        "[CG] %d out of %d trials failed during optimization" %
                        (self.errorCounter, self.trailsCounter))
                    return optimalHyp, funcValue
        return optimalHyp, funcValue
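
Example No. 7 is the same search loop run through a Python 2-to-3 conversion: `old_div` comes from `past.utils` in the `future` compatibility package and preserves Python 2's floor division for the `num_restarts/2` check. A minimal illustration:

from past.utils import old_div   # from the `future` compatibility package

old_div(5, 2)   # == 2: Python 2 style floor division for two ints
5 / 2           # == 2.5: true division under Python 3
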
Example No. 8
    def check_grad(self, N=1):
        Ws = [np.random.rand(self.crf.n_W) for _ in range(N)]
        return [cg(self.obj, self.grad, W) for W in Ws]
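
Unlike the other examples, `cg` in Example No. 8 is called with the signature `(func, grad, x0)`, which matches `scipy.optimize.check_grad` rather than `fmin_cg`; `check_grad` returns the 2-norm of the difference between the analytic gradient and a finite-difference estimate, so values near zero indicate a correct gradient. A minimal sketch (the objective is a placeholder):

import numpy as np
from scipy.optimize import check_grad

f  = lambda x: np.sum(x ** 2)
df = lambda x: 2 * x

err = check_grad(f, df, np.random.rand(4))  # close to 0 when df matches f
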
Example No. 9
def min_wrapper(hyp, F, Flag, *varargin):
    # Use scipy.optimize functions, scg.py, or minimize.py to
    # minimize the negative log marginal likelihood.

    x = convert_to_array(hyp)  # convert the hyperparameter class to an array

    if Flag == 'CG':
        aa = cg(nlml,
                x,
                dnlml, (F, hyp, varargin),
                maxiter=100,
                disp=False,
                full_output=True)
        x = aa[0]
        fopt = aa[1]
        funcCalls = aa[2]
        gradcalls = aa[3]
        if aa[4] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[4] == 2:
            print("Gradient and/or function calls not changing.")
        gopt = dnlml(x, F, hyp, varargin)
        return convert_to_class(x, hyp), fopt, gopt, funcCalls

    elif Flag == 'BFGS':
        # Use BFGS
        aa = bfgs(nlml,
                  x,
                  dnlml, (F, hyp, varargin),
                  maxiter=100,
                  disp=False,
                  full_output=True)
        x = aa[0]
        fopt = aa[1]
        gopt = aa[2]
        Bopt = aa[3]
        funcCalls = aa[4]
        gradcalls = aa[5]
        if aa[6] == 1:
            print("Maximum number of iterations exceeded.")
        elif aa[6] == 2:
            print("Gradient and/or function calls not changing.")
        if isinstance(fopt, ndarray):
            fopt = fopt[0]
        return convert_to_class(x, hyp), fopt, gopt, funcCalls

    elif Flag == 'SCG':
        # use scg.py
        aa = scg(x, nlml, dnlml, (F, hyp, varargin), niters=100)
        hyp = convert_to_class(aa[0], hyp)
        fopt = aa[1][-1]
        gopt = dnlml(aa[0], F, hyp, varargin)
        return hyp, fopt, gopt, len(aa[1])

    elif Flag == 'Minimize':
        # use minimize.py
        aa = run(x, nlml, dnlml, (F, hyp, varargin), maxnumfuneval=-100)
        hyp = convert_to_class(aa[0], hyp)
        fopt = aa[1][-1]
        gopt = dnlml(aa[0], F, hyp, varargin)
        return hyp, fopt, gopt, len(aa[1])

    else:
        raise Exception('Incorrect usage of optimization flag in min_wrapper')
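
As a design note, the if/elif chain on Flag that these wrappers repeat could be collapsed into a small dispatch table. A self-contained sketch of the idea (the toy objective and handler table are illustrative, not from the source):

import numpy as np
from scipy.optimize import fmin_cg, fmin_bfgs

f  = lambda x: np.sum((x - 1.0) ** 2)
df = lambda x: 2.0 * (x - 1.0)

# One entry per optimizer, each with a uniform call signature.
OPTIMIZERS = {
    'CG':   lambda x0: fmin_cg(f, x0, df, maxiter=100, disp=False, full_output=True),
    'BFGS': lambda x0: fmin_bfgs(f, x0, df, maxiter=100, disp=False, full_output=True),
}

def run_optimizer(flag, x0):
    if flag not in OPTIMIZERS:
        raise Exception('Incorrect usage of optimization flag in min_wrapper')
    return OPTIMIZERS[flag](x0)   # full_output tuple; index 0 is the optimum

xopt = run_optimizer('CG', np.zeros(2))[0]
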