Example #1
import numpy as np

def awayStepFWSimplex(function, feasibleReg, x, typeStep):
    grad = function.fEvalGrad(x)
    v = feasibleReg.LPOracle(grad)
    a, indexMax = feasibleReg.AwayOracle(grad, x)
    vertvar = 0
    #Choose between the FW direction and the away direction.
    if (np.dot(grad, x - v) > np.dot(grad, a - x)):
        d = v - x
        alphaMax = 1.0
        optStep = stepSize(function, d, grad, typeStep)
        alpha = min(optStep, alphaMax)
        #Step is shorter than the maximum step.
        if (alpha != alphaMax):
            #On the simplex the FW vertex is new iff x carries no weight on it.
            if (np.dot(v, x) == 0.0):
                vertvar = 1
        #Maximum step length FW step: x collapses onto v, only one vertex remains.
        else:
            vertvar = -1
    else:
        d = x - a
        alphaMax = x[indexMax] / (1.0 - x[indexMax])
        optStep = stepSize(function, d, grad, typeStep)
        alpha = min(optStep, alphaMax)
        #Maximum step: the away vertex drops out of the support of x.
        if (alpha == alphaMax):
            vertvar = -1
    return x + alpha * d, vertvar, np.dot(grad, x - v)
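
The function above relies on several helpers that are not shown in this listing. The sketch below is one way they could look for the unit probability simplex, matching the calls made in Examples #1 and #4 (where the away oracle receives the iterate x rather than an active set); the class body and the short-step rule are illustrative assumptions, not the original implementations, and function.L is an assumed smoothness constant.

import numpy as np

class ProbabilitySimplex:
    #Sketch of the assumed feasible region {x >= 0, sum(x) = 1}.
    def __init__(self, dim):
        self.dim = dim

    def LPOracle(self, grad):
        #Minimize <grad, v> over the simplex: put all mass on the smallest gradient entry.
        v = np.zeros(self.dim)
        v[np.argmin(grad)] = 1.0
        return v

    def AwayOracle(self, grad, x):
        #Maximize <grad, a> over the vertices that currently carry weight in x.
        support = np.where(x > 0.0)[0]
        indexMax = support[np.argmax(grad[support])]
        a = np.zeros(self.dim)
        a[indexMax] = 1.0
        return a, indexMax

def stepSize(function, d, grad, typeStep):
    #Illustrative short-step rule -<grad, d> / (L * ||d||^2), assuming the objective
    #exposes a smoothness constant as function.L; the real code presumably switches
    #on typeStep (e.g. exact line search vs. short step).
    return max(0.0, -np.dot(grad, d) / (function.L * np.dot(d, d)))
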
Example #2
def awayStepFWLazy(function, feasibleReg, x, activeSet, lambdas, phiVal, typeStep):
    grad = function.fEvalGrad(x)
    a, indexMax, v, indexMin = maxMinVertex(grad, activeSet)
    vertvar = 0
    #Use old FW vertex.
    if(np.dot(grad, x - v) >= np.dot(grad, a - x) and np.dot(grad, x - v) > phiVal[0]/2.0):
        d = v - x
        alphaMax = 1.0
        optStep = stepSize(function, d, grad, typeStep)
        alpha = min(optStep, alphaMax)
        if(alpha != alphaMax):
            lambdas[:] = [i * (1 - alpha) for i in lambdas]
            lambdas[indexMin] += alpha
        #Maximum step length FW step: the FW vertex is the only remaining vertex.
        else:
            activeSet[:] = [v]
            lambdas[:] = [alphaMax]
            vertvar = -1        
    else:
        #Use old away vertex.
        if(np.dot(grad, a - x) > np.dot(grad, x - v) and np.dot(grad, a - x) > phiVal[0]/2.0):
            d = x - a
            alphaMax = lambdas[indexMax]/(1.0 - lambdas[indexMax])
            optStep = stepSize(function, d, grad, typeStep)
            alpha = min(optStep, alphaMax)
            lambdas[:] = [i * (1 + alpha) for i in lambdas]
            #If the maximum step is taken the away vertex must be deleted.
            if(alpha != alphaMax):
                lambdas[indexMax] -= alpha
            else:
                deleteVertexIndex(indexMax, activeSet, lambdas)
                vertvar = -1            
        else:
            v = feasibleReg.LPOracle(grad)
            #New FW vertex.
            if(np.dot(grad, x - v) > phiVal[0]/2.0):
                d = v - x
                alphaMax = 1.0
                optStep = stepSize(function, d, grad, typeStep)
                alpha = min(optStep, alphaMax)
                #Step is shorter than the maximum step.
                if(alpha != alphaMax):
                    #Add the new FW vertex to the active set and rescale the weights.
                    lambdas[:] = [i * (1 - alpha) for i in lambdas]
                    activeSet.append(v)
                    lambdas.append(alpha)
                    vertvar = 1
                #Maximum step length FW step: the FW vertex is the only remaining vertex.
                else:
                    activeSet[:] = [v]
                    lambdas[:] = [alphaMax]
                    vertvar = -1                
            #None of the vertices are satisfactory, halve phi.
            else:
                phiVal[0] = phiVal[0]/2.0
                alpha = 0.0
                d = v - x
    return x + alpha*d, vertvar, np.dot(grad, x - v)
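
awayStepFWLazy additionally assumes a helper maxMinVertex that scans the active set for the away candidate (largest gradient inner product) and the FW candidate (smallest). A minimal sketch, assuming activeSet is a list of numpy arrays:

import numpy as np

def maxMinVertex(grad, activeSet):
    #Return the active vertex with the largest inner product with the gradient
    #(away candidate) and the one with the smallest (FW candidate), plus their indices.
    products = [np.dot(grad, vertex) for vertex in activeSet]
    indexMax = int(np.argmax(products))
    indexMin = int(np.argmin(products))
    return activeSet[indexMax], indexMax, activeSet[indexMin], indexMin
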
Example #3
def awayStepFW(function, feasibleReg, x, activeSet, lambdas, typeStep):
    grad = function.fEvalGrad(x)
    v = feasibleReg.LPOracle(grad)
    a, indexMax = feasibleReg.AwayOracle(grad, activeSet)
    vertvar = 0
    #Choose between the FW direction and the away direction.
    if(np.dot(grad, x - v) > np.dot(grad, a - x)):
        d = v - x
        alphaMax = 1.0
        optStep = stepSize(function, d, grad, typeStep)
        alpha = min(optStep, alphaMax)
        #Step is shorter than the maximum step.
        if(alpha != alphaMax):
            #newVertexFailFast returns True if the vertex is not yet in the active set.
            flag, index = newVertexFailFast(v, activeSet)
            lambdas[:] = [i * (1 - alpha) for i in lambdas]
            if(flag):
                activeSet.append(v)
                lambdas.append(alpha)
                vertvar = 1
            else:
                #Update the weight of the existing vertex.
                lambdas[index] += alpha
        #Maximum step length FW step: x collapses onto v, only one vertex remains.
        else:
            activeSet[:] = [v]
            lambdas[:] = [alphaMax]
            vertvar = -1
    else:
        d = x - a
        alphaMax = lambdas[indexMax]/(1.0 - lambdas[indexMax])
        optStep = stepSize(function, d, grad, typeStep)
        alpha = min(optStep, alphaMax)
        lambdas[:] = [i * (1 + alpha) for i in lambdas]
        #If the maximum step is taken the away vertex must be deleted.
        if(alpha != alphaMax):
            lambdas[indexMax] -= alpha
        else:
            deleteVertexIndex(indexMax, activeSet, lambdas)
            vertvar = -1
    return x + alpha*d, vertvar, np.dot(grad, x - v)
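
awayStepFW and pairwiseStepFW (Example #5) also depend on newVertexFailFast and deleteVertexIndex for active-set bookkeeping. The following bodies are guesses consistent with how they are called, assuming activeSet and lambdas are plain Python lists:

import numpy as np

def newVertexFailFast(v, activeSet):
    #Return (True, None) if v is not yet in the active set, otherwise (False, index).
    #The original name suggests an early-exit coordinate comparison; np.array_equal
    #is used here for brevity.
    for index, vertex in enumerate(activeSet):
        if np.array_equal(vertex, v):
            return False, index
    return True, None

def deleteVertexIndex(index, activeSet, lambdas):
    #Drop a vertex whose weight has reached zero from both lists, in place.
    del activeSet[index]
    del lambdas[index]
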
Example #4
def pairwiseStepFWSimplex(function, feasibleReg, x, typeStep):
    grad = function.fEvalGrad(x)
    v = feasibleReg.LPOracle(grad)
    a, index = feasibleReg.AwayOracle(grad, x)
    vertVar = 0
    #Find the weight of the extreme point a in the decomposition.
    alphaMax = x[index]
    #Pairwise direction: shift weight from the away vertex to the FW vertex.
    d = v - a
    optStep = stepSize(function, d, grad, typeStep)
    alpha = min(optStep, alphaMax)
    if (alpha == alphaMax):
        vertVar = -1
    #The FW vertex is new if x currently carries no weight on it.
    if (np.dot(v, x) == 0.0):
        vertVar = 1
    return x + alpha * d, vertVar, np.dot(grad, x - v)
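
In the simplex variants the active set is implicit in the support of x, so the test np.dot(v, x) == 0.0 decides whether the FW vertex is new: a vertex e_i is new exactly when x_i = 0. A small illustration:

import numpy as np

x = np.array([0.5, 0.5, 0.0])       #Current iterate on the unit simplex.
v_new = np.array([0.0, 0.0, 1.0])   #Vertex outside the support of x.
v_old = np.array([1.0, 0.0, 0.0])   #Vertex already carrying weight in x.
print(np.dot(v_new, x) == 0.0)      #True: the FW vertex would be new.
print(np.dot(v_old, x) == 0.0)      #False: the FW vertex is already in the support.
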
Example #5
def pairwiseStepFW(function, feasibleReg, x, activeSet, lambdas, typeStep):
    grad = function.fEvalGrad(x)
    v = feasibleReg.LPOracle(grad)
    a, index = feasibleReg.AwayOracle(grad, activeSet)
    vertVar = 0
    #Find the weight of the extreme point a in the decomposition.
    alphaMax = lambdas[index]
    #Update weight of away vertex.
    d = v - a
    optStep = stepSize(function, d, grad, typeStep)
    alpha = min(optStep, alphaMax)
    lambdas[index] -= alpha
    if(alpha == alphaMax):
        deleteVertexIndex(index, activeSet, lambdas)
        vertVar = -1
    #Update the weight of the FW vertex, adding it to the active set if it is new.
    flag, index = newVertexFailFast(v, activeSet)
    if(flag):
        activeSet.append(v)
        lambdas.append(alpha)
        vertVar = 1
    else:
        lambdas[index] += alpha
    return x + alpha*d, vertVar, np.dot(grad, x - v)
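
For completeness, a sketch of how one of these step routines could be driven by an outer Frank-Wolfe loop. The Quadratic objective is an illustrative stand-in, and the snippet reuses the ProbabilitySimplex and stepSize sketches given after Example #1; none of this is part of the original code.

import numpy as np

class Quadratic:
    #Illustrative objective f(x) = 0.5 * ||x - b||^2 with smoothness constant L = 1.
    def __init__(self, b):
        self.b = b
        self.L = 1.0
    def fEvalGrad(self, x):
        return x - self.b

dim = 5
function = Quadratic(np.full(dim, 1.0 / dim))
feasibleReg = ProbabilitySimplex(dim)

x = np.zeros(dim)
x[0] = 1.0  #Start at a vertex of the simplex.
for iteration in range(100):
    x, vertvar, fwGap = awayStepFWSimplex(function, feasibleReg, x, typeStep="SS")
    #The third return value is the FW gap <grad, x - v> at the previous iterate.
    if fwGap < 1e-6:
        break
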