# Example 1
    def localInputAndChecks(self, xmlNode):
        """
          Local method for additional reading.
          @ In, xmlNode, xml.etree.ElementTree.Element, Xml element node
          @ Out, None
        """
        GradientBasedOptimizer.localInputAndChecks(self, xmlNode)
        # SPSA gain-sequence exponents; 0.602/0.101 are Spall's recommended practical defaults
        self.paramDict['alpha'] = float(self.paramDict.get('alpha', 0.602))
        self.paramDict['gamma'] = float(self.paramDict.get('gamma', 0.101))
        # stability constant "A": defaults to 10% of the model-evaluation limit
        self.paramDict['A'] = float(
            self.paramDict.get('A', self.limit['mdlEval'] / 10.))
        # leave "a" as None for now so the defaulting branch below can detect
        # whether the user supplied a value
        self.paramDict['a'] = self.paramDict.get('a', None)
        self.paramDict['c'] = float(self.paramDict.get('c', 0.005))
        #FIXME the optimization parameters should probably all operate ONLY on normalized data!
        #  -> perhaps the whole optimizer should only work on optimized data.

        #FIXME normalizing doesn't seem to have the desired effect, currently; it makes the step size very small (for large scales)
        #if "a" was defaulted, use the average scale of the input space.
        #This is the suggested value from the paper, missing a 1/gradient term since we don't know it yet.
        if self.paramDict['a'] is None:
            self.paramDict['a'] = mathUtils.hyperdiagonal(
                np.ones(len(
                    self.getOptVars())))  # the features are always normalized
            self.raiseAMessage('Defaulting "a" gradient parameter to',
                               self.paramDict['a'])
        else:
            self.paramDict['a'] = float(self.paramDict['a'])

        self.constraintHandlingPara['innerBisectionThreshold'] = float(
            self.paramDict.get('innerBisectionThreshold', 1e-2))
        self.constraintHandlingPara['innerLoopLimit'] = float(
            self.paramDict.get('innerLoopLimit', 1000))

        # SPSA needs two perturbed evaluations per gradient-averaging iteration
        self.gradDict['pertNeeded'] = self.gradDict['numIterForAve'] * 2

        stochDist = self.paramDict.get('stochasticDistribution', 'Hypersphere')
        if stochDist == 'Bernoulli':
            self.stochasticDistribution = Distributions.returnInstance(
                'Bernoulli', self)
            self.stochasticDistribution.p = 0.5
            self.stochasticDistribution.initializeDistribution()
            # Initialize bernoulli distribution for random perturbation. Add artificial noise to avoid that specular loss functions get false positive convergence
            # FIXME there has to be a better way to get two random numbers
            self.stochasticEngine = lambda: [
                (0.5 + randomUtils.random() * (1. + randomUtils.random(
                ) / 1000. * randomUtils.randomIntegers(-1, 1, self)))
                if self.stochasticDistribution.rvs() == 1 else -1. *
                (0.5 + randomUtils.random() * (1. + randomUtils.random(
                ) / 1000. * randomUtils.randomIntegers(-1, 1, self)))
                for _ in range(len(self.getOptVars()))
            ]
        elif stochDist == 'Hypersphere':
            self.stochasticEngine = lambda: randomUtils.randPointsOnHypersphere(
                len(self.getOptVars()))
        else:
            # BUGFIX: report the value actually read ('stochasticDistribution');
            # the old code indexed the absent key 'stochasticEngine' and raised
            # KeyError instead of the intended IOError; also restore the missing
            # space before "is currently" in the message.
            self.raiseAnError(
                IOError, stochDist + ' is currently not supported for SPSA')
# Example 2
  def localInputAndChecks(self, xmlNode, paramInput):
    """
      Local method for additional reading.
      @ In, xmlNode, xml.etree.ElementTree.Element, Xml element node
      @ In, paramInput, InputData.ParameterInput, the parsed parameters
      @ Out, None
    """
    GradientBasedOptimizer.localInputAndChecks(self, xmlNode, paramInput)
    self.currentDirection   = None
    numValues = self._numberOfSamples()
    # set the initial step size
    ## use the hyperdiagonal of a unit hypercube with a side length equal to the user's provided initialStepSize * 1.0
    stepPercent = float(self.paramDict.get('initialStepSize', 0.05))
    self.paramDict['initialStepSize'] = mathUtils.hyperdiagonal(np.ones(numValues)*stepPercent)
    self.raiseADebug('Based on initial step size factor of "{:1.5e}", initial step size is "{:1.5e}"'
                         .format(stepPercent, self.paramDict['initialStepSize']))
    # set the perturbation distance
    ## if not given, default to a fraction 0.01 of the step size
    ## NOTE(review): the original comment said "10%" but the default is 0.01 -- confirm intended units
    self.paramDict['pertDist'] = float(self.paramDict.get('perturbationDistance',0.01))
    self.raiseADebug('Perturbation distance is "{:1.5e}" percent of the step size'
                         .format(self.paramDict['pertDist']))

    # bisection threshold must be a fraction strictly inside (0, 1)
    self.constraintHandlingPara['innerBisectionThreshold'] = float(self.paramDict.get('innerBisectionThreshold', 1e-2))
    if not 0 < self.constraintHandlingPara['innerBisectionThreshold'] < 1:
      self.raiseAnError(IOError,'innerBisectionThreshold must be between 0 and 1; got',self.constraintHandlingPara['innerBisectionThreshold'])
    self.constraintHandlingPara['innerLoopLimit'] = float(self.paramDict.get('innerLoopLimit', 1000))

    # perturbations needed per gradient estimate: one per single-gradient perturbation plus the opt point itself
    self.gradDict['pertNeeded'] = self.gradDict['numIterForAve'] * (self.paramDict['pertSingleGrad']+1)

    # determine the number of indpendent variables (scalar and vectors included)
    stochDist = self.paramDict.get('stochasticDistribution', 'Hypersphere')
    if stochDist == 'Bernoulli':
      self.stochasticDistribution = Distributions.returnInstance('Bernoulli',self)
      self.stochasticDistribution.p = 0.5
      self.stochasticDistribution.initializeDistribution()
      # Initialize bernoulli distribution for random perturbation. Add artificial noise to avoid that specular loss functions get false positive convergence
      # FIXME there has to be a better way to get two random numbers
      self.stochasticEngine = lambda: [(0.5+randomUtils.random()*(1.+randomUtils.random()/1000.*randomUtils.randomIntegers(-1, 1, self))) if self.stochasticDistribution.rvs() == 1 else
                                   -1.*(0.5+randomUtils.random()*(1.+randomUtils.random()/1000.*randomUtils.randomIntegers(-1, 1, self))) for _ in range(numValues)]
    elif stochDist == 'Hypersphere':
      # TODO assure you can't get a "0" along any dimension! Need to be > 1e-15. Right now it's just highly unlikely.
      self.stochasticEngine = lambda: randomUtils.randPointsOnHypersphere(numValues) if numValues > 1 else [randomUtils.randPointsOnHypersphere(numValues)]
    else:
      # BUGFIX: use the value actually read above; the old code indexed the
      # nonexistent key 'stochasticEngine' (KeyError, not the intended IOError)
      # and was missing the space before "is currently" in the message.
      self.raiseAnError(IOError, stochDist + ' is currently not supported for SPSA')
# Example 3
 def chooseEvaluationPoints(self, opt, stepSize):
   """
     Determines new point(s) needed to evaluate gradient
     @ In, opt, dict, current opt point (normalized)
     @ In, stepSize, float, distance from opt point to sample neighbors
     @ Out, evalPoints, list(dict), list of points that need sampling
     @ Out, evalInfo, list(dict), identifying information about points
   """
   # distance of the perturbed sample from the opt point
   distance = self._proximity * stepSize
   # a single random unit direction perturbing all variables simultaneously
   direction = randomUtils.randPointsOnHypersphere(self.N)
   # per-variable offsets, and the resulting neighbor point
   delta = {var: direction[ix] * distance
            for ix, var in enumerate(self._optVars)}
   neighbor = {var: opt[var] + delta[var] for var in self._optVars}
   # SPSA only needs one perturbed point, but callers expect lists
   return [neighbor], [{'type': 'grad',
                        'delta': delta}]
# Example 4
  n = randomUtils.randomIntegers(10,20,None,engine=eng) #no message handler, error handling will error out
  checkAnswer('random integer, {} sample for local engine provided'.format(i),n,right[i])
### randomPermutation(), rearranging lists
## reseed both the module-global stream and the local engine so the two calls
## below consume identical random sequences and can expect the same permutation
randomUtils.randomSeed(42,engine=None)
randomUtils.randomSeed(42,engine=eng)
l = [1,2,3,4,5]
l2 = randomUtils.randomPermutation(l,None,engine=None)
checkArray('random permutation for engine not provided',l2,[2,4,5,1,3])
l2 = randomUtils.randomPermutation(l,None,engine=eng)
checkArray('random permutation for local engine provided',l2,[2,4,5,1,3])
### randPointsOnHypersphere(), unit hypersphere surface sampling (aka random direction)
randomUtils.randomSeed(42,engine=None)
randomUtils.randomSeed(42,engine=eng)
## check the radius is always 1 (if not specified)
## sum of squared components of a surface point must equal r^2 (here 1.0)
for i in range(1,6):
  pt = randomUtils.randPointsOnHypersphere(i,engine=None)
  checkAnswer('Random {}D hypersphere surface for engine not provided'.format(i),np.sum(pt*pt),1.0)
for i in range(1,6):
  pt = randomUtils.randPointsOnHypersphere(i,engine=eng)
  checkAnswer('Random {}D hypersphere surface for local engine provided'.format(i),np.sum(pt*pt),1.0)
## check the sum of the squares is always the square of the radius
randomUtils.randomSeed(42,engine=None)
randomUtils.randomSeed(42,engine=eng)
## same invariant as above but with explicit, non-unit radii
for i in [0.2,0.7,1.5,10.0, 100.0]:
  pt = randomUtils.randPointsOnHypersphere(4,r=i,engine=None)
  checkAnswer('Random 4D hypersphere surface with {} radius for engine not provided'.format(i),np.sum(pt*pt),i*i)
for i in [0.2,0.7,1.5,10.0, 100.0]:
  pt = randomUtils.randPointsOnHypersphere(4,r=i,engine=eng)
  checkAnswer('Random 4D hypersphere surface with {} radius for local engine provided'.format(i),np.sum(pt*pt),i*i)
## check multiple sampling simultaneously
randomUtils.randomSeed(42,engine=None)
# Example 5
## draw five integers in [10, 20] and compare against the precomputed 'right' values
for i in range(5):
    n = randomUtils.randomIntegers(
        10, 20, None)  #no message handler, error handling will error out
    checkAnswer('random integer, {} sample'.format(i), n, right[i])

### randomPermutation(), rearranging lists
## reseed so the permutation below is reproducible
randomUtils.randomSeed(42)
l = [1, 2, 3, 4, 5]
l2 = randomUtils.randomPermutation(l, None)
checkArray('random permutation', l2, [2, 4, 5, 1, 3])

### randPointsOnHypersphere(), unit hypersphere surface sampling (aka random direction)
randomUtils.randomSeed(42)
## check the radius is always 1 (if not specified)
## sum of squared components of a surface point must equal r^2 (here 1.0)
for i in range(1, 6):
    pt = randomUtils.randPointsOnHypersphere(i)
    checkAnswer('Random {}D hypersphere surface'.format(i), np.sum(pt * pt),
                1.0)
## check the sum of the squares is always the square of the radius
randomUtils.randomSeed(42)
## same invariant as above but with explicit, non-unit radii
for i in [0.2, 0.7, 1.5, 10.0, 100.0]:
    pt = randomUtils.randPointsOnHypersphere(4, r=i)
    checkAnswer('Random 4D hypersphere surface with {} radius'.format(i),
                np.sum(pt * pt), i * i)
## check multiple sampling simultaneously
randomUtils.randomSeed(42)
## request 100 samples at once; result is expected to be shaped (samples, dimension)
samps = randomUtils.randPointsOnHypersphere(5, samples=100)
checkAnswer('Simultaneous random 5D on hypersphere, 0 axis', samps.shape[0],
            100)
checkAnswer('Simultaneous random 5D on hypersphere, 1 axis', samps.shape[1], 5)
for i, s in enumerate(samps):